def list_case_studies():
    """List case studies as JSON, optionally filtered by supplier_code.

    With a truthy supplier_code the full (unpaginated) filtered set is
    returned; otherwise the listing is paginated.
    """
    page = get_valid_page_or_1()
    supplier_code = get_int_or_400(request.args, 'supplier_code')

    query = CaseStudy.query
    if supplier_code is not None:
        query = query.filter(CaseStudy.supplier_code == supplier_code)

    # NOTE(review): the early return triggers on truthiness, so a
    # supplier_code of 0 would filter yet still paginate — confirm intended.
    if supplier_code:
        serialized = [cs.serialize() for cs in query.all()]
        self_link = url_for('.list_case_studies', supplier_code=supplier_code)
        return jsonify(caseStudies=serialized, links={'self': self_link})

    per_page = get_positive_int_or_400(
        request.args, 'per_page', current_app.config['DM_API_PAGE_SIZE'])
    paginated = query.paginate(page=page, per_page=per_page)
    return jsonify(
        caseStudies=[cs.serialize() for cs in paginated.items],
        links=pagination_links(paginated, '.list_case_studies', request.args),
    )
def list_case_studies():
    """Serve case studies as JSON; filter by supplier_code when given.

    When supplier_code is truthy the whole filtered set is returned in a
    single response; otherwise results are paginated.
    """
    current_page = get_valid_page_or_1()
    supplier_code = get_int_or_400(request.args, 'supplier_code')

    studies = CaseStudy.query
    if supplier_code is not None:
        studies = studies.filter(CaseStudy.supplier_code == supplier_code)

    if supplier_code:
        return jsonify(
            caseStudies=[item.serialize() for item in studies.all()],
            links={'self': url_for('.list_case_studies', supplier_code=supplier_code)},
        )

    page_size = get_positive_int_or_400(
        request.args, 'per_page', current_app.config['DM_API_PAGE_SIZE']
    )
    page_of_studies = studies.paginate(page=current_page, per_page=page_size)
    return jsonify(
        caseStudies=[item.serialize() for item in page_of_studies.items],
        links=pagination_links(page_of_studies, '.list_case_studies', request.args),
    )
def get_paginated_jobs(service_id, limit_days, statuses, page):
    """Return one page of a service's jobs, each annotated with
    notification status counts.

    :param service_id: id of the service whose jobs are listed
    :param limit_days: restrict to jobs within this many days
    :param statuses: job statuses to include
    :param page: 1-based page number
    :return: dict with 'data', 'page_size', 'total' and pagination 'links'
    """
    pagination = dao_get_jobs_by_service_id(
        service_id,
        limit_days=limit_days,
        page=page,
        page_size=current_app.config['PAGE_SIZE'],
        statuses=statuses
    )
    data = job_schema.dump(pagination.items, many=True).data
    for job_data in data:
        start = job_data['processing_started']
        # Normalise to a naive datetime so it can be compared with
        # midnight_n_days_ago() below.
        start = dateutil.parser.parse(start).replace(tzinfo=None) if start else None
        if start is None:
            statistics = []
        elif start < midnight_n_days_ago(3):
            # Older jobs: read from the aggregated ft_notification_status table.
            # (Fixed: 'start' is already naive here, the previous second
            # .replace(tzinfo=None) was redundant.)
            statistics = fetch_notification_statuses_for_job(job_data['id'])
        else:
            # Recent jobs: read live counts from the notifications table.
            statistics = dao_get_notification_outcomes_for_job(service_id, job_data['id'])
        job_data['statistics'] = [
            {'status': statistic.status, 'count': statistic.count}
            for statistic in statistics
        ]
    return {
        'data': data,
        'page_size': pagination.per_page,
        'total': pagination.total,
        'links': pagination_links(
            pagination,
            '.get_jobs_by_service',
            service_id=service_id
        )
    }
def get_uploaded_letter_by_service_and_print_day(service_id, letter_print_date):
    """Return a page of a service's uploaded letters for one print day.

    letter_print_date must be an ISO 'YYYY-MM-DD' string; anything else
    aborts with 400.
    """
    try:
        print_day = datetime.strptime(letter_print_date, '%Y-%m-%d')
    except ValueError:
        abort(400)

    pagination = dao_get_uploaded_letters_by_print_date(
        service_id,
        letter_print_date=print_day,
        page=request.args.get('page', type=int),
        page_size=current_app.config['PAGE_SIZE']
    )
    serialized = notification_with_template_schema.dump(
        pagination.items,
        many=True,
    ).data
    links = pagination_links(
        pagination,
        '.get_uploaded_letter_by_service_and_print_day',
        service_id=service_id,
        letter_print_date=letter_print_date,
    )
    return jsonify({
        'notifications': serialized,
        'page_size': pagination.per_page,
        'total': pagination.total,
        'links': links,
    })
def get_all_notifications():
    """Return a page of the authenticated service's notifications as JSON."""
    data = notifications_filter_schema.load(request.args).data
    include_jobs = data.get("include_jobs", False)
    page = data.get("page", 1)
    page_size = data.get("page_size", current_app.config.get("API_PAGE_SIZE"))
    limit_days = data.get("limit_days")

    pagination = notifications_dao.get_notifications_for_service(
        str(authenticated_service.id),
        personalisation=True,
        filter_dict=data,
        page=page,
        page_size=page_size,
        limit_days=limit_days,
        key_type=api_user.key_type,
        include_jobs=include_jobs,
    )
    serialized = notification_with_personalisation_schema.dump(
        pagination.items, many=True).data
    links = pagination_links(
        pagination, ".get_all_notifications", **request.args.to_dict())
    return (
        jsonify(
            notifications=serialized,
            page_size=page_size,
            total=pagination.total,
            links=links,
        ),
        200,
    )
def get_all_notifications_for_service(service_id):
    """Return a page of notifications for a service, filtered by query args.

    When a 'to' filter is present, delegates to the recipient/reference
    search endpoint instead of paginating.
    """
    data = notifications_filter_schema.load(request.args).data
    if data.get('to'):
        notification_type = data.get('template_type')[0] if data.get('template_type') else None
        return search_for_notification_by_to_field(service_id=service_id,
                                                   search_term=data['to'],
                                                   statuses=data.get('status'),
                                                   notification_type=notification_type)

    # dict.get with defaults replaces the previous "x if k in d else y" form.
    page = data.get('page', 1)
    page_size = data.get('page_size', current_app.config.get('PAGE_SIZE'))
    limit_days = data.get('limit_days')
    include_jobs = data.get('include_jobs', True)
    include_from_test_key = data.get('include_from_test_key', False)

    pagination = notifications_dao.get_notifications_for_service(
        service_id,
        filter_dict=data,
        page=page,
        page_size=page_size,
        limit_days=limit_days,
        include_jobs=include_jobs,
        include_from_test_key=include_from_test_key
    )
    kwargs = request.args.to_dict()
    kwargs['service_id'] = service_id
    return jsonify(
        notifications=notification_with_template_schema.dump(pagination.items, many=True).data,
        page_size=page_size,
        total=pagination.total,
        links=pagination_links(
            pagination,
            '.get_all_notifications_for_service',
            **kwargs
        )
    ), 200
def get_all_complaints():
    """Return one page of complaints across all services."""
    requested_page = int(request.args.get('page', 1))
    pagination = fetch_paginated_complaints(page=requested_page)
    serialized = [complaint.serialize() for complaint in pagination.items]
    links = pagination_links(
        pagination, '.get_all_complaints', **request.args.to_dict())
    return jsonify(complaints=serialized, links=links), 200
def get_paginated_jobs(service_id, limit_days, statuses, page):
    """Return one page of a service's jobs, each annotated with
    notification outcome statistics."""
    pagination = dao_get_jobs_by_service_id(
        service_id,
        limit_days=limit_days,
        page=page,
        page_size=current_app.config['PAGE_SIZE'],
        statuses=statuses)
    data = job_schema.dump(pagination.items, many=True).data
    for job_data in data:
        outcomes = dao_get_notification_outcomes_for_job(
            service_id, job_data['id'])
        # Each outcome row is indexed (count, status).
        job_data['statistics'] = [
            {'status': row[1], 'count': row[0]} for row in outcomes
        ]
    return {
        'data': data,
        'page_size': pagination.per_page,
        'total': pagination.total,
        'links': pagination_links(
            pagination, '.get_jobs_by_service', service_id=service_id),
    }
def get_all_notifications_for_service_job(service_id, job_id):
    """Return one page of notifications for a specific job of a service.

    When 'format_for_csv' is set in the filter args, items are serialized
    with serialize_for_csv() instead of the template schema.
    """
    data = notifications_filter_schema.load(request.args).data
    # dict.get with a default replaces the old "x if k in d else y" form.
    page = data.get('page', 1)
    page_size = data.get('page_size', current_app.config.get('PAGE_SIZE'))
    paginated_notifications = get_notifications_for_job(service_id,
                                                        job_id,
                                                        filter_dict=data,
                                                        page=page,
                                                        page_size=page_size)
    kwargs = request.args.to_dict()
    kwargs['service_id'] = service_id
    kwargs['job_id'] = job_id

    if data.get('format_for_csv'):
        notifications = [
            notification.serialize_for_csv()
            for notification in paginated_notifications.items
        ]
    else:
        notifications = notification_with_template_schema.dump(
            paginated_notifications.items, many=True).data
    return jsonify(notifications=notifications,
                   page_size=page_size,
                   total=paginated_notifications.total,
                   links=pagination_links(
                       paginated_notifications,
                       '.get_all_notifications_for_service_job',
                       **kwargs)), 200
def list_agreements_signed(supplier_code):
    """List agreements signed by a supplier, optionally only current ones."""
    current_only = request.args.get('current_only', False)
    page = get_valid_page_or_1()

    signed = SignedAgreement.query.filter(SignedAgreement.supplier_code == supplier_code)
    if current_only:
        # Restrict to signatures whose underlying agreement is current.
        signed = signed.outerjoin(Agreement, Agreement.id == SignedAgreement.agreement_id)\
            .filter(Agreement.is_current)

    per_page = get_positive_int_or_400(
        request.args, 'per_page', current_app.config['DM_API_PAGE_SIZE']
    )
    page_of_signed = signed.paginate(page=page, per_page=per_page)
    # NOTE(review): links target '.list_agreements' rather than
    # '.list_agreements_signed' — looks like a copy-paste; confirm intended.
    return jsonify(
        agreements=[item.serialize() for item in page_of_signed.items],
        links=pagination_links(page_of_signed, '.list_agreements', request.args),
    )
def list_agreements():
    """List agreements, optionally restricted to current ones, paginated."""
    current_only = request.args.get('current_only', False)
    page = get_valid_page_or_1()

    query = Agreement.query
    if current_only:
        query = query.filter(Agreement.is_current)

    per_page = get_positive_int_or_400(
        request.args, 'per_page', current_app.config['DM_API_PAGE_SIZE']
    )
    page_of_agreements = query.paginate(page=page, per_page=per_page)
    return jsonify(
        agreements=[item.serialize() for item in page_of_agreements.items],
        links=pagination_links(page_of_agreements, '.list_agreements', request.args),
    )
def get_paginated_uploads(service_id, limit_days, page):
    """Return one page of a service's uploads with per-job statistics.

    Job-type uploads get notification status counts attached; other
    upload types get an empty statistics list.
    """
    pagination = dao_get_uploads_by_service_id(
        service_id,
        limit_days=limit_days,
        page=page,
        page_size=current_app.config['PAGE_SIZE'])
    uploads = pagination.items
    data = []
    for upload in uploads:
        upload_dict = {
            'id': upload.id,
            'original_file_name': upload.original_file_name,
            'notification_count': upload.notification_count,
            # Prefer the scheduled time over the creation time when present.
            'created_at': upload.scheduled_for.strftime("%Y-%m-%d %H:%M:%S")
            if upload.scheduled_for else upload.created_at.strftime("%Y-%m-%d %H:%M:%S"),
            'upload_type': upload.upload_type,
            'template_type': upload.template_type,
            'recipient': upload.recipient,
        }
        if upload.upload_type == 'job':
            start = upload.processing_started
            if start is None:
                # Not yet processed: no statistics available.
                statistics = []
            elif start.replace(tzinfo=None) < midnight_n_days_ago(3):
                # ft_notification_status table
                statistics = fetch_notification_statuses_for_job(upload.id)
            else:
                # notifications table
                statistics = dao_get_notification_outcomes_for_job(
                    service_id, upload.id)
            upload_dict['statistics'] = [{
                'status': statistic.status,
                'count': statistic.count
            } for statistic in statistics]
        else:
            upload_dict['statistics'] = []
        data.append(upload_dict)
    return {
        'data': data,
        'page_size': pagination.per_page,
        'total': pagination.total,
        'links': pagination_links(pagination,
                                  '.get_uploads_by_service',
                                  service_id=service_id)
    }
def get_all_notifications_for_service(service_id):
    """Return notifications for a service, paginated and filtered.

    A 'to' filter delegates to the recipient/reference search endpoint.
    'format_for_csv' switches item serialization to serialize_for_csv().
    """
    data = notifications_filter_schema.load(request.args).data
    if data.get("to"):
        notification_type = data.get("template_type")[0] if data.get(
            "template_type") else None
        return search_for_notification_by_to_field(
            service_id=service_id,
            search_term=data["to"],
            statuses=data.get("status"),
            notification_type=notification_type,
        )

    # dict.get with defaults replaces the previous "x if k in d else y" form.
    page = data.get("page", 1)
    page_size = data.get("page_size", current_app.config.get("PAGE_SIZE"))
    limit_days = data.get("limit_days")
    include_jobs = data.get("include_jobs", True)
    include_from_test_key = data.get("include_from_test_key", False)
    include_one_off = data.get("include_one_off", True)
    count_pages = data.get("count_pages", True)

    pagination = notifications_dao.get_notifications_for_service(
        service_id,
        filter_dict=data,
        page=page,
        page_size=page_size,
        count_pages=count_pages,
        limit_days=limit_days,
        include_jobs=include_jobs,
        include_from_test_key=include_from_test_key,
        include_one_off=include_one_off,
    )
    kwargs = request.args.to_dict()
    kwargs["service_id"] = service_id

    if data.get("format_for_csv"):
        notifications = [
            notification.serialize_for_csv()
            for notification in pagination.items
        ]
    else:
        notifications = notification_with_template_schema.dump(
            pagination.items, many=True).data
    return (
        jsonify(
            notifications=notifications,
            page_size=page_size,
            total=pagination.total,
            links=pagination_links(pagination,
                                   ".get_all_notifications_for_service",
                                   **kwargs),
        ),
        200,
    )
def get_callbacks_failing_for_all_services():
    """Return one page of failing delivery callbacks across all services."""
    page = int(request.args.get('page', 1))
    pagination = dao_get_callback_failures_for_all_services(page=page)
    serialized = [item.serialize() for item in pagination.items]
    return jsonify(
        callbacks=serialized,
        links=pagination_links(
            pagination,
            '.get_callbacks_failing_for_all_services',
            **request.args.to_dict()),
    ), 200
def list_suppliers():
    """List suppliers as JSON, filtered by optional 'name' and 'prefix'
    query args; paginated unless per_page is non-positive.
    """
    page = get_valid_page_or_1()
    prefix = request.args.get('prefix', '')
    name = request.args.get('name', None)
    results_per_page = get_positive_int_or_400(
        request.args,
        'per_page',
        current_app.config['DM_API_SUPPLIERS_PAGE_SIZE']
    )
    if name is None:
        # No exact-name filter: exclude suppliers carrying the dummy ABN.
        suppliers = Supplier.query.filter(Supplier.abn.is_(None) | (Supplier.abn != Supplier.DUMMY_ABN))
    else:
        # Exact match against either the short or the long name.
        suppliers = Supplier.query.filter((Supplier.name == name) | (Supplier.long_name == name))
    suppliers = suppliers.filter(Supplier.status != 'deleted')
    if prefix:
        if prefix == 'other':
            # 'other' bucket: names not starting with a letter
            # (Postgres '~' regex operator).
            suppliers = suppliers.filter(
                Supplier.name.op('~')('^[^A-Za-z]'))
        else:
            suppliers = suppliers.outerjoin(SupplierContact).outerjoin(Contact)
            # case insensitive LIKE comparison for matching supplier names, supplier email and contact email
            suppliers = suppliers.filter(or_(
                Supplier.name.ilike(prefix + '%'),
                Supplier.data['email'].astext.ilike('%{}%'.format(prefix)),
                Contact.email.ilike('%{}%'.format(prefix))
            ))
    suppliers = suppliers.distinct(Supplier.name, Supplier.code)
    try:
        if results_per_page > 0:
            paginator = suppliers.paginate(
                page=page,
                per_page=results_per_page,
            )
            links = pagination_links(
                paginator,
                '.list_suppliers',
                request.args
            )
            supplier_results = paginator.items
        else:
            # Non-positive per_page disables pagination entirely.
            links = {
                'self': url_for('.list_suppliers', _external=True, **request.args),
            }
            supplier_results = suppliers.all()
        supplier_data = [supplier.serializable for supplier in supplier_results]
    except DataError:
        abort(400, 'invalid framework')
    return jsonify(suppliers=supplier_data, links=links)
def list_suppliers():
    """List suppliers, filtered by optional 'name' and 'prefix' query
    arguments; results are paginated unless per_page is non-positive.
    """
    page = get_valid_page_or_1()
    prefix = request.args.get('prefix', '')
    name = request.args.get('name', None)
    results_per_page = get_positive_int_or_400(
        request.args, 'per_page',
        current_app.config['DM_API_SUPPLIERS_PAGE_SIZE'])
    if name is None:
        # Without a name filter, hide suppliers that only carry the dummy ABN.
        suppliers = Supplier.query.filter(
            Supplier.abn.is_(None) | (Supplier.abn != Supplier.DUMMY_ABN))
    else:
        # Exact match on short or long name.
        suppliers = Supplier.query.filter((Supplier.name == name)
                                          | (Supplier.long_name == name))
    suppliers = suppliers.filter(Supplier.status != 'deleted')
    if prefix:
        if prefix == 'other':
            # Names not beginning with a letter (Postgres '~' regex operator).
            suppliers = suppliers.filter(Supplier.name.op('~')('^[^A-Za-z]'))
        else:
            suppliers = suppliers.outerjoin(SupplierContact).outerjoin(Contact)
            # case insensitive LIKE comparison for matching supplier names, supplier email and contact email
            suppliers = suppliers.filter(
                or_(Supplier.name.ilike(prefix + '%'),
                    Supplier.data['email'].astext.ilike('%{}%'.format(prefix)),
                    Contact.email.ilike('%{}%'.format(prefix))))
    suppliers = suppliers.distinct(Supplier.name, Supplier.code)
    try:
        if results_per_page > 0:
            paginator = suppliers.paginate(
                page=page,
                per_page=results_per_page,
            )
            links = pagination_links(paginator, '.list_suppliers',
                                     request.args)
            supplier_results = paginator.items
        else:
            # Non-positive per_page returns everything unpaginated.
            links = {
                'self': url_for('.list_suppliers', _external=True,
                                **request.args),
            }
            supplier_results = suppliers.all()
        supplier_data = [
            supplier.serializable for supplier in supplier_results
        ]
    except DataError:
        abort(400, 'invalid framework')
    return jsonify(suppliers=supplier_data, links=links)
def list_users():
    """List users as JSON.

    Supports lookup by exact email_address (single result, no pagination),
    filtering by supplier_code or application_id (both validated against
    the database), and a 'simple' mode that skips loading relationships.
    """
    user_query = User.query.order_by(User.id)
    page = get_valid_page_or_1()
    results_per_page = get_positive_int_or_400(
        request.args, 'per_page', current_app.config['DM_API_USER_PAGE_SIZE'])
    # email_address is a primary key
    email_address = request.args.get('email_address')
    if email_address:
        # Exact lookup: 404 when absent, single-element list when found.
        user = user_query.filter(
            User.email_address == email_address.lower()).first_or_404()
        return jsonify(users=[user.serialize()], links={})
    supplier_code = request.args.get('supplier_code')
    if supplier_code is not None:
        try:
            supplier_code = int(supplier_code)
        except ValueError:
            abort(400, "Invalid supplier_code: {}".format(supplier_code))
        # 404 if the supplier itself does not exist.
        supplier = Supplier.query.filter(Supplier.code == supplier_code).all()
        if not supplier:
            abort(404, "supplier_code '{}' not found".format(supplier_code))
        user_query = user_query.filter(User.supplier_code == supplier_code)
    application_id = request.args.get('application_id')
    if application_id is not None:
        try:
            application_id = int(application_id)
        except ValueError:
            abort(400, "Invalid application_id: {}".format(application_id))
        # 404 if the application itself does not exist.
        application = Application.query.filter(
            Application.id == application_id).all()
        if not application:
            abort(404, "application_id '{}' not found".format(application_id))
        user_query = user_query.filter(User.application_id == application_id)
    # simple means we don't load the relationships
    if request.args.get('simple'):
        user_query = (user_query.options(noload('supplier')).options(
            noload('application')).options(noload('frameworks')))
    users = user_query.paginate(
        page=page,
        per_page=results_per_page,
    )
    return jsonify(users=[u.serialize() for u in users.items],
                   links=pagination_links(users, '.list_users', request.args))
def format_applications(applications, with_task_status):
    """Order, paginate, and serialize an applications query.

    :param applications: base Application query to refine
    :param with_task_status: when truthy (and JIRA features enabled),
        annotate each application with its JIRA assessment tasks
    :return: JSON response with applications, pagination links, and meta
    """
    # Caller may request status-then-recency ordering; default is newest first.
    if request.args.get('order_by', None) == 'application.status desc, created_at desc':
        order_by = ['application.status desc', 'created_at desc']
    else:
        order_by = ['application.created_at desc']
    applications = applications.order_by(*order_by)
    page = get_valid_page_or_1()
    results_per_page = get_positive_int_or_400(
        request.args, 'per_page', current_app.config['DM_API_APPLICATIONS_PAGE_SIZE']
    )
    applications = applications.paginate(
        page=page,
        per_page=results_per_page
    )
    apps_results = [_.serializable for _ in applications.items]
    if with_task_status and current_app.config['JIRA_FEATURES']:
        jira = get_marketplace_jira()
        tasks_by_id = jira.assessment_tasks_by_application_id()

        def annotate_app(app):
            # NOTE(review): dict.get never raises KeyError, so this
            # try/except looks vestigial — confirm before removing.
            try:
                app['tasks'] = tasks_by_id.get(str(app['id']), None)
            except KeyError:
                pass
            return app

        apps_results = [annotate_app(_) for _ in apps_results]
    return jsonify(
        applications=apps_results,
        links=pagination_links(
            applications,
            '.list_applications',
            request.args
        ),
        meta={
            "total": applications.total,
            "per_page": results_per_page
        }
    )
def get_states():
    """Return paginated states with titles resolved for the current locale."""
    paginated = State.objects.paginate(page=page_num(), per_page=per_page())
    total = State.objects.count()

    response = {}
    if paginated:
        records = [
            {
                'id': str(item['id']),
                'title': item['title'][get_locale()][0],
                'code': item['code'],
            }
            for item in paginated.items
        ]
        response = {
            'data': records,
            'links': pagination_links(paginated, total),
            'meta': pagination_meta_data(paginated, total),
        }
    return response
def get_cities():
    """Return paginated cities with titles resolved for the current locale."""
    paginated = City.objects.paginate(page=page_num(), per_page=per_page())
    total = City.objects.count()

    response = {}
    if paginated:
        records = [
            {
                'id': str(item['id']),
                'title': item['title'][get_locale()][0],
                'code': item['code'],
            }
            for item in paginated.items
        ]
        response = {
            'data': records,
            'links': pagination_links(paginated, total),
            'meta': pagination_meta_data(paginated, total),
        }
    return response
def search_for_notification_by_to_field(service_id, search_term, statuses, notification_type):
    """Search a service's notifications by recipient or reference.

    Always returns the first page, sized by the PAGE_SIZE config value.
    """
    results = notifications_dao.dao_get_notifications_by_recipient_or_reference(
        service_id=service_id,
        search_term=search_term,
        statuses=statuses,
        notification_type=notification_type,
        page=1,
        page_size=current_app.config['PAGE_SIZE'],
    )
    serialized = notification_with_template_schema.dump(results.items, many=True).data
    links = pagination_links(
        results,
        '.get_all_notifications_for_service',
        statuses=statuses,
        notification_type=notification_type,
        service_id=service_id,
    )
    return jsonify(notifications=serialized, links=links), 200
def get_paginated_jobs(service_id, limit_days, statuses, page):
    """Return one page of a service's jobs with per-job notification
    status counts.

    :param service_id: id of the service whose jobs are listed
    :param limit_days: restrict to jobs within this many days
    :param statuses: job statuses to include
    :param page: 1-based page number
    :return: dict with 'data', 'page_size', 'total' and pagination 'links'
    """
    pagination = dao_get_jobs_by_service_id(
        service_id,
        limit_days=limit_days,
        page=page,
        page_size=current_app.config["PAGE_SIZE"],
        statuses=statuses,
    )
    data = job_schema.dump(pagination.items, many=True).data
    for job_data in data:
        start = job_data["processing_started"]
        # Normalise to a naive datetime for comparison with
        # midnight_n_days_ago() below.
        start = dateutil.parser.parse(start).replace(
            tzinfo=None) if start else None
        if start is None:
            statistics = []
        elif start < midnight_n_days_ago(3):
            # ft_notification_status table
            # (Fixed: 'start' is already naive here, the previous second
            # .replace(tzinfo=None) was redundant.)
            statistics = fetch_notification_statuses_for_job(job_data["id"])
        else:
            # notifications table
            statistics = dao_get_notification_outcomes_for_job(
                service_id, job_data["id"])
        job_data["statistics"] = [{
            "status": statistic.status,
            "count": statistic.count
        } for statistic in statistics]
    return {
        "data": data,
        "page_size": pagination.per_page,
        "total": pagination.total,
        "links": pagination_links(pagination,
                                  ".get_jobs_by_service",
                                  service_id=service_id),
    }
def get_paginated_jobs(service_id, limit_days, statuses, page):
    """Return one page of a service's jobs annotated with notification
    outcome statistics."""
    pagination = dao_get_jobs_by_service_id(
        service_id,
        limit_days=limit_days,
        page=page,
        page_size=current_app.config['PAGE_SIZE'],
        statuses=statuses
    )
    serialized_jobs = job_schema.dump(pagination.items, many=True).data
    for job in serialized_jobs:
        outcomes = dao_get_notification_outcomes_for_job(service_id, job['id'])
        # Each outcome row is indexed (count, status).
        job['statistics'] = [
            {'status': row[1], 'count': row[0]} for row in outcomes
        ]
    return {
        'data': serialized_jobs,
        'page_size': pagination.per_page,
        'total': pagination.total,
        'links': pagination_links(
            pagination,
            '.get_jobs_by_service',
            service_id=service_id
        )
    }
def get_all_notifications_for_service_job(service_id, job_id):
    """Return one page of notifications for a specific job of a service.

    'format_for_csv' in the filter args switches item serialization to
    serialize_for_csv() instead of the template schema.
    """
    data = notifications_filter_schema.load(request.args).data
    # dict.get with a default replaces the old "x if k in d else y" form.
    page = data.get("page", 1)
    page_size = data.get("page_size", current_app.config.get("PAGE_SIZE"))
    paginated_notifications = get_notifications_for_job(service_id,
                                                        job_id,
                                                        filter_dict=data,
                                                        page=page,
                                                        page_size=page_size)
    kwargs = request.args.to_dict()
    kwargs["service_id"] = service_id
    kwargs["job_id"] = job_id

    if data.get("format_for_csv"):
        notifications = [
            notification.serialize_for_csv()
            for notification in paginated_notifications.items
        ]
    else:
        notifications = notification_with_template_schema.dump(
            paginated_notifications.items, many=True).data
    return (
        jsonify(
            notifications=notifications,
            page_size=page_size,
            total=paginated_notifications.total,
            links=pagination_links(paginated_notifications,
                                   ".get_all_notifications_for_service_job",
                                   **kwargs),
        ),
        200,
    )
def list_work_orders():
    """List work orders, filterable by brief_id and/or supplier_code.

    When either filter is truthy the whole filtered set is returned in one
    response; otherwise the listing is paginated.
    """
    page = get_valid_page_or_1()
    brief_id = get_int_or_400(request.args, 'brief_id')
    supplier_code = get_int_or_400(request.args, 'supplier_code')

    query = WorkOrder.query
    if supplier_code is not None:
        query = query.filter(WorkOrder.supplier_code == supplier_code)
    if brief_id is not None:
        query = query.filter(WorkOrder.brief_id == brief_id)

    if brief_id or supplier_code:
        serialized = [order.serialize() for order in query.all()]
        self_link = url_for('.list_work_orders', supplier_code=supplier_code, brief_id=brief_id)
        return jsonify(workOrders=serialized, links={'self': self_link})

    per_page = get_positive_int_or_400(
        request.args, 'per_page', current_app.config['DM_API_PAGE_SIZE']
    )
    page_of_orders = query.paginate(page=page, per_page=per_page)
    return jsonify(
        workOrders=[order.serialize() for order in page_of_orders.items],
        links=pagination_links(page_of_orders, '.list_work_orders', request.args),
    )
def get_all_notifications_for_service_job(service_id, job_id):
    """Return one page of notifications for a given job of a service."""
    data = notifications_filter_schema.load(request.args).data
    # dict.get with defaults replaces the previous "x if k in d else y" form.
    page = data.get('page', 1)
    page_size = data.get('page_size', current_app.config.get('PAGE_SIZE'))
    pagination = get_notifications_for_job(
        service_id, job_id, filter_dict=data, page=page, page_size=page_size)
    kwargs = request.args.to_dict()
    kwargs['service_id'] = service_id
    kwargs['job_id'] = job_id
    return jsonify(
        notifications=notification_with_template_schema.dump(pagination.items, many=True).data,
        page_size=page_size,
        total=pagination.total,
        links=pagination_links(
            pagination,
            '.get_all_notifications_for_service_job',
            **kwargs
        )
    ), 200
def get_districts():
    """Return paginated districts, optionally filtered by state_id."""
    state_id = request.args.get('state_id')
    if state_id:
        queryset = District.objects(state_id=state_id)
    else:
        queryset = District.objects
    districts = queryset.paginate(page=page_num(), per_page=per_page())
    total_districts = queryset.count()

    response = {}
    if districts:
        records = [
            {
                'id': str(item['id']),
                'title': item['title'][get_locale()][0],
                'code': item['code'],
            }
            for item in districts.items
        ]
        response = {
            'data': records,
            'links': pagination_links(districts, total_districts),
            'meta': pagination_meta_data(districts, total_districts),
        }
    return response
def get_civic_agencies():
    """Return paginated civic agencies, optionally filtered by district_id.

    Titles are resolved for the current locale.
    """
    district_id = request.args.get('district_id')
    # Removed stray debug print(district_id) left over from development.
    if district_id:
        qs = CivicAgency.objects(district_id=district_id)
        civic_agencies = qs.paginate(page=page_num(), per_page=per_page())
        total_civic_agencies = qs.count()
    else:
        civic_agencies = CivicAgency.objects.paginate(page=page_num(), per_page=per_page())
        total_civic_agencies = CivicAgency.objects.count()
    data = []
    response = {}
    if civic_agencies:
        for civic_agency in civic_agencies.items:
            data.append({
                'id': str(civic_agency['id']),
                'title': civic_agency['title'][get_locale()][0],
            })
        response = {
            'data': data,
            'links': pagination_links(civic_agencies, total_civic_agencies),
            'meta': pagination_meta_data(civic_agencies, total_civic_agencies)
        }
    return response
def list_projects():
    """List all projects, paginated."""
    page = get_valid_page_or_1()
    per_page = get_positive_int_or_400(
        request.args, 'per_page', current_app.config['DM_API_PAGE_SIZE']
    )
    page_of_projects = Project.query.paginate(page=page, per_page=per_page)
    return jsonify(
        projects=[item.serialize() for item in page_of_projects.items],
        links=pagination_links(page_of_projects, '.list_projects', request.args),
    )
def list_users():
    """List users as JSON.

    Supports exact email_address lookup (single result, no pagination),
    filtering by supplier_code or application_id (each validated against
    the database first), and a 'simple' mode that avoids loading
    relationships.
    """
    user_query = User.query.order_by(User.id)
    page = get_valid_page_or_1()
    results_per_page = get_positive_int_or_400(
        request.args, 'per_page', current_app.config['DM_API_USER_PAGE_SIZE']
    )
    # email_address is a primary key
    email_address = request.args.get('email_address')
    if email_address:
        # Exact lookup: 404 when absent, single-element list when found.
        user = user_query.filter(
            User.email_address == email_address.lower()
        ).first_or_404()
        return jsonify(
            users=[user.serialize()],
            links={}
        )
    supplier_code = request.args.get('supplier_code')
    if supplier_code is not None:
        try:
            supplier_code = int(supplier_code)
        except ValueError:
            abort(400, "Invalid supplier_code: {}".format(supplier_code))
        # 404 if the supplier itself does not exist.
        supplier = Supplier.query.filter(Supplier.code == supplier_code).all()
        if not supplier:
            abort(404, "supplier_code '{}' not found".format(supplier_code))
        user_query = user_query.filter(User.supplier_code == supplier_code)
    application_id = request.args.get('application_id')
    if application_id is not None:
        try:
            application_id = int(application_id)
        except ValueError:
            abort(400, "Invalid application_id: {}".format(application_id))
        # 404 if the application itself does not exist.
        application = Application.query.filter(Application.id == application_id).all()
        if not application:
            abort(404, "application_id '{}' not found".format(application_id))
        user_query = user_query.filter(User.application_id == application_id)
    # simple means we don't load the relationships
    if request.args.get('simple'):
        user_query = (
            user_query
            .options(noload('supplier'))
            .options(noload('application'))
            .options(noload('frameworks'))
        )
    users = user_query.paginate(
        page=page,
        per_page=results_per_page,
    )
    return jsonify(
        users=[u.serialize() for u in users.items],
        links=pagination_links(
            users,
            '.list_users',
            request.args
        )
    )