def test_last_page(self):
    """On the final page there is a previous page but no next one.

    The iterated page list elides the middle pages with ``None``.
    """
    pagination = Pagination(10, 10, 98)
    self.assertTrue(pagination.has_prev)
    self.assertFalse(pagination.has_next)
    self.assertEqual(
        list(pagination.iter_pages()),
        [1, None, 9, 10]
    )
def test_middle_page(self):
    """A middle page can navigate both ways.

    The iterated page list shows a window around the current page and
    elides the skipped ranges with ``None``.
    """
    pagination = Pagination(5, 10, 98)
    self.assertTrue(pagination.has_prev)
    self.assertTrue(pagination.has_next)
    self.assertEqual(
        list(pagination.iter_pages()),
        [1, None, 4, 5, 6, 7, None, 10]
    )
def load(page, per_page, sort_key):
    """Load objects for the table.

    Fetches one page of holdingpen object ids, remembers the current view
    state (ids, sort key, page size, page, tags) in the session and returns
    the rendered rows plus pagination metadata as JSON.

    :param page: requested 1-based page number (clamped to >= 1).
    :param per_page: page size; falls back to the session value, then 25.
    :param sort_key: sort field; overridden by the ``sort_key`` request arg.
    """
    # FIXME: Load tags in this way until wash_arguments handles lists.
    tags = request.args.getlist("tags[]") or []  # empty to show all
    sort_key = request.args.get(
        'sort_key', session.get('holdingpen_sort_key', "modified")
    )
    page = max(page, 1)
    per_page = per_page or session.get('holdingpen_per_page') or 25

    current_app.logger.debug(tags)
    ids, total = get_holdingpen_objects(
        tags_list=tags,
        per_page=per_page,
        page=page,
        sort_key=sort_key
    )
    # BUG FIX: this message used to log ``ids`` instead of the actual
    # hit count, making the "Total hits" line misleading.
    current_app.logger.debug("Total hits: {0}".format(total))
    current_app.logger.debug(ids)

    pagination = Pagination(page, per_page, total)

    # Make sure requested page is within limits.
    if pagination.page > pagination.pages:
        pagination.page = pagination.pages

    pages_iteration = []
    for iter_page in pagination.iter_pages():
        pages_iteration.append({
            "page": iter_page,
            # Flag the entry that corresponds to the current page.
            "active": iter_page == pagination.page,
        })

    table_data = {
        'rows': [],
        'pagination': {
            "page": pagination.page,
            "pages": pagination.pages,
            "iter_pages": pages_iteration,
            "per_page": pagination.per_page,
            "total_count": pagination.total_count
        }
    }

    # Add current ids in table for use by previous/next
    session['holdingpen_current_ids'] = ids
    session['holdingpen_sort_key'] = sort_key
    session['holdingpen_per_page'] = per_page
    session['holdingpen_page'] = page
    session['holdingpen_tags'] = tags

    table_data["rows"] = get_rows(ids)
    table_data["rendered_rows"] = "".join(table_data["rows"])
    return jsonify(table_data)
def load(page, per_page, sort_key):
    """Load objects for the table.

    Fetches and sorts all matching holdingpen objects, slices out the
    requested page, remembers the view state in the session and returns
    the rendered rows plus pagination metadata as JSON.
    """
    # FIXME: Load tags in this way until wash_arguments handles lists.
    tags = request.args.getlist("tags[]") or []  # empty to show all
    sort_key = request.args.get(
        'sort_key', session.get('holdingpen_sort_key', "updated")
    )
    per_page = per_page or session.get('holdingpen_per_page') or 10

    object_list = sort_bwolist(get_holdingpen_objects(tags), sort_key)

    page = max(page, 1)
    pagination = Pagination(page, per_page, len(object_list))

    # Make sure requested page is within limits.
    if pagination.page > pagination.pages:
        pagination.page = pagination.pages

    pages_iteration = [
        {"page": number, "active": number == pagination.page}
        for number in pagination.iter_pages()
    ]

    table_data = {
        'rows': [],
        'pagination': {
            "page": pagination.page,
            "pages": pagination.pages,
            "iter_pages": pages_iteration,
            "per_page": pagination.per_page,
            "total_count": pagination.total_count
        }
    }

    # Add current ids in table for use by previous/next
    session['holdingpen_current_ids'] = [obj.id for obj in object_list]
    session['holdingpen_sort_key'] = sort_key
    session['holdingpen_per_page'] = per_page
    session['holdingpen_tags'] = tags

    # Slice the current page out of the full, sorted object list.
    start = max(pagination.per_page * (pagination.page - 1), 0)
    end = min(pagination.per_page * pagination.page, pagination.total_count)

    table_data["rows"] = get_rows(object_list[start:end])
    table_data["rendered_rows"] = "".join(table_data["rows"])
    return jsonify(table_data)
def search(collection, p, of, ot, so, sf, sp, rm, rg, jrec):
    """Render search page.

    Handles browse redirects and legacy query-arg rewrites, runs the
    query against Elasticsearch with aggregations and optional sorting,
    applies an optional ``post_filter``, and returns the formatted
    records for the requested output format.
    """
    if 'action_browse' in request.args \
            or request.args.get('action', '') == 'browse':
        return browse()

    # `?c=NAME` alone is a shortcut for the collection page.
    if 'c' in request.args and len(request.args) == 1 \
            and len(request.args.getlist('c')) == 1:
        return redirect(url_for('.collection', name=request.args.get('c')))

    # Legacy `f` field arg: fold it into the pattern as `field:pattern`.
    if 'f' in request.args:
        args = request.args.copy()
        args['p'] = "{0}:{1}".format(args['f'], args['p'])
        del args['f']
        return redirect(url_for('.search', **args))

    # fix for queries like `/search?p=+ellis`
    p = p.strip().encode('utf-8')

    response = Query(p).search(collection=collection.name)
    response.body.update({
        'size': int(rg),
        'from': jrec - 1,
        'aggs': cfg['SEARCH_ELASTIC_AGGREGATIONS'].get(
            collection.name.lower(), {}
        )
    })

    # Only sort on whitelisted fields; any other `so` value means
    # "no explicit order".
    if sf in cfg['SEARCH_ELASTIC_SORT_FIELDS']:
        so = so if so in ('asc', 'desc') else ''
        sorting = {
            'sort': {
                sf: {
                    'order': so
                }
            }
        }
        response.body.update(sorting)

    # FIXME refactor to separate search hook
    filtered_facets = ''
    from invenio_search.walkers.elasticsearch import ElasticSearchDSL
    if 'post_filter' in request.values and request.values['post_filter']:
        parsed_post_filter = Query(request.values.get('post_filter'))
        post_filter = parsed_post_filter.query.accept(
            ElasticSearchDSL()
        )
        # Wrap the original query so the post filter narrows the hits.
        response.body['query'] = {
            "filtered": {
                'query': response.body['query'],
                'filter': post_filter
            }
        }
        # extracting the facet filtering
        from invenio_search.walkers.facets import FacetsVisitor
        filtered_facets = parsed_post_filter.query.accept(
            FacetsVisitor()
        )
        # sets cannot be converted to json; use FacetsVisitor to convert
        # them to lists
        filtered_facets = FacetsVisitor.jsonable(filtered_facets)
    else:
        # Save current query and number of hits in the user session.
        # BUG FIX: removed a dead `if not session.get(session_key):
        # session[session_key] = {}` — the value was unconditionally
        # overwritten on the very next statement anyway.
        session_key = 'last-query' + p + collection.name
        session[session_key] = {
            "p": p,
            "collection": collection.name,
            "number_of_hits": len(response),
            "timestamp": datetime.datetime.utcnow()
        }

    # Requested record offset past the result set: restart at 1.
    number_of_hits = len(response)
    if number_of_hits and jrec > number_of_hits:
        args = request.args.copy()
        args['jrec'] = 1
        return redirect(url_for('.search', **args))

    pagination = Pagination((jrec - 1) // rg + 1, rg, number_of_hits)

    ctx = dict(
        facets={},  # facets.get_facets_config(collection, qid),
        filtered_facets=filtered_facets,
        response=response,
        rg=rg,
        ot=ot,
        pagination=pagination,
        collection=collection,
    )
    # TODO add search services
    # TODO add external collection search
    return response_formated_records(response.records(), of, **ctx)
def search(collection, p, of, ot, so, sf, sp, rm, rg, jrec):
    """Render search page.

    Handles browse redirects and legacy query-arg rewrites, runs the
    query against Elasticsearch with fixed collection/author
    aggregations, applies an optional ``post_filter``, and returns the
    formatted records for the requested output format.
    """
    if 'action_browse' in request.args \
            or request.args.get('action', '') == 'browse':
        return browse()

    # `?c=NAME` alone is a shortcut for the collection page.
    if 'c' in request.args and len(request.args) == 1 \
            and len(request.args.getlist('c')) == 1:
        return redirect(url_for('.collection', name=request.args.get('c')))

    # Legacy `f` field arg: fold it into the pattern as `field:pattern`.
    if 'f' in request.args:
        args = request.args.copy()
        args['p'] = "{0}:{1}".format(args['f'], args['p'])
        del args['f']
        return redirect(url_for('.search', **args))

    # fix for queries like `/search?p=+ellis`
    p = p.strip().encode('utf-8')
    collection_breadcrumbs(collection)

    response = Query(p).search(collection=collection.name)
    response.body.update({
        'size': int(rg),
        'from': jrec - 1,
        'aggs': {
            "collection": {"terms": {"field": "_collections"}},
            "author": {"terms": {"field": "authors.raw"}},
        },
    })

    # FIXME refactor to separate search hook
    filtered_facets = ''
    from invenio_search.walkers.elasticsearch import ElasticSearchDSL
    # CONSISTENCY FIX: also require a non-empty value (as the sibling
    # search implementation does) so an empty `post_filter` arg is not
    # parsed into a filter.
    if 'post_filter' in request.values and request.values['post_filter']:
        parsed_post_filter = Query(request.values.get('post_filter'))
        post_filter = parsed_post_filter.query.accept(
            ElasticSearchDSL()
        )
        response.body['post_filter'] = post_filter
        # extracting the facet filtering
        from invenio_search.walkers.facets import FacetsVisitor
        filtered_facets = parsed_post_filter.query.accept(
            FacetsVisitor()
        )
        # sets cannot be converted to json; use FacetsVisitor to convert
        # them to lists
        filtered_facets = FacetsVisitor.jsonable(filtered_facets)

    # Requested record offset past the result set: restart at 1.
    if len(response) and jrec > len(response):
        args = request.args.copy()
        args['jrec'] = 1
        return redirect(url_for('.search', **args))

    pagination = Pagination((jrec - 1) // rg + 1, rg, len(response))

    ctx = dict(
        facets={},  # facets.get_facets_config(collection, qid),
        filtered_facets=filtered_facets,
        response=response,
        rg=rg,
        create_nearest_terms_box=lambda: _("Try to modify the query."),
        easy_search_form=EasySearchForm(csrf_enabled=False),
        ot=ot,
        pagination=pagination,
        collection=collection,
    )
    # TODO add search services
    # TODO add external collection search
    return response_formated_records(response.records(), of, **ctx)