def search_pages_results(request, view_type='gallery'):
    """Render paginated full-text page search results.

    Supports three output formats selected by the ``format`` query
    parameter: Atom XML (``format=atom``), JSON/JSONP (``format=json``,
    with optional ``callback``), and the default HTML rendering, which
    uses either the gallery or list template depending on *view_type*.

    NOTE(review): the final ``render(..., locals())`` makes every local
    variable in this function part of the template context — do not
    rename or remove locals without checking the templates.

    :param request: Django HttpRequest carrying the search query string.
    :param view_type: "gallery" (default) or "list"; selects the HTML
        template used for the non-atom/non-json response.
    :return: HttpResponseRedirect (invalid page number) or HttpResponse.
    """
    page_title = "Search Results"
    paginator = search_pages_paginator(request)
    q = paginator.query
    try:
        # _cur_page is a private attribute of the project paginator —
        # presumably the requested page number; verify against its class.
        page = paginator.page(paginator._cur_page)
    except InvalidPage:
        url = urls.reverse('openoni_search_pages_results')
        # Set the page to the first page
        q['page'] = 1
        return HttpResponseRedirect('%s?%s' % (url, q.urlencode()))
    # 1-based indexes of the first/last result shown on this page.
    start = page.start_index()
    end = page.end_index()

    # figure out the next page number
    query = request.GET.copy()
    if page.has_next():
        query['page'] = paginator._cur_page + 1
        next_url = '?' + query.urlencode()

    # and the previous page number
    # NOTE(review): next_url/previous_url are only bound when the
    # corresponding page exists; templates reading them via locals()
    # must tolerate their absence.
    if page.has_previous():
        query['page'] = paginator._cur_page - 1
        previous_url = '?' + query.urlencode()

    # Display settings pulled from the query string, with defaults.
    rows = query.get("rows", "20")
    sort = query.get("sort", default="relevance")
    # Pre-computed "checked" attribute for the sequence checkbox.
    seq_check = "checked" if query.get("sequence", "0") == "1" else ""

    crumbs = list(settings.BASE_CRUMBS)
    host = request.get_host()
    # NOTE(review): shadows the builtin ``format``; kept as-is because
    # the name is part of the locals() template context.
    format = request.GET.get('format', None)
    if format == 'atom':
        feed_url = settings.BASE_URL + request.get_full_path()
        updated = rfc3339(timezone.now())
        return render(request, 'search/search_pages_results.xml', locals(),
                      content_type='application/atom+xml')
    elif format == 'json':
        results = {
            'startIndex': start,
            'endIndex': end,
            'totalItems': paginator.count,
            'itemsPerPage': rows,
            'items': [p.solr_doc for p in page.object_list],
        }
        # Give each item an absolute .json URL derived from its id.
        for i in results['items']:
            i['url'] = settings.BASE_URL + i['id'].rstrip('/') + '.json'
        json_text = json.dumps(results, indent=2)
        # jsonp?
        if request.GET.get('callback') is not None:
            json_text = "%s(%s);" % (request.GET.get('callback'), json_text)
        return HttpResponse(json_text, content_type='application/json')

    # Abbreviated page-number range for the pagination widget.
    page_range_short = list(_page_range_short(paginator, page))

    # copy the current request query without the page and sort
    # query params so we can construct links with it in the template
    q = request.GET.copy()
    for i in ('page', 'sort'):
        if i in q:
            q.pop(i)
    q = q.urlencode()

    # get a pseudo-English version of the query
    english_search = paginator.englishify()

    form = forms.SearchResultsForm({"rows": rows, "sort": sort})

    if view_type == "list":
        template = "search/search_pages_results_list.html"
    else:
        template = "search/search_pages_results.html"

    page_list = []
    lccns = query.getlist("lccn")
    # Build display info for each title filter: full name plus an
    # abbreviation truncated to 24 characters for narrow layouts.
    titles = []
    for lccn in lccns:
        name = str(models.Title.objects.get(lccn=lccn))
        titles.append({
            'abbrev': name[:24] + '...' if len(name) > 24 else name,
            'lccn': lccn,
            'name': name,
        })
    # Pair each result with its absolute (1-based) result number.
    for count in range(len(page.object_list)):
        page_list.append((count + start, page.object_list[count]))
    start_year, end_year = fulltext_range()
    # True when the supplied date range spans the entire fulltext
    # coverage (Jan 1 of the first year through Dec 31 of the last).
    searching_all_dates = False
    if request.GET.get('date1') and request.GET.get('date2'):
        if request.GET.get('date1') == str(start_year) + '-01-01':
            if request.GET.get('date2') == str(end_year) + '-12-31':
                searching_all_dates = True
    return render(request, template, locals())
# NOTE(review): the statements below are the tail of a title-search view
# whose ``def`` header lies above this chunk; the JSON block's exact
# nesting (presumably inside a ``format == 'json'`` branch, since code
# follows the return) is inferred — confirm against the full file.
# That view also renders with locals(), so local names are part of the
# template context and must not be renamed.
        # Give each item an absolute .json URL derived from its id.
        for i in results['items']:
            i['url'] = settings.BASE_URL + i['id'].rstrip("/") + ".json"
        json_text = json.dumps(results, indent=2)
        # jsonp?
        if request.GET.get('callback') is not None:
            json_text = "%s(%s);" % (request.GET.get('callback'), json_text)
        return HttpResponse(json_text, content_type='application/json')

    sort = request.GET.get('sort', 'relevance')

    # Copy the query string without page/sort so the template can build
    # links that preserve the rest of the search parameters.
    q = request.GET.copy()
    if 'page' in q:
        del q['page']
    if 'sort' in q:
        del q['sort']
    q = q.urlencode()
    collapse_search_tab = True
    form = forms.SearchResultsForm({"rows": rows, "sort": sort})
    return render(request, 'search/search_titles_results.html', locals())


@cache_page(settings.DEFAULT_TTL_SECONDS)
@rdf_view
def newspapers_rdf(request):
    """Serve an RDF/XML graph of all titles that have issues.

    The response is cached for ``settings.DEFAULT_TTL_SECONDS``.

    :param request: Django HttpRequest; used only to derive the RDF base URI.
    :return: HttpResponse with content type ``application/rdf+xml``.
    """
    titles = models.Title.objects.filter(has_issues=True)
    graph = titles_to_graph(titles)
    return HttpResponse(graph.serialize(base=_rdf_base(request),
                                        include_base=True),
                        content_type='application/rdf+xml')