def search(request, query):
    """Render the full search results page for *query*.

    Parses the query string, reports any search-parameter errors via the
    messages framework, processes subscribe/unsubscribe requests, and renders
    the parsed hits plus facet aggregations.
    """
    search_params = search_string_to_params(query)
    canonical_query = params_to_search_string(search_params)

    searcher = MainappSearch(search_params, limit=settings.SEARCH_PAGINATION_LENGTH)
    for problem in searcher.errors:
        messages.error(request, problem)

    try:
        handle_subscribe_requests(
            request,
            search_params,
            _("You will now receive notifications about new search results."),
            _("You will no longer receive notifications."),
            _("You have already subscribed to this search."),
        )
    except NeedsLoginError as err:
        # Subscribing requires an authenticated user; bounce to login.
        return redirect(err.redirect_url)

    response = searcher.execute()
    hits = [parse_hit(hit) for hit in response.hits]

    context = _search_to_context(canonical_query, searcher, response, hits, request)
    context["new_facets"] = aggs_to_context(response)
    return render(request, "mainapp/search/search.html", context)
def items(self, query):
    """Return the parsed search hits for *query*, without highlighting."""
    parsed = search_string_to_params(query)
    searcher = MainappSearch(parsed, limit=settings.SEARCH_PAGINATION_LENGTH)
    response = searcher.execute()
    return [parse_hit(hit, highlighting=False) for hit in response.hits]
def perform_search(self, alert: UserAlert) -> List[dict]:
    """Run *alert*'s saved search, limited to documents modified since the
    last match (or an override / fallback window), and return parsed hits.
    """
    # Pick the cutoff: explicit override > last successful match > fallback.
    since = self.override_since
    if since is None:
        since = alert.last_match
    if since is None:
        since = timezone.now() - self.fallback_timeframe

    modified_since = Q("range", modified={"gte": since.isoformat()})
    response = MainappSearch(
        alert.get_search_params(),
        extra_filter=[modified_since],
    ).execute()
    return [parse_hit(hit) for hit in response.hits]
def handle(self, *args, **options):
    """
    The checks:

     * "rese" should match "research", but currently doesn't
     * "contain(|sng|ing)" should match "containing" by stemming, preserving
       the original and fuzzy
     * "here" matches "here's" due to language analysis
     * "Knutt" should prefer "Knutt" over "Knuth", but currently prefers
       frequency
     * "Schulhaus" is for big german dataset performance
    """
    if options.get("rebuild"):
        # Time the full index rebuild so regressions are noticeable.
        started_at = time.perf_counter()
        call_command(
            "search_index", action="rebuild", force=True, models=["mainapp.Person"]
        )
        finished_at = time.perf_counter()
        self.stdout.write("Total: {}\n".format(finished_at - started_at))

    # Show how the analyzer tokenizes a couple of tricky words.
    for sample in ["containing", "here's"]:
        tokens = [entry["token"] for entry in self.analyze(sample)["tokens"]]
        self.stdout.write("{} {}\n".format(sample, tokens))

    probe_queries = [
        "rese",
        "contain",
        "containsng",
        "containing",
        "here",
        "Knutt",
        "Schulhaus",
    ]
    for probe in probe_queries:
        response = MainappSearch(search_string_to_params(probe)).execute()
        self.stdout.write(
            "# {}: {} | {}\n".format(probe, len(response.hits), response.took)
        )
        for raw_hit in response.hits:
            parsed = parse_hit(raw_hit)
            # Flatten newlines and truncate so each hit stays on one line.
            snippet = (
                str(parsed.get("highlight"))
                .replace("\n", " ")
                .replace("\r", " ")[:100]
            )
            self.stdout.write(" - {}, {}\n".format(parsed["name"][:30], snippet))
def perform_search(self, alert: UserAlert) -> List[dict]:
    """Run *alert*'s saved search with an "after" cutoff and return parsed hits.

    The cutoff is the override if set, otherwise the alert's last match,
    otherwise now minus the fallback timeframe.
    """
    # Pick the cutoff: explicit override > last successful match > fallback.
    since = self.override_since
    if since is None:
        since = alert.last_match
    if since is None:
        since = timezone.now() - self.fallback_timeframe

    search_params = alert.get_search_params()
    search_params["after"] = str(since)

    response = MainappSearch(search_params).execute()
    return [parse_hit(hit) for hit in response.hits]
def search_results_only(request, query):
    """Returns only the result list items. Used for the endless scrolling"""
    search_params = search_string_to_params(query)
    canonical_query = params_to_search_string(search_params)
    offset = int(request.GET.get("after", 0))

    searcher = MainappSearch(
        search_params, offset=offset, limit=settings.SEARCH_PAGINATION_LENGTH
    )
    response = searcher.execute()

    # The mocked results don't have a took value
    logger.debug(
        "Elasticsearch query took {}ms".format(response.to_dict().get("took"))
    )

    hits = [parse_hit(hit) for hit in response.hits]
    context = _search_to_context(canonical_query, searcher, response, hits, request)

    # Sanity check on the Elasticsearch total-hits contract.
    assert response.hits.total["relation"] in ["eq", "gte"]
    total_hits = response.hits.total
    if not isinstance(total_hits, dict):
        total_hits = total_hits.to_dict()

    payload = {
        "results": loader.render_to_string(
            "mainapp/search/results_section.html", context, request
        ),
        "total_results": total_hits,
        "subscribe_widget": loader.render_to_string(
            "partials/subscribe_widget.html", context, request
        ),
        "more_link": reverse(search_results_only, args=[canonical_query]),
        # TODO: Currently we need both because the js for the dropdown facet
        # and document type facet hasn't been unified
        "facets": response.facets.to_dict(),
        "new_facets": aggs_to_context(response),
        "query": canonical_query,
    }
    return JsonResponse(payload, safe=False)