def test_import_by_name_sqlite_db_vendor(self):
    # This should return the fallback backend, because the SQLite backend
    # doesn't support versions less than 3.19.0
    if not fts5_available():
        from wagtail.search.backends.database.fallback import DatabaseSearchBackend

        db = get_search_backend(backend='default')
        self.assertIsInstance(db, DatabaseSearchBackend)
    else:
        from wagtail.search.backends.database.sqlite.sqlite import SQLiteSearchBackend

        db = get_search_backend(backend='default')
        self.assertIsInstance(db, SQLiteSearchBackend)
def test_import_by_path_sqlite_db_vendor(self):
    # Same as above
    if not fts5_available():
        from wagtail.search.backends.database.fallback import DatabaseSearchBackend

        db = get_search_backend(backend='wagtail.search.backends.database')
        self.assertIsInstance(db, DatabaseSearchBackend)
    else:
        from wagtail.search.backends.database.sqlite.sqlite import SQLiteSearchBackend

        db = get_search_backend(backend='wagtail.search.backends.database')
        self.assertIsInstance(db, SQLiteSearchBackend)
def test_import_by_path_sqlite_db_vendor(self):
    # Same as above
    if sqlite3.sqlite_version_info < (3, 19, 0):
        from wagtail.search.backends.database.fallback import DatabaseSearchBackend

        db = get_search_backend(backend='wagtail.search.backends.database')
        self.assertIsInstance(db, DatabaseSearchBackend)
    else:
        from wagtail.search.backends.database.sqlite.sqlite import SQLiteSearchBackend

        db = get_search_backend(backend='wagtail.search.backends.database')
        self.assertIsInstance(db, SQLiteSearchBackend)
def test_search_works_with_multisite(self):
    self.backend = get_search_backend('default')
    self.backend.reset_index()
    self.mk_article(
        self.english_section, title="Site 1 article")
    self.mk_article(
        self.yourmind2, title="Site 2 article")
    self.backend.refresh_index()

    response = self.client.get(reverse('search'), {
        'q': 'article'
    })
    self.assertContains(response, 'Site 1 article')
    self.assertNotContains(response, 'Site 2 article')

    client = Client(HTTP_HOST=self.site2.hostname)
    response = client.get(reverse('search'), {
        'q': 'article'
    })
    self.assertNotContains(response, 'Site 1 article')
    self.assertContains(response, 'Site 2 article')

    response = self.client.get(reverse('search'), {
        'q': 'magic'
    })
    self.assertContains(response, 'No search results for magic')

    response = self.client.get(reverse('search'))
    self.assertContains(response, 'No search results for None')
def get_member_laps(context):
    # NB this returns a core.Page, not the implementation-specific model used
    # so object-comparison to self will return false as objects would differ
    request = context['request']
    search_query = request.GET.get('query', '')
    best = str(request.GET.get('best', None))
    page = request.GET.get('page', 1)
    pagination_settings = PaginationSettings.for_site(request.site)
    path_info = request.META['PATH_INFO']

    user = User.objects.filter(username=request.user).first()
    if hasattr(user, 'racer'):
        racer_name = user.racer.name
    else:
        racer_name = ''

    if best == 'None':
        is_best = False
    else:
        is_best = True

    search_query = racer_name + ' ' + search_query

    if search_query:
        s = get_search_backend()
        if is_best:
            search_results = s.search(
                search_query,
                Lap.objects.order_by('-lap_date').filter(best=is_best),
                operator="and", order_by_relevance=False)
        else:
            search_results = s.search(
                search_query,
                Lap.objects.order_by('-lap_date'),
                operator="and", order_by_relevance=False)
        # search_results = Page.objects.live().search(search_query)
        query = Query.get(search_query)

        # Record hit
        query.add_hit()
    else:
        # search_results = Page.objects.none()
        if is_best:
            search_results = Lap.objects.all().order_by('-lap_date').filter(
                best=is_best)
        else:
            search_results = Lap.objects.all().order_by('-lap_date')

    # Pagination
    paginator = Paginator(search_results, pagination_settings.items_per_page)
    try:
        search_results = paginator.page(page)
    except PageNotAnInteger:
        search_results = paginator.page(1)
    except EmptyPage:
        search_results = paginator.page(paginator.num_pages)

    if is_best == False:
        is_best = 'None'

    return search_results
def search(self, request):
    subpage = {'title': 'Search', 'url': ''}
    context = {'page': self, 'subpage': subpage, 'headline': ''}

    # Search
    search_query = request.GET.get('query', None)
    if search_query:
        page_results = Page.objects.live().not_type(
            (BlogIndexPage)).search(search_query).annotate_score("score")

        # Log the query so Wagtail can suggest promoted results
        Query.get(search_query).add_hit()

        # Also query non-wagtail models
        s = get_search_backend()
        model_results = s.search(
            search_query,
            BaseImpactModel.objects.filter(impact_model__public=True))
    else:
        page_results = []
        model_results = []

    context.update({
        'search_query': search_query,
        'page_results': page_results,
        'model_results': model_results,
    })

    # raise Exception(dir(model_results[0]))
    # raise Exception(search_results)

    # Render template
    return render(request, 'pages/search_page.html', context)
def resolve(self, _, info, input):
    query = input['query']
    if not query:
        # don't want to find anything by an empty query
        return {
            'results': [],
            'more': False,
        }

    qs = get_search_backend().search(
        query, models.User, fields=['email', 'first_name', 'last_name'])

    # TODO - logic copy-pasted from kocherga.wagtail.schema.queries.search, generalize
    limit = input.pop('limit', None) or 10

    # Ask for one more to determine if there are more results
    qs = qs[:limit + 1]

    results = list(qs)
    more = len(results) > limit
    results = results[:limit]

    return {
        'results': results,
        'more': more,
    }
def filter_queryset(self, request, queryset, view):
    """
    This performs a full-text search on the result set
    Eg: ?search=James Joyce
    """
    search_enabled = getattr(settings, 'WAGTAILAPI_SEARCH_ENABLED', True)

    if 'search' in request.GET:
        if not search_enabled:
            raise BadRequestError("search is disabled")

        # Searching and filtering by tag at the same time is not supported
        if getattr(queryset, '_filtered_by_tag', False):
            raise BadRequestError("filtering by tag with a search query is not supported")

        search_query = request.GET['search']
        search_operator = request.GET.get('search_operator', None)
        order_by_relevance = 'order' not in request.GET

        sb = get_search_backend()
        try:
            queryset = sb.search(search_query, queryset,
                                 operator=search_operator,
                                 order_by_relevance=order_by_relevance)
        except FilterFieldError as e:
            raise BadRequestError("cannot filter by '{}' while searching (field is not indexed)".format(e.field_name))
        except OrderByFieldError as e:
            raise BadRequestError("cannot order by '{}' while searching (field is not indexed)".format(e.field_name))

    return queryset
def search(request):
    search_query = request.GET.get('query', None)
    page = request.GET.get('page', 1)
    backend = get_search_backend()

    # Search
    if search_query:
        search_results = Page.objects.live().search(search_query).results()
        query = Query.get(search_query)

        # Adding non-page models to the search results
        # search_results += backend.search(search_query, AboutPageQuestion.objects.all()).results()
        # search_results += backend.search(search_query, AboutPageTopic.objects.all()).results()

        # Record hit
        query.add_hit()
    else:
        search_results = Page.objects.none()

    # Pagination
    paginator = Paginator(search_results, 20)
    try:
        search_results = paginator.page(page)
    except PageNotAnInteger:
        search_results = paginator.page(1)
    except EmptyPage:
        search_results = paginator.page(paginator.num_pages)

    return render(request, 'search/search.html', {
        'search_query': search_query,
        'search_results': search_results,
    })
def resolve(self, _, info, filter=None, **pager):
    qs = models.Lead.objects.all()

    if filter:
        if filter.get('status'):
            qs = qs.filter(status=filter['status'])
        if filter.get('curated_by_me'):
            qs = qs.filter(curated_by=info.context.user)
        if filter.get('curated_by_empty'):
            qs = qs.filter(curated_by__isnull=True)

    if filter and filter.get('search'):
        s = get_search_backend()
        nodes = list(s.search(filter['search'], qs))
        return RelayConnection(
            pageInfo=PageInfo(
                hasPreviousPage=False,
                hasNextPage=False,
                startCursor='search',
                endCursor='search',
            ),
            nodes=nodes,
            edges=[{'node': node} for node in nodes],
        )

    return qs.relay_page(**pager)
def cigi_search_promoted(content_type=None, sort=None, contenttypes=None, contentsubtypes=None,
                         authors=None, projects=None, topics=None, searchtext=None,
                         articletypeid=None, publicationtypeid=None, publicationseriesid=None,
                         multimediaseriesid=None, experts=None):
    return CIGIOnlineElevatedElasticsearchResults(
        get_search_backend(),
        CIGIOnlineElevatedSearchQueryCompiler(
            content_type, sort, contenttypes, contentsubtypes, authors, projects, topics,
            searchtext, articletypeid, publicationtypeid, publicationseriesid,
            multimediaseriesid, experts)
    )
def test_ga_submit_tracking_with_custom_params__authenticated(
        self, mock_method):
    self.backend = get_search_backend('default')
    self.backend.reset_index()
    self.mk_articles(self.english_section, count=2)
    self.backend.refresh_index()

    custom_params = {'cd2': '1235-245'}

    response = self.client.get(reverse('search'), {'q': 'Test'})
    headers = {'HTTP_X_IORG_FBS_UIP': '100.100.200.10'}
    request = self.make_fake_request('/search/?q=Test', headers, self.user)

    middleware = MoloGoogleAnalyticsMiddleware()
    account = ''
    middleware.submit_tracking(
        account,
        request,
        response,
        custom_params,
    )

    # check if uuid was there
    self.assertTrue('user_id' in mock_method._mock_call_args[1])
    self.assertTrue('custom_params' in mock_method._mock_call_args[1])
    self.assertEqual(mock_method._mock_call_args[1]['custom_params'],
                     custom_params)
def index(request, pk):
    newsindex = get_object_or_404(
        Page.objects.specific().type(NewsIndexMixin), pk=pk)
    NewsItem = newsindex.get_newsitem_model()

    if not user_can_edit_newsitem(request.user, NewsItem):
        raise PermissionDenied()

    newsitem_list = NewsItem.objects.filter(newsindex=newsindex)

    query = None
    try:
        query = request.GET['q']
    except KeyError:
        pass
    else:
        backend = get_search_backend()
        newsitem_list = backend.search(query, newsitem_list)

    paginator, page = paginate(request, newsitem_list)

    return render(request, 'wagtailnews/index.html', {
        'newsindex': newsindex,
        'page': page,
        'paginator': paginator,
        'newsitem_list': page.object_list,
        'newsitem_perms': perms_for_template(request, NewsItem),
        'query_string': query,
    })
def test_import_by_path_postgres_db_vendor(self):
    from wagtail.search.backends.database.postgres.postgres import (
        PostgresSearchBackend,
    )

    db = get_search_backend(backend="wagtail.search.backends.database")
    self.assertIsInstance(db, PostgresSearchBackend)
def test_import_by_full_path_mysql_db_vendor(self):
    from wagtail.search.backends.database.mysql.mysql import MySQLSearchBackend

    db = get_search_backend(
        backend="wagtail.search.backends.database.SearchBackend"
    )
    self.assertIsInstance(db, MySQLSearchBackend)
def get_results_for_term(term):
    s = get_search_backend()

    # get page
    req = requests.get('https://www.sussexstudent.com/msl-search/?q={}'.format(term))
    document = bs4.BeautifulSoup(req.text)

    groups = [get_group_result(result) for result in document.select('.search_groupings dt')]
    pages = [get_page_result(result) for result in document.select('.search_pages dt')]
    events = [get_event_result(result) for result in document.select('.search_events .event')]
    news = [get_news_result(result) for result in document.select('.search_news .news_item')]

    falmer_groups = s.search(term, StudentGroup.objects.all())
    falmer_events = s.search(term, Event.objects.all())

    all_unsorted = groups + pages + events + news
    results_map = {item['uuid']: item for item in all_unsorted}
    title_map = {item['title']: item['uuid'] for item in all_unsorted}

    fuzz_sorted = process.extract(term, title_map.keys(), limit=15)

    return {
        'results': results_map,
        'groups': [item['uuid'] for item in groups],
        'news': [item['uuid'] for item in news],
        'pages': [item['uuid'] for item in pages],
        'events': [item['uuid'] for item in events],
        'top': [title_map[fuzz_result[0]] for fuzz_result in fuzz_sorted],
    }
def handle(self, *args, **kwargs):
    search_backend = get_search_backend("fulltext")
    revision_results = search_backend.search(kwargs["search string"], IndexedPageRevision)

    filename = kwargs['filename']
    if not filename:
        # Ensure stdout can't be closed by the context manager below
        self.stdout.close = lambda: None

    with (open(filename, 'w', newline='', encoding='utf-8') if filename else self.stdout) as f:
        writer = csv.writer(f)
        writer.writerow([
            'Page id',
            'Revision id',
            'Preview URL',
            'Revision created date',
        ])
        writer.writerows([(
            r.page_id,
            r.id,
            reverse('wagtailadmin_pages:revisions_view', args=(r.page_id, r.id)),
            r.created_at.isoformat(),
        ) for r in revision_results if Page.objects.filter(id=r.page_id).exists()])
def search(request):
    search_query = request.GET.get('query', '')
    if not search_query:
        return JsonResponse({})

    target_model = request.GET.get('type', 'wagtailcore.Page')
    try:
        model = apps.get_model(target_model)
    except Exception:
        return HttpResponseBadRequest()

    try:
        limit = int(request.GET.get('limit', 100))
    except ValueError:
        return HttpResponseBadRequest()

    field_name = getattr(model, 'autocomplete_search_field', None)

    if issubclass(model, Indexed):
        search_backend = get_search_backend()
        if field_name:
            queryset = search_backend.search(search_query, model, fields=[field_name])
        else:
            queryset = search_backend.search(search_query, model)
    else:
        field_name = field_name if field_name else 'title'
        filter_kwargs = dict()
        filter_kwargs[field_name + '__icontains'] = search_query
        queryset = model.objects.filter(**filter_kwargs)

    if getattr(queryset, 'live', None):
        # Non-Page models like Snippets won't have a live/published status
        # and thus should not be filtered with a call to `live`.
        queryset = queryset.live()

    exclude = request.GET.get('exclude', '')
    if exclude and queryset:
        try:
            exclusions = [unquote(item) for item in exclude.split(',') if item]
            queryset = queryset.exclude(pk__in=exclusions)
        except Exception:
            pass

    results = list(map(render_page, queryset[:limit]))

    if request.GET.get('can_edit', False):
        if issubclass(model, Page):
            for index, result in enumerate(results):
                results[index]['edit_link'] = reverse(
                    'wagtailadmin_pages:edit', args=(result['pk'],))
        else:
            url_helper = AdminURLHelper(model)
            for index, result in enumerate(results):
                results[index]['edit_link'] = url_helper.get_action_url(
                    'edit', result['pk'])

    return JsonResponse(dict(items=results))
def search(self, query_string, fields=None, operator=None, order_by_relevance=True, backend='default'):
    """
    This runs a search query on all the items in the QuerySet
    """
    search_backend = get_search_backend(backend)
    return search_backend.search(query_string, self, fields=fields,
                                 operator=operator, order_by_relevance=order_by_relevance)
def get_object_list(self, search_term=None, **kwargs):
    object_list = self.get_unfiltered_object_list()

    if search_term:
        search_backend = get_search_backend()
        object_list = search_backend.search(search_term, object_list)

    return object_list
def get_elasticsearch_backend(self):
    from django.conf import settings
    from wagtail.search.backends import get_search_backend

    if 'elasticsearch' not in settings.WAGTAILSEARCH_BACKENDS:
        raise unittest.SkipTest("No elasticsearch backend active")

    return get_search_backend('elasticsearch')
def _search_newsitems(request, newsitem_models, query):
    backend = get_search_backend()

    for NewsItem in newsitem_models:
        results = backend.search(query, NewsItem)[:10]
        if results:
            yield (NewsItem._meta.verbose_name_plural,
                   perms_for_template(request, NewsItem),
                   results)
def get_context(self, request):
    word = request.GET.get('key')
    context = super().get_context(request)
    s = get_search_backend()
    posts = s.search(word, BlogPage)
    SetContext(context)
    context['posts'] = posts
    return context
def get_object_list(self):
    object_list = self.get_unfiltered_object_list()

    if self.is_searching:
        search_backend = get_search_backend()
        object_list = search_backend.search(self.search_query, object_list)

    return object_list
def autocomplete(self, query, fields=None, operator=None, order_by_relevance=True, backend='default'):
    """
    This runs an autocomplete query on all the items in the QuerySet
    """
    search_backend = get_search_backend(backend)
    return search_backend.autocomplete(query, self, fields=fields,
                                       operator=operator, order_by_relevance=order_by_relevance)
def filter_object_list(self, objects, form):
    search_query = form.cleaned_data.get("q")
    if search_query:
        search_backend = get_search_backend()
        objects = search_backend.search(search_query, objects)
        self.is_searching = True
        self.search_query = search_query
    return objects
def search(request):
    search_query = request.GET.get('query', '')
    best = str(request.GET.get('best', None))
    page = request.GET.get('page', 1)
    pagination_settings = PaginationSettings.for_site(request.site)

    if best == 'None':
        is_best = False
    else:
        is_best = True

    # Search
    if search_query:
        s = get_search_backend()
        if is_best:
            search_results = s.search(
                search_query,
                Lap.objects.order_by('-lap_date').filter(best=is_best),
                operator="and", order_by_relevance=False)
        else:
            search_results = s.search(
                search_query,
                Lap.objects.order_by('-lap_date'),
                operator="and", order_by_relevance=False)
        # search_results = Page.objects.live().search(search_query)
        query = Query.get(search_query)

        # Record hit
        query.add_hit()
    else:
        # search_results = Page.objects.none()
        if is_best:
            search_results = Lap.objects.all().order_by('-lap_date').filter(best=is_best)
        else:
            search_results = Lap.objects.all().order_by('-lap_date')

    # Pagination
    paginator = Paginator(search_results, pagination_settings.items_per_page)
    try:
        search_results = paginator.page(page)
    except PageNotAnInteger:
        search_results = paginator.page(1)
    except EmptyPage:
        search_results = paginator.page(paginator.num_pages)

    # Get the index of the current page
    index = search_results.number - 1  # edited to something easier without index

    # This value is maximum index of your pages, so the last page - 1
    max_index = len(paginator.page_range)

    # You want a range of 7, so lets calculate where to slice the list
    start_index = index - pagination_settings.page_range if index >= pagination_settings.page_range else 0
    end_index = index + pagination_settings.page_range if index <= max_index - pagination_settings.page_range else max_index

    # Get our new page range. In the latest versions of Django page_range returns
    # an iterator. Thus pass it to list, to make our slice possible again.
    page_range = list(paginator.page_range)[start_index:end_index]

    if is_best == False:
        is_best = 'None'

    return render(request, 'search/search.html', {
        'search_query': search_query,
        'search_results': search_results,
        'best': is_best,
        'page_range': page_range,
    })
def resolve_queryset(qs, info, limit=None, offset=None, search_query=None, id=None,
                     order=None, collection=None, **kwargs):
    """
    Add limit, offset and search capabilities to the query. This contains
    argument names used by :class:`~grapple.types.structures.QuerySetList`.

    :param qs: The query set to be modified.
    :param info: The Graphene info object.
    :param limit: Limit number of objects in the QuerySet.
    :type limit: int
    :param offset: Omit a number of objects from the beginning of the query set.
    :type offset: int
    :param search_query: Using Wagtail search, exclude objects that do not match
        the search query.
    :type search_query: str
    :param id: Filter by the primary key.
    :type id: int
    :param order: Order the query set using the Django QuerySet order_by format.
    :type order: str
    :param collection: Use Wagtail's collection id to filter images or documents.
    :type collection: int
    """
    offset = int(offset or 0)

    if id is not None:
        qs = qs.filter(pk=id)
    else:
        qs = qs.all()

    if id is None and search_query:
        # Check if the queryset is searchable using Wagtail search.
        if not class_is_indexed(qs.model):
            raise TypeError("This data type is not searchable by Wagtail.")

        if settings.GRAPPLE_ADD_SEARCH_HIT is True:
            query = Query.get(search_query)
            query.add_hit()

        return get_search_backend().search(search_query, qs)

    if order is not None:
        qs = qs.order_by(*map(lambda x: x.strip(), order.split(",")))

    # Filter by collection before slicing, since a sliced queryset can no longer
    # be filtered.
    if collection is not None:
        qs = qs.filter(collection=collection)

    if limit is not None:
        limit = int(limit)
        qs = qs[offset:limit + offset]

    return qs
def list(request, app_label, model_name):
    model = get_snippet_model_from_url_params(app_label, model_name)

    permissions = [
        get_permission_name(action, model)
        for action in ['add', 'change', 'delete']
    ]
    if not any([request.user.has_perm(perm) for perm in permissions]):
        return permission_denied(request)

    items = model.objects.all()

    # Preserve the snippet's model-level ordering if specified, but fall back on PK if not
    # (to ensure pagination is consistent)
    if not items.ordered:
        items = items.order_by('pk')

    # Search
    is_searchable = class_is_indexed(model)
    is_searching = False
    search_query = None
    if is_searchable and 'q' in request.GET:
        search_form = SearchForm(request.GET, placeholder=_("Search %(snippet_type_name)s") % {
            'snippet_type_name': model._meta.verbose_name_plural
        })

        if search_form.is_valid():
            search_query = search_form.cleaned_data['q']

            search_backend = get_search_backend()
            items = search_backend.search(search_query, items)
            is_searching = True
    else:
        search_form = SearchForm(placeholder=_("Search %(snippet_type_name)s") % {
            'snippet_type_name': model._meta.verbose_name_plural
        })

    paginator = Paginator(items, per_page=20)
    paginated_items = paginator.get_page(request.GET.get('p'))

    # Template
    if request.is_ajax():
        template = 'wagtailsnippets/snippets/results.html'
    else:
        template = 'wagtailsnippets/snippets/type_index.html'

    return render(request, template, {
        'model_opts': model._meta,
        'items': paginated_items,
        'can_add_snippet': request.user.has_perm(get_permission_name('add', model)),
        'can_delete_snippets': request.user.has_perm(get_permission_name('delete', model)),
        'is_searchable': is_searchable,
        'search_form': search_form,
        'is_searching': is_searching,
        'query_string': search_query,
    })
def filter(self, objects):
    objects = super().filter(objects)

    search_query = self.cleaned_data.get("q")
    if search_query:
        search_backend = get_search_backend()
        objects = search_backend.search(search_query, objects)
        self.is_searching = True
        self.search_query = search_query

    return objects
def choose(request, app_label, model_name):
    model = get_snippet_model_from_url_params(app_label, model_name)

    items = model.objects.all()

    # Preserve the snippet's model-level ordering if specified, but fall back on PK if not
    # (to ensure pagination is consistent)
    if not items.ordered:
        items = items.order_by('pk')

    # Search
    is_searchable = class_is_indexed(model)
    is_searching = False
    search_query = None
    if is_searchable and 'q' in request.GET:
        search_form = SearchForm(
            request.GET,
            placeholder=_("Search %(snippet_type_name)s") % {'snippet_type_name': model._meta.verbose_name})

        if search_form.is_valid():
            search_query = search_form.cleaned_data['q']

            search_backend = get_search_backend()
            items = search_backend.search(search_query, items)
            is_searching = True
    else:
        search_form = SearchForm(
            placeholder=_("Search %(snippet_type_name)s") % {'snippet_type_name': model._meta.verbose_name})

    # Pagination
    paginator = Paginator(items, per_page=25)
    paginated_items = paginator.get_page(request.GET.get('p'))

    # If paginating or searching, render "results.html"
    if request.GET.get('results', None) == 'true':
        return render(
            request,
            "wagtailsnippets/chooser/results.html",
            {
                'model_opts': model._meta,
                'items': paginated_items,
                'query_string': search_query,
                'is_searching': is_searching,
            })

    return render_modal_workflow(request, 'wagtailsnippets/chooser/choose.html', None, {
        'model_opts': model._meta,
        'items': paginated_items,
        'is_searchable': is_searchable,
        'search_form': search_form,
        'query_string': search_query,
        'is_searching': is_searching,
    }, json_data={'step': 'choose'})
def get(self, request, app_label, model_name):
    self.model = get_snippet_model_from_url_params(app_label, model_name)

    items = self.model.objects.all()

    # Preserve the snippet's model-level ordering if specified, but fall back on PK if not
    # (to ensure pagination is consistent)
    if not items.ordered:
        items = items.order_by('pk')

    # Filter by locale
    self.locale = None
    self.locale_filter = None
    self.selected_locale = None
    if issubclass(self.model, TranslatableMixin):
        # 'locale' is the Locale of the object that this snippet is being chosen for
        if request.GET.get('locale'):
            self.locale = get_object_or_404(
                Locale, language_code=request.GET['locale'])

        # 'locale_filter' is the current value of the "Locale" selector in the UI
        if request.GET.get('locale_filter'):
            self.locale_filter = get_object_or_404(
                Locale, language_code=request.GET['locale_filter'])

        self.selected_locale = self.locale_filter or self.locale
        if self.selected_locale:
            items = items.filter(locale=self.selected_locale)

    # Search
    self.is_searchable = class_is_indexed(self.model)
    self.is_searching = False
    self.search_query = None
    if self.is_searchable and 'q' in request.GET:
        self.search_form = SearchForm(
            request.GET,
            placeholder=_("Search %(snippet_type_name)s") % {'snippet_type_name': self.model._meta.verbose_name})

        if self.search_form.is_valid():
            self.search_query = self.search_form.cleaned_data['q']

            search_backend = get_search_backend()
            items = search_backend.search(self.search_query, items)
            self.is_searching = True
    else:
        self.search_form = SearchForm(
            placeholder=_("Search %(snippet_type_name)s") % {'snippet_type_name': self.model._meta.verbose_name})

    # Pagination
    paginator = Paginator(items, per_page=25)
    self.paginated_items = paginator.get_page(request.GET.get('p'))

    return self.render_to_response()
def setUp(self):
    backend_name = "wagtail.search.backends.database.postgres"
    for conf in settings.WAGTAILSEARCH_BACKENDS.values():
        if conf["BACKEND"] == backend_name:
            break
    else:
        raise unittest.SkipTest("Only for %s" % backend_name)

    self.backend = get_search_backend(backend_name)
def choose(request, app_label, model_name):
    model = get_snippet_model_from_url_params(app_label, model_name)

    items = model.objects.all()

    # Preserve the snippet's model-level ordering if specified, but fall back on PK if not
    # (to ensure pagination is consistent)
    if not items.ordered:
        items = items.order_by('pk')

    # Search
    is_searchable = class_is_indexed(model)
    is_searching = False
    search_query = None
    if is_searchable and 'q' in request.GET:
        search_form = SearchForm(request.GET, placeholder=_("Search %(snippet_type_name)s") % {
            'snippet_type_name': model._meta.verbose_name
        })

        if search_form.is_valid():
            search_query = search_form.cleaned_data['q']

            search_backend = get_search_backend()
            items = search_backend.search(search_query, items)
            is_searching = True
    else:
        search_form = SearchForm(placeholder=_("Search %(snippet_type_name)s") % {
            'snippet_type_name': model._meta.verbose_name
        })

    # Pagination
    paginator = Paginator(items, per_page=25)
    paginated_items = paginator.get_page(request.GET.get('p'))

    # If paginating or searching, render "results.html"
    if request.GET.get('results', None) == 'true':
        return render(request, "wagtailsnippets/chooser/results.html", {
            'model_opts': model._meta,
            'items': paginated_items,
            'query_string': search_query,
            'is_searching': is_searching,
        })

    return render_modal_workflow(
        request, 'wagtailsnippets/chooser/choose.html', None,
        {
            'model_opts': model._meta,
            'items': paginated_items,
            'is_searchable': is_searchable,
            'search_form': search_form,
            'query_string': search_query,
            'is_searching': is_searching,
        },
        json_data={'step': 'choose'}
    )
def setUp(self):
    # Search WAGTAILSEARCH_BACKENDS for an entry that uses the given backend path
    for backend_name, backend_conf in settings.WAGTAILSEARCH_BACKENDS.items():
        if backend_conf['BACKEND'] == self.backend_path:
            self.backend = get_search_backend(backend_name)
            self.backend_name = backend_name
            break
    else:
        # no conf entry found - skip tests for this backend
        raise unittest.SkipTest("No WAGTAILSEARCH_BACKENDS entry for the backend %s" % self.backend_path)

    management.call_command('update_index', backend_name=self.backend_name,
                            stdout=StringIO(), chunk_size=50)
def get_elasticsearch_backend(self):
    from django.conf import settings
    from wagtail.search.backends import get_search_backend

    backend_path = 'wagtail.search.backends.elasticsearch'

    # Search WAGTAILSEARCH_BACKENDS for an entry that uses the given backend path
    for backend_name, backend_conf in settings.WAGTAILSEARCH_BACKENDS.items():
        if backend_conf['BACKEND'] == backend_path:
            return get_search_backend(backend_name)
    else:
        # no conf entry found - skip tests for this backend
        raise unittest.SkipTest("No WAGTAILSEARCH_BACKENDS entry for the backend %s" % backend_path)
def choose(request):
    # TODO: Ideally this would return the endnotes for the current article.
    items = EndNote.objects.all()

    # Search
    is_searchable = class_is_indexed(EndNote)
    is_searching = False
    search_query = None
    if is_searchable and 'q' in request.GET:
        search_form = SearchForm(request.GET, placeholder=_("Search End Notes"))

        if search_form.is_valid():
            search_query = search_form.cleaned_data['q']

            search_backend = get_search_backend()
            items = search_backend.search(search_query, items)
            is_searching = True
    else:
        search_form = SearchForm(placeholder=_("Search End Notes"))

    # Pagination
    p = request.GET.get("p", 1)
    paginator = Paginator(items, 25)
    try:
        paginated_items = paginator.page(p)
    except PageNotAnInteger:
        paginated_items = paginator.page(1)
    except EmptyPage:
        paginated_items = paginator.page(paginator.num_pages)

    # If paginating or searching, render "results.html"
    if request.GET.get('results', None) == 'true':
        return render(request, "content_notes/chooser/results.html", {
            'items': paginated_items,
            'query_string': search_query,
            'is_searching': is_searching,
        })

    return render_modal_workflow(
        request,
        'content_notes/chooser/choose.html',
        'content_notes/chooser/choose.js',
        {
            'items': paginated_items,
            'is_searchable': is_searchable,
            'search_form': search_form,
            'query_string': search_query,
            'is_searching': is_searching,
        }
    )
def test_search_only_includes_articles(self):
    self.backend = get_search_backend('default')
    self.backend.reset_index()
    self.mk_articles(self.english_section, count=2)
    footer = FooterPage(title='Test Footer')
    self.footer_index.add_child(instance=footer)
    footer.save_revision().publish()
    self.backend.refresh_index()

    response = self.client.get(reverse('search'), {
        'q': 'Test'
    })
    results = response.context['results']
    for article in results:
        self.assertNotEqual(article.title, 'Test Footer')
def test_ga_middleware(self, mock_method):
    self.backend = get_search_backend('default')
    self.backend.reset_index()
    self.mk_articles(self.english_section, count=2)
    self.backend.refresh_index()

    response = self.client.get(reverse('search'), {
        'q': 'Test'
    })
    headers = {'HTTP_X_IORG_FBS_UIP': '100.100.200.10'}
    request = self.make_fake_request(
        '/search/?q=Test', headers)

    middleware = MoloGoogleAnalyticsMiddleware()
    account = ''
    response = middleware.submit_tracking(account, request, response)

    self.assertTrue(mock_method.called_with(request.get_full_path()))
def list(request):
    items = EndNote.objects.all()

    # Search
    is_searchable = class_is_indexed(EndNote)
    is_searching = False
    search_query = None
    if is_searchable and 'q' in request.GET:
        search_form = SearchForm(request.GET, placeholder=_("Search End Notes"))

        if search_form.is_valid():
            search_query = search_form.cleaned_data['q']

            search_backend = get_search_backend()
            items = search_backend.search(search_query, items)
            is_searching = True
    else:
        search_form = SearchForm(placeholder=_("Search End Notes"))

    # Pagination
    p = request.GET.get('p', 1)
    paginator = Paginator(items, 20)
    try:
        paginated_items = paginator.page(p)
    except PageNotAnInteger:
        paginated_items = paginator.page(1)
    except EmptyPage:
        paginated_items = paginator.page(paginator.num_pages)

    # Template
    if request.is_ajax():
        template = 'content_notes/endnotes/results.html'
    else:
        template = 'content_notes/endnotes/type_index.html'

    return render(request, template, {
        'items': paginated_items,
        'is_searchable': is_searchable,
        'search_form': search_form,
        'is_searching': is_searching,
        'query_string': search_query,
    })
def get_queryset(self):
    queryset = super().get_queryset()
    search_form = self.get_search_form()
    if search_form.is_valid():
        q = search_form.cleaned_data['q']
        if class_is_indexed(queryset.model):
            search_backend = get_search_backend()
            queryset = search_backend.search(q, queryset, fields=self.search_fields)
        else:
            filters = {
                field + '__icontains': q
                for field in self.search_fields or []
            }
            queryset = queryset.filter(**filters)
    return queryset
def update_backend(self, backend_name, schema_only=False, chunk_size=DEFAULT_CHUNK_SIZE):
    self.stdout.write("Updating backend: " + backend_name)

    backend = get_search_backend(backend_name)

    if not backend.rebuilder_class:
        self.stdout.write("Backend '%s' doesn't require rebuilding" % backend_name)
        return

    models_grouped_by_index = group_models_by_index(backend, get_indexed_models()).items()
    if not models_grouped_by_index:
        self.stdout.write(backend_name + ": No indices to rebuild")

    for index, models in models_grouped_by_index:
        self.stdout.write(backend_name + ": Rebuilding index %s" % index.name)

        # Start rebuild
        rebuilder = backend.rebuilder_class(index)
        index = rebuilder.start()

        # Add models
        for model in models:
            index.add_model(model)

        # Add objects
        object_count = 0
        if not schema_only:
            for model in models:
                self.stdout.write('{}: {}.{} '.format(
                    backend_name, model._meta.app_label, model.__name__).ljust(35), ending='')

                # Add items (chunk_size at a time)
                for chunk in self.print_iter_progress(
                        self.queryset_chunks(model.get_indexed_objects().order_by('pk'), chunk_size)):
                    index.add_items(model, chunk)
                    object_count += len(chunk)

                self.print_newline()

        # Finish rebuild
        rebuilder.finish()

        self.stdout.write(backend_name + ": indexed %d objects" % object_count)
        self.print_newline()
def test_search_empty_values(self):
    self.backend = get_search_backend('default')
    self.backend.reset_index()
    self.mk_article(
        self.english_section, title="Site 1 article")
    self.mk_article(
        self.yourmind2, title="Site 2 article")
    self.backend.refresh_index()

    response = self.client.get(reverse('search'), {
        'q': ' '
    })
    self.assertEqual(response.status_code, 200)

    response = self.client.get(reverse('search'), {
        'q': ' article '
    })
    self.assertContains(response, 'Site 1 article')
    self.assertNotContains(response, 'Site 2 article')
def test_search(self):
    self.backend = get_search_backend('default')
    self.backend.reset_index()
    self.mk_articles(self.english_section, count=20)
    self.backend.refresh_index()

    response = self.client.get(reverse('search'), {
        'q': 'Test'
    })
    self.assertContains(response, 'Page 1 of 2')
    self.assertContains(response, '→')
    self.assertNotContains(response, '←')

    response = self.client.get(reverse('search'), {
        'q': 'Test',
        'p': '2',
    })
    self.assertContains(response, 'Page 2 of 2')
    self.assertNotContains(response, '→')
    self.assertContains(response, '←')

    response = self.client.get(reverse('search'), {
        'q': 'Test',
        'p': 'foo',
    })
    self.assertContains(response, 'Page 1 of 2')

    response = self.client.get(reverse('search'), {
        'q': 'Test',
        'p': '4',
    })
    self.assertContains(response, 'Page 2 of 2')

    response = self.client.get(reverse('search'), {
        'q': 'magic'
    })
    self.assertContains(response, 'No search results for magic')

    response = self.client.get(reverse('search'))
    self.assertContains(response, 'No search results for None')
def test_search_works_with_multilanguages(self):
    self.backend = get_search_backend('default')
    self.backend.reset_index()
    eng_article = self.mk_article(
        self.english_section, title="English article")
    self.mk_article_translation(
        eng_article, self.french, title='French article')
    self.backend.refresh_index()

    self.client.get('/locale/en/')
    response = self.client.get(reverse('search'), {
        'q': 'article'
    })
    self.assertContains(response, 'English article')
    self.assertNotContains(response, 'French article')

    self.client.get('/locale/fr/')
    response = self.client.get(reverse('search'), {
        'q': 'article'
    })
    self.assertContains(response, 'French article')
    self.assertNotContains(response, 'English article')
def setUp(self):
    self.backend = get_search_backend(self.backend_name)
    self.reset_index()

    for page in Page.objects.all():
        self.backend.add(page)

    self.refresh_index()
def test_import_by_name(self):
    db = get_search_backend(backend='default')
    self.assertIsInstance(db, DatabaseSearchBackend)
def test_import_by_full_path(self):
    db = get_search_backend(backend='wagtail.search.backends.db.DatabaseSearchBackend')
    self.assertIsInstance(db, DatabaseSearchBackend)