def test_backend_loader(self):
    """get_search_backend() resolves dotted backend paths and rejects bad ones."""
    # A dotted path to the DB backend class should yield a DBSearch instance.
    db = get_search_backend(
        backend='wagtail.wagtailsearch.backends.db.DBSearch')
    self.assertIsInstance(db, DBSearch)

    # Likewise for the Elasticsearch backend class.
    es = get_search_backend(
        backend='wagtail.wagtailsearch.backends.elasticsearch.ElasticSearch')
    self.assertIsInstance(es, ElasticSearch)

    # A dotted path to a module that does not exist must raise.
    self.assertRaises(
        InvalidSearchBackendError, get_search_backend,
        backend='wagtail.wagtailsearch.backends.doesntexist.DoesntExist')

    # So must a string that is not a dotted path at all.
    self.assertRaises(
        InvalidSearchBackendError, get_search_backend,
        backend="I'm not a backend!")
def setUp(self):
    """Prepare a clean Elasticsearch backend using the atomic (alias-swap) rebuilder."""
    self.backend = get_search_backend('elasticsearch')
    # Force the atomic rebuilder so tests exercise that rebuild code path.
    self.backend.rebuilder_class = self.backend.atomic_rebuilder_class
    # Keep handy references to the low-level ES client and a fresh rebuilder.
    self.es = self.backend.es
    self.rebuilder = self.backend.get_rebuilder()
    # Start each test from an empty index.
    self.backend.reset_index()
def filter_queryset(self, request, queryset, view):
    """
    This performs a full-text search on the result set
    Eg: ?search=James Joyce
    """
    search_enabled = getattr(settings, 'WAGTAILAPI_SEARCH_ENABLED', True)

    # No search parameter: pass the queryset through untouched.
    if 'search' not in request.GET:
        return queryset

    if not search_enabled:
        raise BadRequestError("search is disabled")

    # Searching and filtering by tag at the same time is not supported
    if getattr(queryset, '_filtered_by_tag', False):
        raise BadRequestError(
            "filtering by tag with a search query is not supported")

    query = request.GET['search']
    operator = request.GET.get('search_operator', None)

    backend = get_search_backend()
    return backend.search(query, queryset, operator=operator)
def search(cls, q, results_per_page=None, page=1, prefetch_tags=False, filters=None):
    """
    Run a full-text search for ``q`` over this model.

    :param q: the search query string
    :param results_per_page: if set, return a paginator page of this size
    :param page: page number to return when paginating
    :param prefetch_tags: prefetch each result's tags via ``tagged_items__tag``
    :param filters: extra backend filters (dict); defaults to no filters
    :returns: a results list, or a ``Page`` of results when paginating
    """
    # FIX: the default was a mutable dict ({}), shared across calls; use a
    # None sentinel instead so each call gets a fresh, independent dict.
    if filters is None:
        filters = {}

    # Run search query
    search_backend = get_search_backend()
    if prefetch_tags:
        results = search_backend.search(
            q, cls, prefetch_related=['tagged_items__tag'], filters=filters)
    else:
        results = search_backend.search(q, cls, filters=filters)

    # If results_per_page is set, return a paginator page; clamp bad page
    # numbers to the first/last page instead of erroring.
    if results_per_page is not None:
        paginator = Paginator(results, results_per_page)
        try:
            return paginator.page(page)
        except PageNotAnInteger:
            return paginator.page(1)
        except EmptyPage:
            return paginator.page(paginator.num_pages)
    else:
        return results
def search(request, extra_context):
    """Render the search page: Wagtail page results plus non-Wagtail model results."""
    search_query = request.GET.get('query', None)

    page_results = []
    model_results = []
    if search_query:
        page_results = Page.objects.live().search(search_query).annotate_score("score")

        # Log the query so Wagtail can suggest promoted results
        Query.get(search_query).add_hit()

        # Also query non-wagtail models
        backend = get_search_backend()
        model_results = backend.search(
            search_query,
            BaseImpactModel.objects.filter(impact_model__public=True))

    context = {
        'search_query': search_query,
        'page_results': page_results,
        'model_results': model_results,
    }
    if extra_context is not None:
        context.update(extra_context)

    # Render template
    return render(request, 'pages/search_page.html', context)
def search(cls, query_string, show_unpublished=False, search_title_only=False,
           extra_filters=None, prefetch_related=None, path=None):
    """
    Search pages of this class for ``query_string``.

    :param query_string: the text to search for
    :param show_unpublished: include unpublished (non-live) pages
    :param search_title_only: restrict matching to the title field
    :param extra_filters: extra backend filters (dict); defaults to none
    :param prefetch_related: relations to prefetch on results; defaults to none
    :param path: if given, restrict results to pages under this path
    """
    # FIX: the defaults were mutable ({} and []), shared across every call;
    # use None sentinels and build fresh objects per call.
    if extra_filters is None:
        extra_filters = {}
    if prefetch_related is None:
        prefetch_related = []

    # Filters
    filters = extra_filters.copy()
    if not show_unpublished:
        filters['live'] = True

    # Path
    if path:
        filters['path__startswith'] = path

    # Fields
    fields = None
    if search_title_only:
        fields = ['title']

    # Search
    s = get_search_backend()
    return s.search(query_string, cls, fields=fields, filters=filters,
                    prefetch_related=prefetch_related)
def test_ga_middleware(self, mock_method):
    """
    When a URL is requested, the tracking path sent to Google Analytics
    must include the visitor's gender and age when they are available.
    """
    # Index some articles so the search view has content to return.
    self.backend = get_search_backend('default')
    self.backend.reset_index()
    self.mk_articles(self.english_section, count=2)
    self.backend.refresh_index()

    response = self.client.get(reverse('search'), {'q': 'Test'})

    # X-Iorg-Fbs-Uip carries the client IP (presumably from Free Basics
    # proxying) — confirm against the middleware's header handling.
    headers = {'HTTP_X_IORG_FBS_UIP': '100.100.200.10'}
    request = self.make_fake_request('/search/?q=Test', headers)

    middleware = IogtMoloGoogleAnalyticsMiddleware()
    account = ''
    response = middleware.submit_tracking(account, request, response)

    mock_method.assert_called()
    args, kwargs = mock_method.call_args_list[0]
    url = args[0]['utm_url']
    # cd1/cd2 look like GA custom dimensions for age and gender.
    # NOTE(review): assumes the fake request resolves to age 17 / female —
    # verify against make_fake_request's defaults.
    self.assertTrue('cd1=17' in url)
    self.assertTrue('cd2=female' in url)
def index(request, pk):
    """Admin listing of the news items under one news index page, with search."""
    newsindex = get_object_or_404(
        Page.objects.specific().type(NewsIndexMixin), pk=pk)
    NewsItem = newsindex.get_newsitem_model()

    if not user_can_edit_newsitem(request.user, NewsItem):
        raise PermissionDenied()

    newsitem_list = NewsItem.objects.filter(newsindex=newsindex)

    # Narrow to search results when a query was supplied.
    query = request.GET.get('q')
    if query is not None:
        newsitem_list = get_search_backend().search(query, newsitem_list)

    paginator, page = paginate(request, newsitem_list)

    return render(request, 'wagtailnews/index.html', {
        'newsindex': newsindex,
        'page': page,
        'paginator': paginator,
        'newsitem_list': page.object_list,
        'newsitem_perms': perms_for_template(request, NewsItem),
        'query_string': query,
    })
def update_backend(self, backend_name, object_list):
    """
    Rebuild the named search backend's index from scratch.

    :param backend_name: key of the backend in WAGTAILSEARCH_BACKENDS
    :param object_list: iterable of (model, queryset) pairs to index
    """
    # Print info
    self.stdout.write("Updating backend: " + backend_name)

    # Get backend
    backend = get_search_backend(backend_name)

    # Reset the index (FIX: corrected "Reseting" typo in the status message)
    self.stdout.write(backend_name + ": Resetting index")
    backend.reset_index()

    for model, queryset in object_list:
        self.stdout.write(backend_name + ": Indexing model '%s.%s'" % (
            model._meta.app_label,
            model.__name__,
        ))

        # Register the model's mapping, then bulk-add its objects.
        backend.add_type(model)
        backend.add_bulk(model, queryset)

    # Refresh index so the new documents are visible to searches.
    self.stdout.write(backend_name + ": Refreshing index")
    backend.refresh_index()
def search(request):
    """
    Search blog posts for ?query=..., log the hit, and paginate results.

    FIX: the paginator was called with ``paginator.page(search_query)`` —
    the query *string* — which always raised PageNotAnInteger and pinned
    the view to page 1. It now reads ?page= and paginates with it.
    """
    search_query = request.GET.get('query', None)
    page = request.GET.get('page', 1)

    # Search
    if search_query:
        s = get_search_backend()
        search_results = s.search(search_query, Post)
        query = Query.get(search_query)

        # Record hit so Wagtail can suggest promoted results
        query.add_hit()
    else:
        search_results = Post.objects.none()

    # Pagination — clamp bad page numbers to the first/last page.
    paginator = Paginator(search_results, 20)
    try:
        search_results = paginator.page(page)
    except PageNotAnInteger:
        search_results = paginator.page(1)
    except EmptyPage:
        search_results = paginator.page(paginator.num_pages)

    return render(request, 'search/search.html', {
        'search_query': search_query,
        'search_results': search_results,
    })
def search(
    cls,
    query_string,
    show_unpublished=False,
    search_title_only=False,
    extra_filters=None,
    prefetch_related=None,
    path=None,
):
    """
    Search pages of this class for ``query_string``.

    :param query_string: the text to search for
    :param show_unpublished: include unpublished (non-live) pages
    :param search_title_only: restrict matching to the title field
    :param extra_filters: extra backend filters (dict); defaults to none
    :param prefetch_related: relations to prefetch on results; defaults to none
    :param path: if given, restrict results to pages under this path
    """
    # FIX: the defaults were mutable ({} and []), shared across every call;
    # use None sentinels and build fresh objects per call.
    if extra_filters is None:
        extra_filters = {}
    if prefetch_related is None:
        prefetch_related = []

    # Filters
    filters = extra_filters.copy()
    if not show_unpublished:
        filters["live"] = True

    # Path
    if path:
        filters["path__startswith"] = path

    # Fields
    fields = None
    if search_title_only:
        fields = ["title"]

    # Search
    s = get_search_backend()
    return s.search(query_string, cls, fields=fields, filters=filters,
                    prefetch_related=prefetch_related)
def search(request):
    """Site search: pages, promoted picks, and Person snippets (HTML or JSON)."""
    do_json = 'json' in request.GET
    search_query = request.GET.get('query', None)
    page = request.GET.get('page', 1)

    if not search_query:
        search_results = Page.objects.none()
        promoted_results = Page.objects.none()
        people_results = Person.objects.none()
    else:
        alias_ct = ContentType.objects.get_for_model(PageAlias)
        # Live pages only, minus root/home (depth <= 2) and PageAlias pages.
        search_results = (
            Page.objects.live()
            .filter(depth__gt=2)
            .exclude(content_type=alias_ct)
            .search(search_query)
        )

        query = Query.get(search_query)
        # log the query so Wagtail can suggest promoted results
        query.add_hit()

        # promoted search results (editors' picks for this query)
        promoted_results = Page.objects.filter(
            pk__in=[pick.page.id for pick in query.editors_picks.all()])

        # search Person snippets
        people_results = get_search_backend().search(
            search_query, Person.objects.all())

    # Pagination — fall back to the first/last page on bad page numbers.
    paginator = Paginator(search_results, 10)
    try:
        search_results = paginator.page(page)
    except PageNotAnInteger:
        search_results = paginator.page(1)
    except EmptyPage:
        search_results = paginator.page(paginator.num_pages)

    response = {
        'search_query': search_query,
        'search_results': search_results,
        'promoted_results': promoted_results,
        'people_results': people_results,
    }

    if do_json:
        return JsonResponse(get_results_json(response))
    return render(request, 'search/search.html', response)
def list(request, app_label, model_name):
    """Admin index of snippets of one model, with optional full-text search."""
    model = get_snippet_model_from_url_params(app_label, model_name)

    # Any of add/change/delete grants access to the listing.
    permissions = [
        get_permission_name(action, model)
        for action in ['add', 'change', 'delete']
    ]
    if not any([request.user.has_perm(perm) for perm in permissions]):
        return permission_denied(request)

    items = model.objects.all()

    # Search
    is_searchable = class_is_indexed(model)
    is_searching = False
    search_query = None
    placeholder = _("Search %(snippet_type_name)s") % {
        'snippet_type_name': model._meta.verbose_name_plural}
    if is_searchable and 'q' in request.GET:
        search_form = SearchForm(request.GET, placeholder=placeholder)
        if search_form.is_valid():
            search_query = search_form.cleaned_data['q']
            items = get_search_backend().search(search_query, items)
            is_searching = True
    else:
        # Unbound form so the template can still render the search box.
        search_form = SearchForm(placeholder=placeholder)

    paginator, paginated_items = paginate(request, items)

    # AJAX requests receive only the results partial.
    if request.is_ajax():
        template = 'wagtailsnippets/snippets/results.html'
    else:
        template = 'wagtailsnippets/snippets/type_index.html'

    return render(request, template, {
        'model_opts': model._meta,
        'items': paginated_items,
        'can_add_snippet': request.user.has_perm(get_permission_name('add', model)),
        'is_searchable': is_searchable,
        'search_form': search_form,
        'is_searching': is_searching,
        'query_string': search_query,
    })
def search(self, query_string, fields=None, operator=None,
           order_by_relevance=True, backend='default'):
    """
    This runs a search query on all the items in the QuerySet
    """
    return get_search_backend(backend).search(
        query_string,
        self,
        fields=fields,
        operator=operator,
        order_by_relevance=order_by_relevance,
    )
def _search_newsitems(request, newsitem_models, query):
    """Yield (verbose name, permissions, top-10 results) per matching model."""
    backend = get_search_backend()
    for model in newsitem_models:
        hits = backend.search(query, model)[:10]
        if not hits:
            continue
        yield (
            model._meta.verbose_name_plural,
            perms_for_template(request, model),
            hits,
        )
def handle(self, **options):
    """Rebuild the search index from every model that subclasses Indexed."""
    # Print info
    self.stdout.write("Getting object list")

    # Get list of indexed models
    indexed_models = [model for model in models.get_models() if issubclass(model, Indexed)]

    # Object set: maps a dedup key to the single object that should be indexed.
    object_set = {}

    # Add all objects to object set and detect any duplicates
    # Duplicates are caused when both a model and a derived model are indexed
    # Eg, if BlogPost inherits from Page and both of these models are indexed
    # If we were to add all objects from both models into the index, all the BlogPosts will have two entries
    for model in indexed_models:
        # Get toplevel content type
        toplevel_content_type = model.indexed_get_toplevel_content_type()

        # Loop through objects
        for obj in model.get_indexed_objects():
            # Get key for this object
            key = toplevel_content_type + ':' + str(obj.pk)

            # Check if this key already exists
            if key in object_set:
                # Conflict, work out who should get this space
                # The object with the longest content type string gets the space
                # Eg, "wagtailcore.Page-myapp.BlogPost" kicks out "wagtailcore.Page"
                if len(obj.indexed_get_content_type()) > len(object_set[key].indexed_get_content_type()):
                    # Take the spot
                    object_set[key] = obj
            else:
                # Space free, take it
                object_set[key] = obj

    # Search backend
    # NOTE(review): when 'backend' is present, options['backend'] is used
    # as-is — presumably an already-constructed backend instance rather
    # than a backend name; confirm with the command's callers.
    if 'backend' in options:
        s = options['backend']
    else:
        s = get_search_backend()

    # Reset the index ("Reseting" typo kept — fixing it would change output)
    self.stdout.write("Reseting index")
    s.reset_index()

    # Add types (register each model's mapping with the backend)
    self.stdout.write("Adding types")
    for model in indexed_models:
        s.add_type(model)

    # Add objects to index, echoing each result line from the backend
    self.stdout.write("Adding objects")
    for result in s.add_bulk(object_set.values()):
        self.stdout.write(result[0] + ' ' + str(result[1]))

    # Refresh index
    self.stdout.write("Refreshing index")
    s.refresh_index()
def search(request):
    """Site search: pages, promoted picks, and Person snippets (HTML or JSON)."""
    do_json = 'json' in request.GET
    search_query = request.GET.get('query', None)
    page = request.GET.get('page', 1)

    # Search
    if search_query:
        page_alias_content_type = ContentType.objects.get_for_model(PageAlias)
        # Live pages only, excluding root/home (depth <= 2) and alias pages.
        search_results = (
            Page.objects.live()
            # exclude root and home pages
            .filter(depth__gt=2)
            # exclude PageAlias pages
            .exclude(
                content_type=page_alias_content_type).search(search_query))
        query = Query.get(search_query)
        # log the query so Wagtail can suggest promoted results
        query.add_hit()
        # promoted search results (editors' picks for this query)
        promoted_page_ids = [
            pick.page.id for pick in query.editors_picks.all()
        ]
        promoted_results = Page.objects.filter(pk__in=promoted_page_ids)
        # search Person snippets
        search_backend = get_search_backend()
        people_results = search_backend.search(search_query,
                                               Person.objects.all())
    else:
        search_results = Page.objects.none()
        promoted_results = Page.objects.none()
        people_results = Person.objects.none()

    # Pagination — fall back to the first/last page on bad page numbers.
    paginator = Paginator(search_results, 10)
    try:
        search_results = paginator.page(page)
    except PageNotAnInteger:
        search_results = paginator.page(1)
    except EmptyPage:
        search_results = paginator.page(paginator.num_pages)

    response = {
        'search_query': search_query,
        'search_results': search_results,
        'promoted_results': promoted_results,
        'people_results': people_results,
    }

    if do_json:
        return JsonResponse(get_results_json(response))
    else:
        return render(request, 'search/search.html', response)
def choose(request, content_type_app_name, content_type_model_name):
    """Snippet chooser modal: list, search, and paginate snippets of one type."""
    content_type = get_content_type_from_url_params(
        content_type_app_name, content_type_model_name)
    model = content_type.model_class()
    snippet_type_name, snippet_type_name_plural = get_snippet_type_name(content_type)

    items = model.objects.all()

    # Search
    is_searchable = class_is_indexed(model)
    is_searching = False
    search_query = None
    placeholder = _("Search %(snippet_type_name)s") % {
        'snippet_type_name': snippet_type_name_plural}
    if is_searchable and 'q' in request.GET:
        search_form = SearchForm(request.GET, placeholder=placeholder)
        if search_form.is_valid():
            search_query = search_form.cleaned_data['q']
            items = get_search_backend().search(search_query, items)
            is_searching = True
    else:
        search_form = SearchForm(placeholder=placeholder)

    # Pagination
    paginator, paginated_items = paginate(request, items, per_page=25)

    # If paginating or searching, render "results.html"
    if request.GET.get('results', None) == 'true':
        return render(
            request, "wagtailsnippets/chooser/results.html", {
                'content_type': content_type,
                'snippet_type_name': snippet_type_name,
                'items': paginated_items,
                'query_string': search_query,
                'is_searching': is_searching,
            })

    return render_modal_workflow(
        request, 'wagtailsnippets/chooser/choose.html',
        'wagtailsnippets/chooser/choose.js', {
            'content_type': content_type,
            'snippet_type_name': snippet_type_name,
            'items': paginated_items,
            'is_searchable': is_searchable,
            'search_form': search_form,
            'query_string': search_query,
            'is_searching': is_searching,
        })
def find_backend(cls):
    """Return the settings name of the first configured backend of type cls, if any."""
    if not hasattr(settings, 'WAGTAILSEARCH_BACKENDS'):
        # No explicit configuration: only the DB backend exists, as 'default'.
        return 'default' if cls == DBSearch else None

    for backend in settings.WAGTAILSEARCH_BACKENDS.keys():
        if isinstance(get_search_backend(backend), cls):
            return backend
def choose(request, app_label, model_name):
    """Snippet chooser modal: list, search, and paginate snippets of one model."""
    model = get_snippet_model_from_url_params(app_label, model_name)

    items = model.objects.all()

    # Search
    is_searchable = class_is_indexed(model)
    is_searching = False
    search_query = None
    placeholder = _("Search %(snippet_type_name)s") % {
        "snippet_type_name": model._meta.verbose_name}
    if is_searchable and "q" in request.GET:
        search_form = SearchForm(request.GET, placeholder=placeholder)
        if search_form.is_valid():
            search_query = search_form.cleaned_data["q"]
            items = get_search_backend().search(search_query, items)
            is_searching = True
    else:
        search_form = SearchForm(placeholder=placeholder)

    # Pagination
    paginator, paginated_items = paginate(request, items, per_page=25)

    # If paginating or searching, render "results.html"
    if request.GET.get("results", None) == "true":
        return render(
            request,
            "wagtailsnippets/chooser/results.html",
            {
                "model_opts": model._meta,
                "items": paginated_items,
                "query_string": search_query,
                "is_searching": is_searching,
            },
        )

    return render_modal_workflow(
        request,
        "wagtailsnippets/chooser/choose.html",
        "wagtailsnippets/chooser/choose.js",
        {
            "model_opts": model._meta,
            "items": paginated_items,
            "is_searchable": is_searchable,
            "search_form": search_form,
            "query_string": search_query,
            "is_searching": is_searching,
        },
    )
def list(request, content_type_app_name, content_type_model_name):
    """Admin index of snippets of one content type, with optional search."""
    content_type = get_content_type_from_url_params(content_type_app_name,
                                                    content_type_model_name)
    model = content_type.model_class()

    # Any of add/change/delete grants access to the listing.
    permissions = [
        get_permission_name(action, model)
        for action in ['add', 'change', 'delete']
    ]
    if not any([request.user.has_perm(perm) for perm in permissions]):
        return permission_denied(request)

    snippet_type_name, snippet_type_name_plural = get_snippet_type_name(content_type)

    items = model.objects.all()

    # Search
    is_searchable = class_is_indexed(model)
    is_searching = False
    search_query = None
    if is_searchable and 'q' in request.GET:
        search_form = SearchForm(request.GET, placeholder=_("Search %(snippet_type_name)s") % {
            'snippet_type_name': snippet_type_name_plural
        })
        if search_form.is_valid():
            search_query = search_form.cleaned_data['q']
            search_backend = get_search_backend()
            items = search_backend.search(search_query, items)
            is_searching = True
    else:
        # Unbound form so the template can still render the search box.
        search_form = SearchForm(placeholder=_("Search %(snippet_type_name)s") % {
            'snippet_type_name': snippet_type_name_plural
        })

    paginator, paginated_items = paginate(request, items)

    # AJAX requests receive only the results partial.
    if request.is_ajax():
        template = 'wagtailsnippets/snippets/results.html'
    else:
        template = 'wagtailsnippets/snippets/type_index.html'

    return render(request, template, {
        'content_type': content_type,
        'snippet_type_name': snippet_type_name,
        'snippet_type_name_plural': snippet_type_name_plural,
        'items': paginated_items,
        'can_add_snippet': request.user.has_perm(get_permission_name('add', model)),
        'is_searchable': is_searchable,
        'search_form': search_form,
        'is_searching': is_searching,
        'query_string': search_query,
    })
def choose(request, app_label, model_name):
    """Snippet chooser modal with a stable ordering fallback for pagination."""
    model = get_snippet_model_from_url_params(app_label, model_name)

    items = model.objects.all()

    # Preserve the snippet's model-level ordering if specified, but fall back on PK if not
    # (to ensure pagination is consistent)
    if not items.ordered:
        items = items.order_by('pk')

    # Search
    is_searchable = class_is_indexed(model)
    is_searching = False
    search_query = None
    placeholder = _("Search %(snippet_type_name)s") % {
        'snippet_type_name': model._meta.verbose_name}
    if is_searchable and 'q' in request.GET:
        search_form = SearchForm(request.GET, placeholder=placeholder)
        if search_form.is_valid():
            search_query = search_form.cleaned_data['q']
            items = get_search_backend().search(search_query, items)
            is_searching = True
    else:
        search_form = SearchForm(placeholder=placeholder)

    # Pagination
    paginator, paginated_items = paginate(request, items, per_page=25)

    # If paginating or searching, render "results.html"
    if request.GET.get('results', None) == 'true':
        return render(
            request, "wagtailsnippets/chooser/results.html", {
                'model_opts': model._meta,
                'items': paginated_items,
                'query_string': search_query,
                'is_searching': is_searching,
            })

    return render_modal_workflow(
        request, 'wagtailsnippets/chooser/choose.html',
        'wagtailsnippets/chooser/choose.js', {
            'model_opts': model._meta,
            'items': paginated_items,
            'is_searchable': is_searchable,
            'search_form': search_form,
            'query_string': search_query,
            'is_searching': is_searching,
        })
def list(request, app_label, model_name):
    """Admin index of snippets of one model, with search and stable ordering."""
    model = get_snippet_model_from_url_params(app_label, model_name)

    # Any of add/change/delete grants access to the listing.
    permissions = [
        get_permission_name(action, model)
        for action in ['add', 'change', 'delete']
    ]
    if not any([request.user.has_perm(perm) for perm in permissions]):
        return permission_denied(request)

    items = model.objects.all()

    # Preserve the snippet's model-level ordering if specified, but fall back on PK if not
    # (to ensure pagination is consistent)
    if not items.ordered:
        items = items.order_by('pk')

    # Search
    is_searchable = class_is_indexed(model)
    is_searching = False
    search_query = None
    if is_searchable and 'q' in request.GET:
        search_form = SearchForm(request.GET, placeholder=_("Search %(snippet_type_name)s") % {
            'snippet_type_name': model._meta.verbose_name_plural
        })
        if search_form.is_valid():
            search_query = search_form.cleaned_data['q']
            search_backend = get_search_backend()
            items = search_backend.search(search_query, items)
            is_searching = True
    else:
        # Unbound form so the template can still render the search box.
        search_form = SearchForm(placeholder=_("Search %(snippet_type_name)s") % {
            'snippet_type_name': model._meta.verbose_name_plural
        })

    paginator, paginated_items = paginate(request, items)

    # AJAX requests receive only the results partial.
    if request.is_ajax():
        template = 'wagtailsnippets/snippets/results.html'
    else:
        template = 'wagtailsnippets/snippets/type_index.html'

    return render(request, template, {
        'model_opts': model._meta,
        'items': paginated_items,
        'can_add_snippet': request.user.has_perm(get_permission_name('add', model)),
        'is_searchable': is_searchable,
        'search_form': search_form,
        'is_searching': is_searching,
        'query_string': search_query,
    })
def setUp(self):
    """Bind self.backend to the configured entry for self.backend_path, or skip."""
    # Search WAGTAILSEARCH_BACKENDS for an entry that uses the given backend path
    for name, conf in settings.WAGTAILSEARCH_BACKENDS.items():
        if conf['BACKEND'] == self.backend_path:
            self.backend = get_search_backend(name)
            break
    else:
        # no conf entry found - skip tests for this backend
        raise unittest.SkipTest(
            "No WAGTAILSEARCH_BACKENDS entry for the backend %s" % self.backend_path)

    self.load_test_data()
def choose(request, content_type_app_name, content_type_model_name):
    """Snippet chooser modal: list, search, and paginate snippets of one type."""
    content_type = get_content_type_from_url_params(content_type_app_name,
                                                    content_type_model_name)
    model = content_type.model_class()
    snippet_type_name, snippet_type_name_plural = get_snippet_type_name(content_type)

    items = model.objects.all()

    # Search
    is_searchable = class_is_indexed(model)
    is_searching = False
    search_query = None
    if is_searchable and 'q' in request.GET:
        search_form = SearchForm(request.GET, placeholder=_("Search %(snippet_type_name)s") % {
            'snippet_type_name': snippet_type_name_plural
        })
        if search_form.is_valid():
            search_query = search_form.cleaned_data['q']
            search_backend = get_search_backend()
            items = search_backend.search(search_query, items)
            is_searching = True
    else:
        # Unbound form so the template can still render the search box.
        search_form = SearchForm(placeholder=_("Search %(snippet_type_name)s") % {
            'snippet_type_name': snippet_type_name_plural
        })

    # Pagination
    paginator, paginated_items = paginate(request, items, per_page=25)

    # If paginating or searching, render "results.html"
    if request.GET.get('results', None) == 'true':
        return render(request, "wagtailsnippets/chooser/results.html", {
            'content_type': content_type,
            'snippet_type_name': snippet_type_name,
            'items': paginated_items,
            'query_string': search_query,
            'is_searching': is_searching,
        })

    return render_modal_workflow(
        request,
        'wagtailsnippets/chooser/choose.html', 'wagtailsnippets/chooser/choose.js',
        {
            'content_type': content_type,
            'snippet_type_name': snippet_type_name,
            'items': paginated_items,
            'is_searchable': is_searchable,
            'search_form': search_form,
            'query_string': search_query,
            'is_searching': is_searching,
        }
    )
def setUp(self):
    """Find the configured entry for self.backend_path, then rebuild its index."""
    # Search WAGTAILSEARCH_BACKENDS for an entry that uses the given backend path
    for name, conf in settings.WAGTAILSEARCH_BACKENDS.items():
        if conf['BACKEND'] == self.backend_path:
            self.backend = get_search_backend(name)
            self.backend_name = name
            break
    else:
        # no conf entry found - skip tests for this backend
        raise unittest.SkipTest(
            "No WAGTAILSEARCH_BACKENDS entry for the backend %s" % self.backend_path)

    management.call_command(
        'update_index', backend_name=self.backend_name,
        interactive=False, stdout=StringIO())
def setUp(self):
    """Bind backend and backend_name for self.backend_path, then load fixtures."""
    # Search WAGTAILSEARCH_BACKENDS for an entry that uses the given backend path
    for name, conf in settings.WAGTAILSEARCH_BACKENDS.items():
        if conf['BACKEND'] == self.backend_path:
            self.backend = get_search_backend(name)
            self.backend_name = name
            break
    else:
        # no conf entry found - skip tests for this backend
        raise unittest.SkipTest(
            "No WAGTAILSEARCH_BACKENDS entry for the backend %s" % self.backend_path)

    self.load_test_data()
def update_backend(self, backend_name, object_list):
    """Rebuild one backend's index via its rebuilder, honouring its language."""
    # Print info
    self.stdout.write("Updating backend: " + backend_name)

    # Get backend
    backend = get_search_backend(backend_name)

    # Activate backend language so indexed text is analysed consistently;
    # restored at the end.
    cur_language = translation.get_language()
    backend_language = getattr(backend, 'language_code', None)
    if backend_language is not None:
        translation.activate(backend_language)

    # Get rebuilder — backends without one cannot be rebuilt this way.
    rebuilder = backend.get_rebuilder()
    if not rebuilder:
        self.stdout.write(backend_name + ": Backend doesn't support rebuild. Skipping")
        return

    # Start rebuild; the rebuilder hands back the index to populate.
    self.stdout.write(backend_name + ": Starting rebuild")
    index = rebuilder.start()

    for model, queryset in object_list:
        self.stdout.write(backend_name + ": Indexing model '%s.%s'" % (
            model._meta.app_label,
            model.__name__,
        ))

        # Add model
        index.add_model(model)

        # Add items (1000 at a time)
        count = 0
        for chunk in self.print_iter_progress(
                self.queryset_chunks(queryset)):
            index.add_items(model, chunk)
            count += len(chunk)
        self.stdout.write("Indexed %d %s" % (count, model._meta.verbose_name_plural))
        self.print_newline()

    # Finish rebuild (e.g. swap the new index into place)
    self.stdout.write(backend_name + ": Finishing rebuild")
    rebuilder.finish()

    # Return to Original Thread Language
    if backend_language is not None:
        translation.activate(cur_language)
def list(request, app_label, model_name):
    """Admin index of snippets of one model, with optional full-text search."""
    model = get_snippet_model_from_url_params(app_label, model_name)

    # Any of add/change/delete grants access to the listing.
    permissions = [get_permission_name(action, model) for action in ["add", "change", "delete"]]
    if not any([request.user.has_perm(perm) for perm in permissions]):
        return permission_denied(request)

    items = model.objects.all()

    # Search
    is_searchable = class_is_indexed(model)
    is_searching = False
    search_query = None
    if is_searchable and "q" in request.GET:
        search_form = SearchForm(
            request.GET,
            placeholder=_("Search %(snippet_type_name)s")
            % {"snippet_type_name": model._meta.verbose_name_plural},
        )
        if search_form.is_valid():
            search_query = search_form.cleaned_data["q"]
            search_backend = get_search_backend()
            items = search_backend.search(search_query, items)
            is_searching = True
    else:
        # Unbound form so the template can still render the search box.
        search_form = SearchForm(
            placeholder=_("Search %(snippet_type_name)s")
            % {"snippet_type_name": model._meta.verbose_name_plural}
        )

    paginator, paginated_items = paginate(request, items)

    # AJAX requests receive only the results partial.
    if request.is_ajax():
        template = "wagtailsnippets/snippets/results.html"
    else:
        template = "wagtailsnippets/snippets/type_index.html"

    return render(
        request,
        template,
        {
            "model_opts": model._meta,
            "items": paginated_items,
            "can_add_snippet": request.user.has_perm(get_permission_name("add", model)),
            "is_searchable": is_searchable,
            "search_form": search_form,
            "is_searching": is_searching,
            "query_string": search_query,
        },
    )
def test_backend_loader(self):
    """get_search_backend() should import dotted-path backends and reject bad ones."""
    # Test DB backend import
    backend = get_search_backend(
        backend='wagtail.wagtailsearch.backends.db.DBSearch')
    self.assertIsInstance(backend, DBSearch)

    # Test Elastic search backend import
    backend = get_search_backend(
        backend='wagtail.wagtailsearch.backends.elasticsearch.ElasticSearch')
    self.assertIsInstance(backend, ElasticSearch)

    # Test loading a non existant backend
    with self.assertRaises(InvalidSearchBackendError):
        get_search_backend(
            backend='wagtail.wagtailsearch.backends.doesntexist.DoesntExist')

    # Test something that isn't a backend
    with self.assertRaises(InvalidSearchBackendError):
        get_search_backend(backend="I'm not a backend!")
def get_elasticsearch_backend(self):
    """Return the first configured Elasticsearch backend, or skip the test."""
    from django.conf import settings
    from wagtail.wagtailsearch.backends import get_search_backend

    backend_path = 'wagtail.wagtailsearch.backends.elasticsearch'

    # Search WAGTAILSEARCH_BACKENDS for an entry that uses the given backend path
    for name, conf in settings.WAGTAILSEARCH_BACKENDS.items():
        if conf['BACKEND'] == backend_path:
            return get_search_backend(name)

    # no conf entry found - skip tests for this backend
    raise unittest.SkipTest(
        "No WAGTAILSEARCH_BACKENDS entry for the backend %s" % backend_path)
def choose(request):
    """End note chooser modal: search and paginate all EndNote snippets."""
    # TODO: Ideally this would return the endnotes for the current article.
    items = EndNote.objects.all()

    # Search
    is_searchable = class_is_indexed(EndNote)
    is_searching = False
    search_query = None
    if is_searchable and 'q' in request.GET:
        search_form = SearchForm(request.GET, placeholder=_("Search End Notes"))
        if search_form.is_valid():
            search_query = search_form.cleaned_data['q']
            items = get_search_backend().search(search_query, items)
            is_searching = True
    else:
        search_form = SearchForm(placeholder=_("Search End Notes"))

    # Pagination — clamp bad page numbers to the first/last page.
    paginator = Paginator(items, 25)
    try:
        paginated_items = paginator.page(request.GET.get("p", 1))
    except PageNotAnInteger:
        paginated_items = paginator.page(1)
    except EmptyPage:
        paginated_items = paginator.page(paginator.num_pages)

    # If paginating or searching, render "results.html"
    if request.GET.get('results', None) == 'true':
        return render(
            request, "content_notes/chooser/results.html", {
                'items': paginated_items,
                'query_string': search_query,
                'is_searching': is_searching,
            })

    return render_modal_workflow(
        request, 'content_notes/chooser/choose.html',
        'content_notes/chooser/choose.js', {
            'items': paginated_items,
            'is_searchable': is_searchable,
            'search_form': search_form,
            'query_string': search_query,
            'is_searching': is_searching,
        })
def update_backend(self, backend_name, schema_only=False):
    """Rebuild every index of one backend; with schema_only, add mappings but no objects."""
    self.stdout.write("Updating backend: " + backend_name)
    backend = get_search_backend(backend_name)

    # Backends without a rebuilder class (e.g. the DB backend) need no rebuild.
    if not backend.rebuilder_class:
        self.stdout.write("Backend '%s' doesn't require rebuilding" % backend_name)
        return

    # Group indexed models by the index each would live in.
    models_grouped_by_index = group_models_by_index(
        backend, get_indexed_models()).items()
    if not models_grouped_by_index:
        self.stdout.write(backend_name + ": No indices to rebuild")

    for index, models in models_grouped_by_index:
        self.stdout.write(backend_name + ": Rebuilding index %s" % index.name)

        # Start rebuild — the rebuilder hands back the index to populate.
        rebuilder = backend.rebuilder_class(index)
        index = rebuilder.start()

        # Add models (register mappings)
        for model in models:
            index.add_model(model)

        # Add objects
        object_count = 0
        if not schema_only:
            for model in models:
                self.stdout.write('{}: {}.{} '.format(
                    backend_name, model._meta.app_label,
                    model.__name__).ljust(35), ending='')

                # Add items (1000 at a time); order_by('pk') keeps chunking stable.
                for chunk in self.print_iter_progress(
                        self.queryset_chunks(
                            model.get_indexed_objects().order_by('pk'))):
                    index.add_items(model, chunk)
                    object_count += len(chunk)
                self.print_newline()

        # Finish rebuild (e.g. swap the new index into place)
        rebuilder.finish()

        self.stdout.write(backend_name + ": indexed %d objects" % object_count)
        self.print_newline()
def dosearch(query_string, **kwargs):
    """Run a search with this object's model/fields/filters, allowing overrides."""
    # Pull out the optional backend selector; fall back to the default backend.
    backend = kwargs.pop('backend', 'default')

    # Start from this searcher's configuration, then let callers override.
    search_kwargs = dict(model=cls, fields=self.fields, filters=self.filters)
    search_kwargs.update(kwargs)

    # Run search
    return get_search_backend(backend=backend).search(query_string, **search_kwargs)
def update_backend(self, backend_name, object_list):
    """Rebuild one backend's index via its rebuilder, honouring its language."""
    # Print info
    self.stdout.write("Updating backend: " + backend_name)

    # Get backend
    backend = get_search_backend(backend_name)

    # Activate backend language so indexed text is analysed consistently;
    # restored at the end.
    cur_language = translation.get_language()
    backend_language = getattr(backend, 'language_code', None)
    if backend_language is not None:
        translation.activate(backend_language)

    # Get rebuilder — backends without one cannot be rebuilt this way.
    rebuilder = backend.get_rebuilder()
    if not rebuilder:
        self.stdout.write(backend_name + ": Backend doesn't support rebuild. Skipping")
        return

    # Start rebuild; the rebuilder hands back the index to populate.
    self.stdout.write(backend_name + ": Starting rebuild")
    index = rebuilder.start()

    for model, queryset in object_list:
        self.stdout.write(backend_name + ": Indexing model '%s.%s'" % (
            model._meta.app_label,
            model.__name__,
        ))

        # Add model
        index.add_model(model)

        # Add items (1000 at a time)
        count = 0
        for chunk in self.print_iter_progress(self.queryset_chunks(queryset)):
            index.add_items(model, chunk)
            count += len(chunk)
        self.stdout.write("Indexed %d %s" % (
            count, model._meta.verbose_name_plural))
        self.print_newline()

    # Finish rebuild (e.g. swap the new index into place)
    self.stdout.write(backend_name + ": Finishing rebuild")
    rebuilder.finish()

    # Return to Original Thread Language
    if backend_language is not None:
        translation.activate(cur_language)
def choose_modal(request):
    """Modal chooser view for selecting a news item.

    The concrete NewsItem model is resolved from the ``type`` GET
    parameter; supports optional full-text search (``q``) and pagination.
    """
    # Unknown or missing ?type= values are a 404, not a server error.
    try:
        newsitem_model_string = request.GET['type']
        NewsItem = get_newsitem_model(newsitem_model_string)
    except (ValueError, KeyError):
        raise Http404

    newsitem_list = NewsItem.objects.all()

    # Search
    is_searching = False
    search_query = None
    if 'q' in request.GET:
        search_form = SearchForm(request.GET, placeholder="Search news")
        if search_form.is_valid():
            search_query = search_form.cleaned_data['q']
            search_backend = get_search_backend()
            newsitem_list = search_backend.search(search_query, newsitem_list)
            is_searching = True
    else:
        search_form = SearchForm()

    # Pagination
    paginator, paginated_items = paginate(request, newsitem_list, per_page=10)

    # If paginating or searching, render "results.html" - these views are
    # accessed via AJAX so do not need the modal wrapper
    if request.GET.get('results', None) == 'true':
        return render(request, "wagtailnews/chooser/search_results.html", {
            'query_string': search_query,
            'items': paginated_items,
            'is_searching': is_searching,
        })

    return render_modal_workflow(
        request,
        'wagtailnews/chooser/chooser.html', 'wagtailnews/chooser/choose.js', {
            'query_string': search_query,
            'newsitem_type': newsitem_model_string,
            'items': paginated_items,
            'is_searchable': True,
            'is_searching': is_searching,
            'search_form': search_form,
        }
    )
def test_import_old_name(self):
    """The deprecated dotted backend path still resolves, with a warning."""
    expected_message = (
        "The 'wagtail.wagtailsearch.backends.db.DBSearch' search backend path has "
        "changed to 'wagtail.wagtailsearch.backends.db'. Please update the "
        "WAGTAILSEARCH_BACKENDS setting to use the new path."
    )
    with warnings.catch_warnings(record=True) as recorded:
        # Ensure the deprecation warning is not suppressed by filters.
        warnings.simplefilter('always')
        backend = get_search_backend(backend='wagtail.wagtailsearch.backends.db.DBSearch')
        # The legacy path must still hand back a usable database backend.
        self.assertIsInstance(backend, DatabaseSearchBackend)
        # Exactly one warning, of the right category and wording.
        self.assertEqual(len(recorded), 1)
        warning = recorded[0]
        self.assertIs(warning.category, RemovedInWagtail18Warning)
        self.assertEqual(str(warning.message), expected_message)
def test_import_old_name(self):
    """Importing via the legacy DBSearch path warns but still works."""
    old_path = "wagtail.wagtailsearch.backends.db.DBSearch"
    with warnings.catch_warnings(record=True) as caught:
        # Record every warning regardless of the active filters.
        warnings.simplefilter("always")
        backend = get_search_backend(backend=old_path)
        self.assertIsInstance(backend, DatabaseSearchBackend)
        self.assertEqual(len(caught), 1)
        self.assertIs(caught[0].category, RemovedInWagtail18Warning)
        self.assertEqual(
            str(caught[0].message),
            "The 'wagtail.wagtailsearch.backends.db.DBSearch' search backend path has "
            "changed to 'wagtail.wagtailsearch.backends.db'. Please update the "
            "WAGTAILSEARCH_BACKENDS setting to use the new path.",
        )
def list(request): items = EndNote.objects.all() # Search is_searchable = class_is_indexed(EndNote) is_searching = False search_query = None if is_searchable and 'q' in request.GET: search_form = SearchForm(request.GET, placeholder=_("Search End Notes")) if search_form.is_valid(): search_query = search_form.cleaned_data['q'] search_backend = get_search_backend() items = search_backend.search(search_query, items) is_searching = True else: search_form = SearchForm(placeholder=_("Search End Notes")) # Pagination p = request.GET.get('p', 1) paginator = Paginator(items, 20) try: paginated_items = paginator.page(p) except PageNotAnInteger: paginated_items = paginator.page(1) except EmptyPage: paginated_items = paginator.page(paginator.num_pages) # Template if request.is_ajax(): template = 'content_notes/endnotes/results.html' else: template = 'content_notes/endnotes/type_index.html' return render( request, template, { 'items': paginated_items, 'is_searchable': is_searchable, 'search_form': search_form, 'is_searching': is_searching, 'query_string': search_query, })
def do_search(self, request, queryset):
    """
    This performs a full-text search on the result set

    Eg: ?search=James Joyce
    """
    # No search parameter: pass the queryset through unchanged.
    if 'search' not in request.GET:
        return queryset

    # Respect the WAGTAILAPI_SEARCH_ENABLED setting (enabled by default).
    if not getattr(settings, 'WAGTAILAPI_SEARCH_ENABLED', True):
        raise self.BadRequestError("search is disabled")

    backend = get_search_backend()
    return backend.search(request.GET['search'], queryset)
def test_search(self):
    """Exercise the search view: indexing, querying and pagination."""
    self.backend = get_search_backend('default')
    self.backend.reset_index()
    # Create 20 articles so the results span exactly two pages at the
    # view's page size.
    for a in range(0, 20):
        ArticlePage.objects.create(
            title='article %s' % (a,),
            depth=a,
            subtitle='article %s subtitle' % (a,),
            slug='article-%s' % (a,),
            path=[a])
    self.backend.refresh_index()

    # First page: has a "next" arrow but no "previous" arrow.
    response = self.client.get(reverse('search'), {
        'q': 'article'
    })
    self.assertContains(response, 'Page 1 of 2')
    self.assertContains(response, '→')
    self.assertNotContains(response, '←')

    # Second page: has a "previous" arrow but no "next" arrow.
    response = self.client.get(reverse('search'), {
        'q': 'article',
        'p': '2',
    })
    self.assertContains(response, 'Page 2 of 2')
    self.assertNotContains(response, '→')
    self.assertContains(response, '←')

    # Non-numeric page number falls back to the first page.
    response = self.client.get(reverse('search'), {
        'q': 'article',
        'p': 'foo',
    })
    self.assertContains(response, 'Page 1 of 2')

    # Out-of-range page number falls back to the last page.
    response = self.client.get(reverse('search'), {
        'q': 'article',
        'p': '4',
    })
    self.assertContains(response, 'Page 2 of 2')

    # A query with no matches shows the empty-results message.
    response = self.client.get(reverse('search'), {
        'q': 'magic'
    })
    self.assertContains(response, 'No search results for magic')

    # No query at all is rendered as a search for None.
    response = self.client.get(reverse('search'))
    self.assertContains(response, 'No search results for None')
def setUp(self):
    """Seed the search index with a known set of documents."""
    backend = get_search_backend()

    # Save each object to the database and push it into the index,
    # in a fixed order so every test starts from the same state.
    for model_class, title in (
        (models.SearchTest, "Hello World"),
        (models.SearchTest, "Hello"),
        (models.SearchTestChild, "Hello"),
    ):
        obj = model_class()
        obj.title = title
        obj.save()
        backend.add(obj)
def choose(request): items = Poll.objects.all() # Search is_searching = False search_query = None if 'q' in request.GET: search_form = AdminSearchForm(request.GET, placeholder=_("Search %(snippet_type_name)s") % { 'snippet_type_name': 'Polls' }) if search_form.is_valid(): search_query = search_form.cleaned_data['q'] search_backend = get_search_backend() items = search_backend.search(search_query, items) is_searching = True else: search_form = AdminSearchForm() # Pagination paginator, paginated_items = paginate(request, items, per_page=25) # If paginating or searching, render "results.html" if request.GET.get('results', None) == 'true': return render(request, "wagtailpolls/search_results.html", { 'items': paginated_items, 'query_string': search_query, 'is_searching': is_searching, }) return render_modal_workflow( request, 'wagtailpolls/choose.html', 'wagtailpolls/choose.js', { 'snippet_type_name': 'Poll', 'items': paginated_items, 'is_searchable': True, 'search_form': search_form, 'query_string': search_query, 'is_searching': is_searching, } )
def search(cls, q, results_per_page=None, page=1, prefetch_tags=False, filters=None):
    """Search instances of this model for ``q``.

    :param q: the search query string.
    :param results_per_page: when set, return a paginator page of results
        instead of the raw result set.
    :param page: page number to return (invalid values fall back to the
        first page; out-of-range values to the last page).
    :param prefetch_tags: when True, prefetch each result's tags.
    :param filters: optional dict of extra filters passed to the backend.
    """
    # BUG FIX: the default was the mutable literal `filters={}`, which is
    # shared across all calls; use the None sentinel instead.
    if filters is None:
        filters = {}

    # Run search query
    search_backend = get_search_backend()
    if prefetch_tags:
        results = search_backend.search(q, cls, prefetch_related=['tagged_items__tag'], filters=filters)
    else:
        results = search_backend.search(q, cls, filters=filters)

    # If results_per_page is set, return a paginator
    if results_per_page is not None:
        paginator = Paginator(results, results_per_page)
        try:
            return paginator.page(page)
        except PageNotAnInteger:
            return paginator.page(1)
        except EmptyPage:
            return paginator.page(paginator.num_pages)
    else:
        return results
def setUp(self):
    """Configure an Elasticsearch backend with the atomic rebuilder."""
    # elasticsearch-py is an optional dependency: skip the whole test
    # case when it isn't installed instead of crashing.
    try:
        from wagtail.wagtailsearch.backends.elasticsearch import (
            ElasticSearch,
            ElasticSearchAtomicIndexRebuilder,
        )
        from elasticsearch import NotFoundError
    except ImportError:
        raise unittest.SkipTest("elasticsearch-py not installed")

    # Keep references for the tests to use.
    self.ElasticSearch = ElasticSearch
    self.NotFoundError = NotFoundError

    # Force the atomic rebuilder and start each test from a clean index.
    self.backend = get_search_backend('elasticsearch')
    self.backend.rebuilder_class = ElasticSearchAtomicIndexRebuilder
    self.es = self.backend.es
    self.rebuilder = self.backend.get_rebuilder()
    self.backend.reset_index()
def update_backend(self, backend_name, schema_only=False):
    """Rebuild every index managed by the named search backend.

    With schema_only=True the index schemas are recreated but no objects
    are indexed.
    """
    self.stdout.write("Updating backend: " + backend_name)
    backend = get_search_backend(backend_name)
    # Backends without a rebuilder class keep themselves up to date and
    # don't need an explicit rebuild.
    if not backend.rebuilder_class:
        self.stdout.write("Backend '%s' doesn't require rebuilding" % backend_name)
        return
    models_grouped_by_index = group_models_by_index(backend, get_indexed_models()).items()
    if not models_grouped_by_index:
        self.stdout.write(backend_name + ": No indices to rebuild")
    for index, models in models_grouped_by_index:
        self.stdout.write(backend_name + ": Rebuilding index %s" % index.name)

        # Start rebuild
        # NOTE: the rebuilder may hand back a different index object (e.g.
        # an atomic rebuilder building into a fresh index).
        rebuilder = backend.rebuilder_class(index)
        index = rebuilder.start()

        # Add models
        for model in models:
            index.add_model(model)

        # Add objects
        object_count = 0
        if not schema_only:
            for model in models:
                self.stdout.write('{}: {}.{} '.format(backend_name, model._meta.app_label, model.__name__).ljust(35), ending='')

                # Add items (1000 at a time)
                for chunk in self.print_iter_progress(self.queryset_chunks(model.get_indexed_objects())):
                    index.add_items(model, chunk)
                    object_count += len(chunk)

                self.print_newline()

        # Finish rebuild
        rebuilder.finish()

        self.stdout.write(backend_name + ": indexed %d objects" % object_count)
        self.print_newline()
def _wait_for_elasticsearch(sleep_interval=2, max_wait=600):
    """Wait for elasticsearch container to start.

    Polls ``es.ping()`` every ``sleep_interval`` seconds; gives up after
    ``max_wait`` seconds by re-raising the connection error (if the node
    is unreachable) or raising a plain Exception (if it is reachable but
    not yet ready).
    """
    from elasticsearch import ConnectionError
    from wagtail.wagtailsearch.backends import get_search_backend
    es = get_search_backend('default').es
    t0 = time.time()
    while True:
        try:
            if es.ping():
                break
        except ConnectionError:
            # Node unreachable: keep retrying until the deadline, then
            # re-raise the original connection error.
            if time.time() - t0 > max_wait:
                raise
        else:
            # ping() returned False: the node answered but isn't ready.
            if time.time() - t0 > max_wait:
                raise Exception('Give up waiting for elasticsearch')
        # BUG FIX: `print "..."` is Python-2-only syntax and is a
        # SyntaxError under Python 3; the function form works on both.
        print("Waiting for elasticsearch initialization")
        time.sleep(sleep_interval)
def update_backend(self, backend_name, schema_only=False):
    """Rebuild the named backend's index for every indexed model.

    With schema_only=True the models are registered with the index but no
    objects are indexed.
    """
    # Print info
    self.stdout.write("Updating backend: " + backend_name)

    # Get backend
    backend = get_search_backend(backend_name)

    # Get rebuilder
    # Backends without a rebuilder keep themselves up to date; skip them.
    rebuilder = backend.get_rebuilder()
    if not rebuilder:
        self.stdout.write(backend_name + ": Backend doesn't require rebuild. Skipping")
        return

    # Start rebuild
    self.stdout.write(backend_name + ": Starting rebuild")
    index = rebuilder.start()

    for model in get_indexed_models():
        self.stdout.write(backend_name + ": Indexing model '%s.%s'" % (
            model._meta.app_label,
            model.__name__,
        ))

        # Add model
        index.add_model(model)

        # Index objects
        object_count = 0
        if not schema_only:
            # Add items (1000 at a time)
            for chunk in self.print_iter_progress(self.queryset_chunks(model.get_indexed_objects())):
                index.add_items(model, chunk)
                object_count += len(chunk)

        self.stdout.write("(indexed %d objects)" % object_count)
        self.print_newline()

    # Finish rebuild
    self.stdout.write(backend_name + ": Finishing rebuild")
    rebuilder.finish()
def filter_queryset(self, request, queryset, view):
    """
    This performs a full-text search on the result set

    Eg: ?search=James Joyce
    """
    # No search parameter: nothing to filter.
    if 'search' not in request.GET:
        return queryset

    # Respect the WAGTAILAPI_SEARCH_ENABLED setting (enabled by default).
    if not getattr(settings, 'WAGTAILAPI_SEARCH_ENABLED', True):
        raise BadRequestError("search is disabled")

    # Searching and filtering by tag at the same time is not supported
    if getattr(queryset, '_filtered_by_tag', False):
        raise BadRequestError("filtering by tag with a search query is not supported")

    backend = get_search_backend()
    return backend.search(request.GET['search'], queryset)
def update_backend(self, backend_name, object_list):
    """Rebuild the named backend's index from (model, queryset) pairs."""
    # Print info
    self.stdout.write(u"Updating backend: " + backend_name)

    # Get backend
    backend = get_search_backend(backend_name)

    # Get rebuilder
    # Backends without a rebuilder keep themselves up to date; skip them.
    rebuilder = backend.get_rebuilder()
    if not rebuilder:
        self.stdout.write(backend_name + u": Backend doesn't support rebuild. Skipping")
        return

    # Start rebuild
    self.stdout.write(backend_name + u": Starting rebuild")
    index = rebuilder.start()

    for model, queryset in object_list:
        self.stdout.write(backend_name + u": Indexing model '%s.%s'" % (
            model._meta.app_label,
            model.__name__,
        ))

        # Add model
        index.add_model(model)

        # Add items (1000 at a time)
        count = 0
        for chunk in self.print_iter_progress(self.queryset_chunks(queryset)):
            index.add_items(model, chunk)
            count += len(chunk)

        self.stdout.write(u"Indexed %d %s" % (
            count, model._meta.verbose_name_plural))
        self.print_newline()

    # Finish rebuild
    self.stdout.write(backend_name + u": Finishing rebuild")
    rebuilder.finish()
def update_backend(self, backend_name, models, object_list):
    """Reset the named backend's index, register types and bulk-index objects."""
    # Print info
    self.stdout.write("Updating backend: " + backend_name)

    # Get backend
    backend = get_search_backend(backend_name)

    # Reset the index
    # FIX: corrected message typo "Reseting" -> "Resetting".
    self.stdout.write(backend_name + ": Resetting index")
    backend.reset_index()

    # Add types
    self.stdout.write(backend_name + ": Adding types")
    for model in models:
        backend.add_type(model)

    # Add objects to index
    self.stdout.write(backend_name + ": Adding objects")
    for result in backend.add_bulk(object_list):
        self.stdout.write(result[0] + ' ' + str(result[1]))

    # Refresh index
    self.stdout.write(backend_name + ": Refreshing index")
    backend.refresh_index()
def get_search_backends():
    """Yield an instance of every search backend named in settings.

    Falls back to the single 'default' backend when the
    WAGTAILSEARCH_BACKENDS setting is absent.
    """
    if not hasattr(settings, 'WAGTAILSEARCH_BACKENDS'):
        yield get_search_backend('default')
        return

    for backend_name in settings.WAGTAILSEARCH_BACKENDS.keys():
        yield get_search_backend(backend_name)
def collections(request):
    """Collections & Exhibits index view.

    Reads the GET parameters digital/format/location/search/subject/unit/
    view, validates each against the database, and renders the matching
    collections, exhibits and subjects.
    """
    # PARAMETERS
    digital = request.GET.get('digital', None)
    if not digital == 'on':
        digital = None

    format = request.GET.get('format', None)
    if not format in Format.objects.all().values_list('text', flat=True):
        format = None

    location = request.GET.get('location', None)
    if not location in LocationPage.objects.live().values_list('title', flat=True):
        location = None

    search = request.GET.get('search', None)

    subject = request.GET.get('subject', None)
    if not subject in Subject.objects.all().values_list('name', flat=True):
        subject = None

    unit = request.GET.get('unit', None)

    view = request.GET.get('view', 'collections')
    if not view in ['collections', 'exhibits', 'subjects']:
        view = 'collections'

    # filter collections.
    collections = []
    if view == 'collections':
        filter_arguments = {}
        # format
        if format:
            filter_arguments['collection_placements__format__text'] = format
        # subject
        if subject:
            filter_arguments['collection_subject_placements__subject__in'] = Subject.objects.get(name=subject).get_descendants()
        # search
        if search:
            filter_arguments['id__in'] = list(map(lambda s: s.id, CollectionPage.objects.live().search(search)))
        # unit
        if unit:
            filter_arguments['unit'] = UnitPage.objects.get(title=unit)
        collections = CollectionPage.objects.live().filter(**filter_arguments).distinct()
        # digital
        if digital:
            collections = collections.filter(collection_placements__format__text='Digital')
        # sort browses by title, omitting leading articles.
        if not search:
            collections = sorted(collections, key=lambda c: re.sub(r'^(A|An|The) ', '', c.title))

    # filter exhibits.
    exhibits = []
    exhibits_current = []
    if view == 'exhibits':
        filter_arguments = {}
        if location:
            filter_arguments['exhibit_location__title'] = location
        if subject:
            filter_arguments['exhibit_subject_placements__subject__in'] = Subject.objects.get(name=subject).get_descendants()
        if unit:
            filter_arguments['unit'] = UnitPage.objects.get(title=unit)
        exhibits = ExhibitPage.objects.live().filter(**filter_arguments).distinct()
        exhibits_current = exhibits.filter(exhibit_open_date__lt=datetime.datetime.now().date(), exhibit_close_date__gt=datetime.datetime.now().date()).distinct()
        if digital:
            exhibits = exhibits.exclude(web_exhibit_url='')
            exhibits_current = exhibits_current.exclude(web_exhibit_url='')
        if search:
            exhibits = exhibits.search(search).results()
            exhibits_current = exhibits_current.search(search).results()
        if not search:
            exhibits = sorted(exhibits, key=lambda e: re.sub(r'^(A|An|The) ', '', e.title))
            # BUG FIX: the pattern was written as 'r^(A|An|The) ' — the raw
            # string prefix had slipped inside the literal, so it never
            # matched and leading articles were never stripped here.
            exhibits_current = sorted(exhibits_current, key=lambda e: re.sub(r'^(A|An|The) ', '', e.title))

    # formats.
    formats = Format.objects.all().values_list('text', flat=True)

    # the formats pulldown should skip 'Digital'. That shows up as a checkbox.
    formats_pulldown = ['Archives & Manuscripts', 'Audio', 'Books & Journals',
        'Images', 'Maps', 'Microform', 'Music Scores', 'Photographs', 'Reference Works',
        'Statistics & Datasets', 'Video']

    # locations
    locations = sorted(list(set(ExhibitPage.objects.exclude(exhibit_location=None).values_list('exhibit_location__title', flat=True))))

    subjects = []
    # for the code below, list all subjects that are children of the subjects in the list
    # above, plus anything with a libguide id. right now that is equal to
    # business, medicine and law. See DB's "collections subjects" lucid chart for more
    # info.
    subjects_queryset = Subject.objects.all()
    if search:
        s = get_search_backend()
        subjects_queryset = s.search(search, Subject)
    if subject:
        subject_ids = Subject.objects.get(name=subject).get_descendants()
        subjects_queryset = subjects_queryset.filter(id__in=subject_ids)

    # Precompute which subjects have related content so the loop below
    # does set intersections instead of per-subject queries.
    subjects_with_collections = set(CollectionPageSubjectPlacement.objects.values_list('subject', flat=True))
    subjects_with_exhibits = set(ExhibitPageSubjectPlacement.objects.values_list('subject', flat=True))
    subjects_with_specialists = set(StaffPageSubjectPlacement.objects.values_list('subject', flat=True))

    for s in subjects_queryset:
        subject_descendants = set(s.get_descendants().values_list('id', flat=True))
        parents = SubjectParentRelations.objects.filter(child=s).order_by('parent__name').values_list('parent__name', flat=True)
        has_collections = bool(subjects_with_collections.intersection(subject_descendants))
        has_exhibits = bool(subjects_with_exhibits.intersection(subject_descendants))
        has_subject_specialists = s.id in subjects_with_specialists
        subjects.append({
            'has_collections': has_collections,
            'has_exhibits': has_exhibits,
            'has_subject_specialists': has_subject_specialists,
            'libguide_url': s.libguide_url,
            'name': s.name,
            'parents': parents,
            'see_also': None
        })
        # Aliases ("see also" entries) get their own row pointing at the
        # canonical subject.
        for see_also in s.see_also.all():
            subjects.append({
                'has_collections': False,
                'has_exhibits': False,
                'has_subject_specialists': False,
                'libguide_url': None,
                'name': see_also.alias,
                'parents': [],
                'see_also': see_also.snippet.name
            })
    subjects = sorted(subjects, key=lambda s: s['name'])

    # for the subject pulldown, find subjects that are first generation
    # children - their parents should have no parent.
    # still need these:
    # Area and Cultural Studies
    # Social Sciences
    # Biological Sciences
    # Physical Sciences
    subjects_pulldown = ['Area & Cultural Studies', 'Arts',
        'Biological Sciences', 'Business', 'Humanities', 'Law', 'Literature',
        'Medicine', 'Physical Sciences', 'Social Sciences', 'Social Services',
        'Special Collections']

    default_image = None
    try:
        default_image = Image.objects.get(title="Default Placeholder Photo")
    except Exception:
        # FIX: was a bare `except:` (which also swallows SystemExit /
        # KeyboardInterrupt). Best effort: fall back to no default image.
        pass

    # Set context variables for templates
    home_page = StandardPage.objects.live().get(id=PUBLIC_HOMEPAGE)
    location_and_hours = get_hours_and_location(home_page)
    page_location = str(location_and_hours['page_location'])
    page_unit = location_and_hours['page_unit']

    return render(request, 'lib_collections/collections_index_page.html', {
        'collections': collections,
        'breadcrumb_div_css': 'col-md-12 breadcrumbs hidden-xs hidden-sm',
        'content_div_css': 'container body-container col-xs-12 col-lg-11 col-lg-offset-1',
        'default_image': default_image,
        'digital': digital,
        'exhibits': exhibits,
        'exhibits_current': exhibits_current,
        'format': format,
        'formats': formats,
        'formats_pulldown': formats_pulldown,
        'location': location,
        'locations': locations,
        'search': search,
        'self': {
            'title': 'Collections & Exhibits'
        },
        'subject': subject,
        'subjects': subjects,
        'subjects_pulldown': subjects_pulldown,
        'view': view,
        'page_unit': str(page_unit),
        'page_location': page_location,
        'address': location_and_hours['address'],
        'chat_url': get_unit_chat_link(page_unit, request),
        'chat_status': get_chat_status('uofc-ask'),
        'chat_status_css': get_chat_status_css('uofc-ask'),
        'hours_page_url': home_page.get_hours_page(request),
    })