def advanced_search(request):
    """Elasticsearch-specific Advanced search view.

    Either renders the advanced-search form (when ``a == '2'`` or the
    submitted form is invalid) or executes an Elasticsearch query across
    wiki documents, support questions, and discussion-forum threads.

    :arg request: the Django ``HttpRequest``; expects middleware-provided
        attributes ``IS_JSON``, ``CONTENT_TYPE``, ``JSON_CALLBACK`` and
        ``LANGUAGE_CODE`` (TODO confirm which middleware sets these).

    Returns an ``HttpResponse`` — JSON/JSONP when ``request.IS_JSON`` is
    truthy, otherwise a rendered HTML page with cache-control headers and
    a "last search" cookie set on results pages.
    """
    to_json = JSONRenderer().render
    template = 'search/results.html'

    # 1. Prep request.
    # Work on a mutable copy of the query dict so we can tweak params.
    r = request.GET.copy()
    # TODO: Figure out how to get rid of 'a' and do it.
    # It basically is used to switch between showing the form or results.
    a = request.GET.get('a', '2')
    # TODO: This is so the 'a=1' stays in the URL for pagination.
    r['a'] = 1

    language = locale_or_default(
        request.GET.get('language', request.LANGUAGE_CODE))
    r['language'] = language
    lang = language.lower()
    # Human-readable language name for display; empty string if unknown.
    lang_name = settings.LANGUAGES_DICT.get(lang) or ''

    # 2. Build form.
    search_form = AdvancedSearchForm(r, auto_id=False)
    search_form.set_allowed_forums(request.user)

    # 3. Validate request.
    # Note: a == 2 means "show the form"--that's all we use it for now.
    if a == '2' or not search_form.is_valid():
        if request.IS_JSON:
            # JSON clients get a 400 instead of the HTML form.
            return HttpResponse(json.dumps(
                {'error': _('Invalid search data.')}),
                content_type=request.CONTENT_TYPE,
                status=400)

        t = 'search/form.html'
        data = {
            'advanced': True,
            'request': request,
            'search_form': search_form
        }
        # get value for search input from last search term.
        last_search = request.COOKIES.get(settings.LAST_SEARCH_COOKIE)
        # If there is any cached input from last search, pass it to template
        if last_search and 'q' not in r:
            cached_field = urlquote(last_search)
            data.update({'cached_field': cached_field})
        return cache_control(render(request, t, data),
                             settings.SEARCH_CACHE_PERIOD)

    # 4. Generate search.
    cleaned = search_form.cleaned_data

    # We use a regular S here because we want to search across
    # multiple doctypes.
    searcher = (AnalyzerS().es(
        urls=settings.ES_URLS,
        timeout=settings.ES_TIMEOUT,
        use_ssl=settings.ES_USE_SSL,
        http_auth=settings.ES_HTTP_AUTH,
        connection_class=RequestsHttpConnection).indexes(
            es_utils.read_index('default')))

    doctypes = []
    final_filter = F()

    unix_now = int(time.time())
    # (field-name, dropdown option, date value) triples shared by the
    # question and discussion sections below.
    interval_filters = (('created', cleaned['created'],
                         cleaned['created_date']),
                        ('updated', cleaned['updated'],
                         cleaned['updated_date']))

    # Start - wiki search configuration
    if cleaned['w'] & constants.WHERE_WIKI:
        wiki_f = F(model='wiki_document')

        # Category filter
        if cleaned['category']:
            wiki_f &= F(document_category__in=cleaned['category'])

        # Locale filter
        wiki_f &= F(document_locale=language)

        # Product filter
        products = cleaned['product']
        for p in products:
            wiki_f &= F(product=p)

        # Topics filter
        topics = cleaned['topics']
        for t in topics:
            wiki_f &= F(topic=t)

        # Archived bit
        if not cleaned['include_archived']:
            wiki_f &= F(document_is_archived=False)

        # Apply sortby
        sortby = cleaned['sortby_documents']
        try:
            searcher = searcher.order_by(*constants.SORT_DOCUMENTS[sortby])
        except IndexError:
            # Skip index errors because they imply the user is sending us sortby values
            # that aren't valid.
            pass

        doctypes.append(DocumentMappingType.get_mapping_type_name())
        final_filter |= wiki_f
    # End - wiki search configuration

    # Start - support questions configuration
    if cleaned['w'] & constants.WHERE_SUPPORT:
        question_f = F(model='questions_question')

        # These filters are ternary, they can be either YES, NO, or OFF
        ternary_filters = ('is_locked', 'is_solved', 'has_answers',
                           'has_helpful', 'is_archived')
        d = dict(('question_%s' % filter_name,
                  _ternary_filter(cleaned[filter_name]))
                 for filter_name in ternary_filters
                 if cleaned[filter_name])
        if d:
            question_f &= F(**d)

        if cleaned['asked_by']:
            question_f &= F(question_creator=cleaned['asked_by'])

        if cleaned['answered_by']:
            question_f &= F(question_answer_creator=cleaned['answered_by'])

        # Comma-separated tag list; blank entries are skipped.
        q_tags = [t.strip() for t in cleaned['q_tags'].split(',')]
        for t in q_tags:
            if t:
                question_f &= F(question_tag=t)

        # Product filter
        products = cleaned['product']
        for p in products:
            question_f &= F(product=p)

        # Topics filter
        topics = cleaned['topics']
        for t in topics:
            question_f &= F(topic=t)

        # Note: num_voted (with a d) is a different field than num_votes
        # (with an s). The former is a dropdown and the latter is an
        # integer value.
        if cleaned['num_voted'] == constants.INTERVAL_BEFORE:
            # max(..., 0) guards against a negative vote count from input.
            question_f &= F(
                question_num_votes__lte=max(cleaned['num_votes'], 0))
        elif cleaned['num_voted'] == constants.INTERVAL_AFTER:
            question_f &= F(question_num_votes__gte=cleaned['num_votes'])

        # Apply sortby
        sortby = cleaned['sortby']
        try:
            searcher = searcher.order_by(*constants.SORT_QUESTIONS[sortby])
        except IndexError:
            # Skip index errors because they imply the user is sending us sortby values
            # that aren't valid.
            pass

        # Apply created and updated filters
        for filter_name, filter_option, filter_date in interval_filters:
            if filter_option == constants.INTERVAL_BEFORE:
                # "before": from epoch 0 up to the chosen date (clamped >= 0).
                before = {
                    filter_name + '__gte': 0,
                    filter_name + '__lte': max(filter_date, 0)
                }
                question_f &= F(**before)
            elif filter_option == constants.INTERVAL_AFTER:
                # "after": from the chosen date (clamped <= now) up to now.
                after = {
                    filter_name + '__gte': min(filter_date, unix_now),
                    filter_name + '__lte': unix_now
                }
                question_f &= F(**after)

        doctypes.append(QuestionMappingType.get_mapping_type_name())
        final_filter |= question_f
    # End - support questions configuration

    # Start - discussion forum configuration
    if cleaned['w'] & constants.WHERE_DISCUSSION:
        discussion_f = F(model='forums_thread')

        if cleaned['author']:
            discussion_f &= F(post_author_ord=cleaned['author'])

        if cleaned['thread_type']:
            if constants.DISCUSSION_STICKY in cleaned['thread_type']:
                discussion_f &= F(post_is_sticky=1)

            if constants.DISCUSSION_LOCKED in cleaned['thread_type']:
                discussion_f &= F(post_is_locked=1)

        # Only search forums this user is allowed to see.
        valid_forum_ids = [
            f.id for f in Forum.authorized_forums_for_user(request.user)
        ]

        forum_ids = None
        if cleaned['forum']:
            forum_ids = [f for f in cleaned['forum'] if f in valid_forum_ids]

        # If we removed all the forums they wanted to look at or if
        # they didn't specify, then we filter on the list of all
        # forums they're authorized to look at.
        if not forum_ids:
            forum_ids = valid_forum_ids

        discussion_f &= F(post_forum_id__in=forum_ids)

        # Apply created and updated filters
        for filter_name, filter_option, filter_date in interval_filters:
            if filter_option == constants.INTERVAL_BEFORE:
                before = {
                    filter_name + '__gte': 0,
                    filter_name + '__lte': max(filter_date, 0)
                }
                discussion_f &= F(**before)
            elif filter_option == constants.INTERVAL_AFTER:
                after = {
                    filter_name + '__gte': min(filter_date, unix_now),
                    filter_name + '__lte': unix_now
                }
                discussion_f &= F(**after)

        doctypes.append(ThreadMappingType.get_mapping_type_name())
        final_filter |= discussion_f
    # End - discussion forum configuration

    # Done with all the filtery stuff--time to generate results

    searcher = searcher.doctypes(*doctypes)
    searcher = searcher.filter(final_filter)

    # Debugging aid: ?explain=1 asks ES to explain scoring.
    if 'explain' in request.GET and request.GET['explain'] == '1':
        searcher = searcher.explain()

    cleaned_q = cleaned['q']

    # Set up the highlights. Show the entire field highlighted.
    searcher = searcher.highlight(
        'question_content',  # support forum
        'document_summary',  # kb
        'post_content',  # contributor forum
        pre_tags=['<b>'],
        post_tags=['</b>'],
        number_of_fragments=0)

    searcher = apply_boosts(searcher)

    # Build the query
    if cleaned_q:
        query_fields = chain(*[
            cls.get_query_fields() for cls in
            [DocumentMappingType, ThreadMappingType, QuestionMappingType]
        ])
        query = {}
        # Create a simple_query_search query for every field we want to search.
        for field in query_fields:
            query['%s__sqs' % field] = cleaned_q

        # Transform the query to use locale aware analyzers.
        query = es_utils.es_query_with_analyzer(query, language)

        searcher = searcher.query(should=True, **query)

    searcher = searcher[:settings.SEARCH_MAX_RESULTS]

    # 5. Generate output
    pages = paginate(request, searcher, settings.SEARCH_RESULTS_PER_PAGE)

    if pages.paginator.count == 0:
        # If we know there aren't any results, show fallback_results.
        fallback_results = _fallback_results(language, cleaned['product'])
        results = []
    else:
        fallback_results = None
        results = build_results_list(pages, request.IS_JSON)

    # NOTE(review): ``items`` is built and appended to but never read
    # below in this function — possibly leftover; confirm before removing.
    items = [(k, v) for k in search_form.fields for v in r.getlist(k)
             if v and k != 'a']
    items.append(('a', '2'))

    product = Product.objects.filter(slug__in=cleaned['product'])
    if product:
        product_titles = [
            pgettext('DB: products.Product.title', p.title) for p in product
        ]
    else:
        product_titles = [_('All Products')]

    # FIXME: This is probably bad l10n.
    product_titles = ', '.join(product_titles)

    data = {
        'num_results': pages.paginator.count,
        'results': results,
        'fallback_results': fallback_results,
        'product_titles': product_titles,
        'q': cleaned['q'],
        'w': cleaned['w'],
        'lang_name': lang_name,
        'advanced': True,
        'products': Product.objects.filter(visible=True)
    }

    if request.IS_JSON:
        data['total'] = len(data['results'])
        data['products'] = [{
            'slug': p.slug,
            'title': p.title
        } for p in data['products']]

        if product:
            data['product'] = product[0].slug

        # Rebind: wrap the page object in the project's Paginator helper
        # to expose pager metadata for the JSON payload.
        pages = Paginator(pages)
        data['pagination'] = dict(
            number=pages.pager.number,
            num_pages=pages.pager.paginator.num_pages,
            has_next=pages.pager.has_next(),
            has_previous=pages.pager.has_previous(),
            max=pages.max,
            span=pages.span,
            dotted_upper=pages.pager.dotted_upper,
            dotted_lower=pages.pager.dotted_lower,
            page_range=pages.pager.page_range,
            url=pages.pager.url,
        )

        if not results:
            data['message'] = _('No pages matched the search criteria')

        json_data = to_json(data)
        if request.JSON_CALLBACK:
            # JSONP: wrap the payload in the caller-supplied callback.
            json_data = request.JSON_CALLBACK + '(' + json_data + ');'
        return HttpResponse(json_data, content_type=request.CONTENT_TYPE)

    data.update({
        'product': product,
        'pages': pages,
        'search_form': search_form
    })
    resp = cache_control(render(request, template, data),
                         settings.SEARCH_CACHE_PERIOD)
    # Remember the query so the form can be pre-filled next time.
    resp.set_cookie(settings.LAST_SEARCH_COOKIE, urlquote(cleaned['q']),
                    max_age=3600,
                    secure=False,
                    httponly=False)
    return resp
def advanced_search(request, template=None):
    """Elasticsearch-specific Advanced search view.

    Variant that accepts a ``template`` argument (used for the form page
    when ``request.MOBILE`` is truthy, and for the final results render)
    and defaults mobile basic searches to wiki-only results.

    NOTE(review): this definition shares its name with the preceding
    ``advanced_search``; if both live in the same module, this one
    shadows the first — confirm which is intended to survive.

    :arg request: the Django ``HttpRequest``; expects middleware-provided
        attributes ``IS_JSON``, ``CONTENT_TYPE``, ``JSON_CALLBACK``,
        ``MOBILE`` and ``LANGUAGE_CODE`` (TODO confirm middleware).
    :arg template: template path for mobile form / results rendering.

    Returns an ``HttpResponse`` — JSON/JSONP when ``request.IS_JSON`` is
    truthy, otherwise a rendered HTML page with cache-control headers and
    a "last search" cookie set on results pages.
    """
    to_json = JSONRenderer().render

    # 1. Prep request.
    # Work on a mutable copy of the query dict so we can tweak params.
    r = request.GET.copy()
    # TODO: Figure out how to get rid of 'a' and do it.
    # It basically is used to switch between showing the form or results.
    a = request.GET.get('a', '2')
    # TODO: This is so the 'a=1' stays in the URL for pagination.
    r['a'] = 1

    language = locale_or_default(request.GET.get('language',
                                                 request.LANGUAGE_CODE))
    r['language'] = language
    lang = language.lower()
    # Human-readable language name for display; empty string if unknown.
    lang_name = settings.LANGUAGES_DICT.get(lang) or ''

    # 2. Build form.
    search_form = AdvancedSearchForm(r, auto_id=False)
    search_form.set_allowed_forums(request.user)

    # 3. Validate request.
    # Note: a == 2 means "show the form"--that's all we use it for now.
    if a == '2' or not search_form.is_valid():
        if request.IS_JSON:
            # JSON clients get a 400 instead of the HTML form.
            return HttpResponse(
                json.dumps({'error': _('Invalid search data.')}),
                content_type=request.CONTENT_TYPE,
                status=400)

        t = template if request.MOBILE else 'search/form.html'
        data = {'advanced': True,
                'request': request,
                'search_form': search_form}
        # get value for search input from last search term.
        last_search = request.COOKIES.get(settings.LAST_SEARCH_COOKIE)
        # If there is any cached input from last search, pass it to template
        if last_search and 'q' not in r:
            cached_field = urlquote(last_search)
            data.update({'cached_field': cached_field})
        return cache_control(
            render(request, t, data), settings.SEARCH_CACHE_PERIOD)

    # 4. Generate search.
    cleaned = search_form.cleaned_data

    # On mobile, we default to just wiki results.
    if request.MOBILE and cleaned['w'] == constants.WHERE_BASIC:
        cleaned['w'] = constants.WHERE_WIKI

    # We use a regular S here because we want to search across
    # multiple doctypes.
    searcher = (AnalyzerS().es(urls=settings.ES_URLS)
                .indexes(es_utils.read_index('default')))

    doctypes = []
    final_filter = F()

    unix_now = int(time.time())
    # (field-name, dropdown option, date value) triples shared by the
    # question and discussion sections below.
    interval_filters = (
        ('created', cleaned['created'], cleaned['created_date']),
        ('updated', cleaned['updated'], cleaned['updated_date'])
    )

    # Start - wiki search configuration
    if cleaned['w'] & constants.WHERE_WIKI:
        wiki_f = F(model='wiki_document')

        # Category filter
        if cleaned['category']:
            wiki_f &= F(document_category__in=cleaned['category'])

        # Locale filter
        wiki_f &= F(document_locale=language)

        # Product filter
        products = cleaned['product']
        for p in products:
            wiki_f &= F(product=p)

        # Topics filter
        topics = cleaned['topics']
        for t in topics:
            wiki_f &= F(topic=t)

        # Archived bit
        if not cleaned['include_archived']:
            wiki_f &= F(document_is_archived=False)

        # Apply sortby
        sortby = cleaned['sortby_documents']
        try:
            searcher = searcher.order_by(*constants.SORT_DOCUMENTS[sortby])
        except IndexError:
            # Skip index errors because they imply the user is sending us sortby values
            # that aren't valid.
            pass

        doctypes.append(DocumentMappingType.get_mapping_type_name())
        final_filter |= wiki_f
    # End - wiki search configuration

    # Start - support questions configuration
    if cleaned['w'] & constants.WHERE_SUPPORT:
        question_f = F(model='questions_question')

        # These filters are ternary, they can be either YES, NO, or OFF
        ternary_filters = ('is_locked', 'is_solved', 'has_answers',
                           'has_helpful', 'is_archived')
        d = dict(('question_%s' % filter_name,
                  _ternary_filter(cleaned[filter_name]))
                 for filter_name in ternary_filters
                 if cleaned[filter_name])
        if d:
            question_f &= F(**d)

        if cleaned['asked_by']:
            question_f &= F(question_creator=cleaned['asked_by'])

        if cleaned['answered_by']:
            question_f &= F(question_answer_creator=cleaned['answered_by'])

        # Comma-separated tag list; blank entries are skipped.
        q_tags = [t.strip() for t in cleaned['q_tags'].split(',')]
        for t in q_tags:
            if t:
                question_f &= F(question_tag=t)

        # Product filter
        products = cleaned['product']
        for p in products:
            question_f &= F(product=p)

        # Topics filter
        topics = cleaned['topics']
        for t in topics:
            question_f &= F(topic=t)

        # Note: num_voted (with a d) is a different field than num_votes
        # (with an s). The former is a dropdown and the latter is an
        # integer value.
        if cleaned['num_voted'] == constants.INTERVAL_BEFORE:
            # max(..., 0) guards against a negative vote count from input.
            question_f &= F(question_num_votes__lte=max(cleaned['num_votes'],
                                                        0))
        elif cleaned['num_voted'] == constants.INTERVAL_AFTER:
            question_f &= F(question_num_votes__gte=cleaned['num_votes'])

        # Apply sortby
        sortby = cleaned['sortby']
        try:
            searcher = searcher.order_by(*constants.SORT_QUESTIONS[sortby])
        except IndexError:
            # Skip index errors because they imply the user is sending us sortby values
            # that aren't valid.
            pass

        # Apply created and updated filters
        for filter_name, filter_option, filter_date in interval_filters:
            if filter_option == constants.INTERVAL_BEFORE:
                # "before": from epoch 0 up to the chosen date (clamped >= 0).
                before = {filter_name + '__gte': 0,
                          filter_name + '__lte': max(filter_date, 0)}
                question_f &= F(**before)
            elif filter_option == constants.INTERVAL_AFTER:
                # "after": from the chosen date (clamped <= now) up to now.
                after = {filter_name + '__gte': min(filter_date, unix_now),
                         filter_name + '__lte': unix_now}
                question_f &= F(**after)

        doctypes.append(QuestionMappingType.get_mapping_type_name())
        final_filter |= question_f
    # End - support questions configuration

    # Start - discussion forum configuration
    if cleaned['w'] & constants.WHERE_DISCUSSION:
        discussion_f = F(model='forums_thread')

        if cleaned['author']:
            discussion_f &= F(post_author_ord=cleaned['author'])

        if cleaned['thread_type']:
            if constants.DISCUSSION_STICKY in cleaned['thread_type']:
                discussion_f &= F(post_is_sticky=1)

            if constants.DISCUSSION_LOCKED in cleaned['thread_type']:
                discussion_f &= F(post_is_locked=1)

        # Only search forums this user is allowed to see.
        valid_forum_ids = [f.id for f in
                           Forum.authorized_forums_for_user(request.user)]

        forum_ids = None
        if cleaned['forum']:
            forum_ids = [f for f in cleaned['forum'] if f in valid_forum_ids]

        # If we removed all the forums they wanted to look at or if
        # they didn't specify, then we filter on the list of all
        # forums they're authorized to look at.
        if not forum_ids:
            forum_ids = valid_forum_ids

        discussion_f &= F(post_forum_id__in=forum_ids)

        # Apply created and updated filters
        for filter_name, filter_option, filter_date in interval_filters:
            if filter_option == constants.INTERVAL_BEFORE:
                before = {filter_name + '__gte': 0,
                          filter_name + '__lte': max(filter_date, 0)}
                discussion_f &= F(**before)
            elif filter_option == constants.INTERVAL_AFTER:
                after = {filter_name + '__gte': min(filter_date, unix_now),
                         filter_name + '__lte': unix_now}
                discussion_f &= F(**after)

        doctypes.append(ThreadMappingType.get_mapping_type_name())
        final_filter |= discussion_f
    # End - discussion forum configuration

    # Done with all the filtery stuff--time to generate results

    searcher = searcher.doctypes(*doctypes)
    searcher = searcher.filter(final_filter)

    # Debugging aid: ?explain=1 asks ES to explain scoring.
    if 'explain' in request.GET and request.GET['explain'] == '1':
        searcher = searcher.explain()

    cleaned_q = cleaned['q']

    # Set up the highlights. Show the entire field highlighted.
    searcher = searcher.highlight(
        'question_content',  # support forum
        'document_summary',  # kb
        'post_content',  # contributor forum
        pre_tags=['<b>'],
        post_tags=['</b>'],
        number_of_fragments=0)

    searcher = apply_boosts(searcher)

    # Build the query
    if cleaned_q:
        query_fields = chain(*[
            cls.get_query_fields() for cls in [
                DocumentMappingType,
                ThreadMappingType,
                QuestionMappingType
            ]
        ])
        query = {}
        # Create a simple_query_search query for every field we want to search.
        for field in query_fields:
            query['%s__sqs' % field] = cleaned_q

        # Transform the query to use locale aware analyzers.
        query = es_utils.es_query_with_analyzer(query, language)

        searcher = searcher.query(should=True, **query)

    searcher = searcher[:settings.SEARCH_MAX_RESULTS]

    # 5. Generate output
    pages = paginate(request, searcher, settings.SEARCH_RESULTS_PER_PAGE)

    if pages.paginator.count == 0:
        # If we know there aren't any results, show fallback_results.
        fallback_results = _fallback_results(language, cleaned['product'])
        results = []
    else:
        fallback_results = None
        results = build_results_list(pages, request.IS_JSON)

    # NOTE(review): ``items`` is built and appended to but never read
    # below in this function — possibly leftover; confirm before removing.
    items = [(k, v) for k in search_form.fields for v in r.getlist(k)
             if v and k != 'a']
    items.append(('a', '2'))

    product = Product.objects.filter(slug__in=cleaned['product'])
    if product:
        product_titles = [pgettext('DB: products.Product.title', p.title)
                          for p in product]
    else:
        product_titles = [_('All Products')]

    # FIXME: This is probably bad l10n.
    product_titles = ', '.join(product_titles)

    data = {
        'num_results': pages.paginator.count,
        'results': results,
        'fallback_results': fallback_results,
        'product_titles': product_titles,
        'q': cleaned['q'],
        'w': cleaned['w'],
        'lang_name': lang_name,
        'advanced': True,
        'products': Product.objects.filter(visible=True)
    }

    if request.IS_JSON:
        data['total'] = len(data['results'])
        data['products'] = [{'slug': p.slug, 'title': p.title}
                            for p in data['products']]

        if product:
            data['product'] = product[0].slug

        # Rebind: wrap the page object in the project's Paginator helper
        # to expose pager metadata for the JSON payload.
        pages = Paginator(pages)
        data['pagination'] = dict(
            number=pages.pager.number,
            num_pages=pages.pager.paginator.num_pages,
            has_next=pages.pager.has_next(),
            has_previous=pages.pager.has_previous(),
            max=pages.max,
            span=pages.span,
            dotted_upper=pages.pager.dotted_upper,
            dotted_lower=pages.pager.dotted_lower,
            page_range=pages.pager.page_range,
            url=pages.pager.url,
        )

        if not results:
            data['message'] = _('No pages matched the search criteria')

        json_data = to_json(data)
        if request.JSON_CALLBACK:
            # JSONP: wrap the payload in the caller-supplied callback.
            json_data = request.JSON_CALLBACK + '(' + json_data + ');'
        return HttpResponse(json_data, content_type=request.CONTENT_TYPE)

    data.update({
        'product': product,
        'pages': pages,
        'search_form': search_form
    })
    resp = cache_control(render(request, template, data),
                         settings.SEARCH_CACHE_PERIOD)
    # Remember the query so the form can be pre-filled next time.
    resp.set_cookie(settings.LAST_SEARCH_COOKIE, urlquote(cleaned['q']),
                    max_age=3600,
                    secure=False,
                    httponly=False)
    return resp