def top_contributors_new(request, area):
    """Render the React-based top contributors page for *area*.

    ``area`` picks the data source: "questions" uses the questions
    contributor API and question locales; "l10n" uses the localization
    contributor API and all SUMO languages. Any other value is a 404.
    """
    render_json = JSONRenderer().render

    # Pick the API endpoint and the locale codes relevant to this area.
    if area == "questions":
        endpoint_cls = api.TopContributorsQuestions
        locale_codes = QuestionLocale.objects.locales_list()
    elif area == "l10n":
        endpoint_cls = api.TopContributorsLocalization
        locale_codes = settings.SUMO_LANGUAGES
    else:
        raise Http404

    # (english name, code) pairs, alphabetized by english name.
    locales = sorted((settings.LOCALES[code].english, code) for code in locale_codes)

    # Default the locale filter to the request's language when it is a
    # supported non-English locale; explicit GET parameters still win
    # because they are merged in on top of the default.
    supported_codes = [code for _, code in locales]
    if request.LANGUAGE_CODE != "en-US" and request.LANGUAGE_CODE in supported_codes:
        merged_params = {"locale": request.LANGUAGE_CODE}
        merged_params.update(request.GET)
        request.GET = merged_params

    contributors = endpoint_cls().get_data(request)
    visible_products = ProductSerializer(Product.objects.filter(visible=True), many=True)

    context = {
        "area": area,
        "contributors_json": render_json(contributors),
        "locales_json": render_json(locales),
        "products_json": render_json(visible_products.data),
    }
    return render(request, "community/top_contributors_react.html", context)
def template_json(data):
    """Serialize *data* to a JSON ``str`` suitable for templates.

    Normally json is rendered as bytes, but putting bytes in a template
    that is itself encoded to bytes causes problems when trying to
    re-read the json from the template (with javascript), so the
    rendered bytes are decoded to a ``str`` before being returned.

    :arg data: the data to jsonify.

    :returns: str
    """
    rendered_bytes = JSONRenderer().render(data)
    return rendered_bytes.decode()
def advanced_search(request):
    """Elasticsearch-specific Advanced search view.

    Builds a cross-doctype search (wiki documents, support questions,
    discussion forum threads) from the AdvancedSearchForm, then renders
    either an HTML results page or a JSON/JSONP payload depending on
    ``request.IS_JSON``.
    """
    to_json = JSONRenderer().render
    template = 'search/results.html'

    # 1. Prep request.
    r = request.GET.copy()
    # TODO: Figure out how to get rid of 'a' and do it.
    # It basically is used to switch between showing the form or results.
    a = request.GET.get('a', '2')
    # TODO: This is so the 'a=1' stays in the URL for pagination.
    r['a'] = 1

    language = locale_or_default(
        request.GET.get('language', request.LANGUAGE_CODE))
    r['language'] = language
    lang = language.lower()
    lang_name = settings.LANGUAGES_DICT.get(lang) or ''

    # 2. Build form.
    search_form = AdvancedSearchForm(r, auto_id=False)
    search_form.set_allowed_forums(request.user)

    # 3. Validate request.
    # Note: a == 2 means "show the form"--that's all we use it for now.
    if a == '2' or not search_form.is_valid():
        if request.IS_JSON:
            # JSON clients get a 400 instead of the HTML form.
            return HttpResponse(json.dumps(
                {'error': _('Invalid search data.')}),
                content_type=request.CONTENT_TYPE,
                status=400)

        t = 'search/form.html'
        data = {
            'advanced': True,
            'request': request,
            'search_form': search_form
        }
        # get value for search input from last search term.
        last_search = request.COOKIES.get(settings.LAST_SEARCH_COOKIE)
        # If there is any cached input from last search, pass it to template
        if last_search and 'q' not in r:
            cached_field = urlquote(last_search)
            data.update({'cached_field': cached_field})

        return cache_control(
            render(request, t, data), settings.SEARCH_CACHE_PERIOD)

    # 4. Generate search.
    cleaned = search_form.cleaned_data

    # We use a regular S here because we want to search across
    # multiple doctypes.
    searcher = (AnalyzerS().es(
        urls=settings.ES_URLS,
        timeout=settings.ES_TIMEOUT,
        use_ssl=settings.ES_USE_SSL,
        http_auth=settings.ES_HTTP_AUTH,
        connection_class=RequestsHttpConnection).indexes(
            es_utils.read_index('default')))

    doctypes = []
    final_filter = F()
    unix_now = int(time.time())
    # (field prefix, interval option, cutoff timestamp) triples shared by
    # the questions and discussion sections below.
    interval_filters = (
        ('created', cleaned['created'], cleaned['created_date']),
        ('updated', cleaned['updated'], cleaned['updated_date']))

    # Start - wiki search configuration
    if cleaned['w'] & constants.WHERE_WIKI:
        wiki_f = F(model='wiki_document')

        # Category filter
        if cleaned['category']:
            wiki_f &= F(document_category__in=cleaned['category'])

        # Locale filter
        wiki_f &= F(document_locale=language)

        # Product filter
        products = cleaned['product']
        for p in products:
            wiki_f &= F(product=p)

        # Topics filter
        topics = cleaned['topics']
        for t in topics:
            wiki_f &= F(topic=t)

        # Archived bit
        if not cleaned['include_archived']:
            wiki_f &= F(document_is_archived=False)

        # Apply sortby
        sortby = cleaned['sortby_documents']
        try:
            searcher = searcher.order_by(*constants.SORT_DOCUMENTS[sortby])
        except IndexError:
            # Skip index errors because they imply the user is sending us
            # sortby values that aren't valid.
            # NOTE(review): if SORT_DOCUMENTS is a dict, an invalid key
            # raises KeyError, which this does not catch -- confirm the
            # container type in constants.
            pass

        doctypes.append(DocumentMappingType.get_mapping_type_name())
        final_filter |= wiki_f
    # End - wiki search configuration

    # Start - support questions configuration
    if cleaned['w'] & constants.WHERE_SUPPORT:
        question_f = F(model='questions_question')

        # These filters are ternary, they can be either YES, NO, or OFF
        ternary_filters = ('is_locked', 'is_solved', 'has_answers',
                           'has_helpful', 'is_archived')
        d = dict(('question_%s' % filter_name,
                  _ternary_filter(cleaned[filter_name]))
                 for filter_name in ternary_filters
                 if cleaned[filter_name])
        if d:
            question_f &= F(**d)

        if cleaned['asked_by']:
            question_f &= F(question_creator=cleaned['asked_by'])

        if cleaned['answered_by']:
            question_f &= F(question_answer_creator=cleaned['answered_by'])

        # Comma-separated tags; blank entries are skipped.
        q_tags = [t.strip() for t in cleaned['q_tags'].split(',')]
        for t in q_tags:
            if t:
                question_f &= F(question_tag=t)

        # Product filter
        products = cleaned['product']
        for p in products:
            question_f &= F(product=p)

        # Topics filter
        topics = cleaned['topics']
        for t in topics:
            question_f &= F(topic=t)

        # Note: num_voted (with a d) is a different field than num_votes
        # (with an s). The former is a dropdown and the latter is an
        # integer value.
        if cleaned['num_voted'] == constants.INTERVAL_BEFORE:
            question_f &= F(
                question_num_votes__lte=max(cleaned['num_votes'], 0))
        elif cleaned['num_voted'] == constants.INTERVAL_AFTER:
            question_f &= F(question_num_votes__gte=cleaned['num_votes'])

        # Apply sortby
        sortby = cleaned['sortby']
        try:
            searcher = searcher.order_by(*constants.SORT_QUESTIONS[sortby])
        except IndexError:
            # Skip index errors because they imply the user is sending us
            # sortby values that aren't valid.
            pass

        # Apply created and updated filters
        for filter_name, filter_option, filter_date in interval_filters:
            if filter_option == constants.INTERVAL_BEFORE:
                # "Before": clamp to [0, filter_date].
                before = {
                    filter_name + '__gte': 0,
                    filter_name + '__lte': max(filter_date, 0)
                }
                question_f &= F(**before)
            elif filter_option == constants.INTERVAL_AFTER:
                # "After": clamp to [filter_date, now].
                after = {
                    filter_name + '__gte': min(filter_date, unix_now),
                    filter_name + '__lte': unix_now
                }
                question_f &= F(**after)

        doctypes.append(QuestionMappingType.get_mapping_type_name())
        final_filter |= question_f
    # End - support questions configuration

    # Start - discussion forum configuration
    if cleaned['w'] & constants.WHERE_DISCUSSION:
        discussion_f = F(model='forums_thread')

        if cleaned['author']:
            discussion_f &= F(post_author_ord=cleaned['author'])

        if cleaned['thread_type']:
            if constants.DISCUSSION_STICKY in cleaned['thread_type']:
                discussion_f &= F(post_is_sticky=1)

            if constants.DISCUSSION_LOCKED in cleaned['thread_type']:
                discussion_f &= F(post_is_locked=1)

        # Only forums this user may read.
        valid_forum_ids = [
            f.id for f in Forum.authorized_forums_for_user(request.user)
        ]

        forum_ids = None
        if cleaned['forum']:
            forum_ids = [f for f in cleaned['forum'] if f in valid_forum_ids]

        # If we removed all the forums they wanted to look at or if
        # they didn't specify, then we filter on the list of all
        # forums they're authorized to look at.
        if not forum_ids:
            forum_ids = valid_forum_ids

        discussion_f &= F(post_forum_id__in=forum_ids)

        # Apply created and updated filters
        for filter_name, filter_option, filter_date in interval_filters:
            if filter_option == constants.INTERVAL_BEFORE:
                before = {
                    filter_name + '__gte': 0,
                    filter_name + '__lte': max(filter_date, 0)
                }
                discussion_f &= F(**before)
            elif filter_option == constants.INTERVAL_AFTER:
                after = {
                    filter_name + '__gte': min(filter_date, unix_now),
                    filter_name + '__lte': unix_now
                }
                discussion_f &= F(**after)

        doctypes.append(ThreadMappingType.get_mapping_type_name())
        final_filter |= discussion_f
    # End - discussion forum configuration

    # Done with all the filtery stuff--time to generate results

    searcher = searcher.doctypes(*doctypes)
    searcher = searcher.filter(final_filter)

    # Debug aid: ?explain=1 asks ES to explain scoring.
    if 'explain' in request.GET and request.GET['explain'] == '1':
        searcher = searcher.explain()

    cleaned_q = cleaned['q']

    # Set up the highlights. Show the entire field highlighted.
    searcher = searcher.highlight(
        'question_content',  # support forum
        'document_summary',  # kb
        'post_content',  # contributor forum
        pre_tags=['<b>'],
        post_tags=['</b>'],
        number_of_fragments=0)

    searcher = apply_boosts(searcher)

    # Build the query
    if cleaned_q:
        query_fields = chain(*[
            cls.get_query_fields() for cls in
            [DocumentMappingType, ThreadMappingType, QuestionMappingType]
        ])
        query = {}
        # Create a simple_query_search query for every field we want to
        # search.
        for field in query_fields:
            query['%s__sqs' % field] = cleaned_q

        # Transform the query to use locale aware analyzers.
        query = es_utils.es_query_with_analyzer(query, language)

        searcher = searcher.query(should=True, **query)

    searcher = searcher[:settings.SEARCH_MAX_RESULTS]

    # 5. Generate output
    pages = paginate(request, searcher, settings.SEARCH_RESULTS_PER_PAGE)

    if pages.paginator.count == 0:
        # If we know there aren't any results, show fallback_results.
        fallback_results = _fallback_results(language, cleaned['product'])
        results = []
    else:
        fallback_results = None
        results = build_results_list(pages, request.IS_JSON)

    # NOTE(review): `items` is built and appended to but never read below
    # -- looks like dead code left over from an earlier refresh-URL
    # feature; confirm before removing.
    items = [(k, v) for k in search_form.fields
             for v in r.getlist(k) if v and k != 'a']
    items.append(('a', '2'))

    product = Product.objects.filter(slug__in=cleaned['product'])
    if product:
        product_titles = [
            pgettext('DB: products.Product.title', p.title) for p in product
        ]
    else:
        product_titles = [_('All Products')]

    # FIXME: This is probably bad l10n.
    product_titles = ', '.join(product_titles)

    data = {
        'num_results': pages.paginator.count,
        'results': results,
        'fallback_results': fallback_results,
        'product_titles': product_titles,
        'q': cleaned['q'],
        'w': cleaned['w'],
        'lang_name': lang_name,
        'advanced': True,
        'products': Product.objects.filter(visible=True)
    }

    if request.IS_JSON:
        data['total'] = len(data['results'])
        # Replace the queryset with JSON-serializable dicts.
        data['products'] = [{
            'slug': p.slug,
            'title': p.title
        } for p in data['products']]

        if product:
            data['product'] = product[0].slug

        # Rebind `pages` to the project Paginator wrapper for JSON output.
        pages = Paginator(pages)
        data['pagination'] = dict(
            number=pages.pager.number,
            num_pages=pages.pager.paginator.num_pages,
            has_next=pages.pager.has_next(),
            has_previous=pages.pager.has_previous(),
            max=pages.max,
            span=pages.span,
            dotted_upper=pages.pager.dotted_upper,
            dotted_lower=pages.pager.dotted_lower,
            page_range=pages.pager.page_range,
            url=pages.pager.url,
        )

        if not results:
            data['message'] = _('No pages matched the search criteria')

        json_data = to_json(data)
        if request.JSON_CALLBACK:
            # NOTE(review): JSONRenderer().render returns bytes; on
            # Python 3 this str + bytes concatenation would raise
            # TypeError -- verify the JSONP path still works.
            json_data = request.JSON_CALLBACK + '(' + json_data + ');'
        return HttpResponse(json_data, content_type=request.CONTENT_TYPE)

    # HTML path: hand the queryset/paginator/form to the template.
    data.update({
        'product': product,
        'pages': pages,
        'search_form': search_form
    })
    resp = cache_control(
        render(request, template, data), settings.SEARCH_CACHE_PERIOD)
    # Remember the query so the form can be pre-filled next time.
    resp.set_cookie(settings.LAST_SEARCH_COOKIE,
                    urlquote(cleaned['q']),
                    max_age=3600,
                    secure=False,
                    httponly=False)
    return resp
def simple_search(request):
    """Elasticsearch-specific simple search view.

    This view is for end user searching of the Knowledge Base and
    Support Forum. Filtering options are limited to:
    * product (`product=firefox`, for example, for only Firefox results)
    * document type (`w=2`, for example, for Support Forum questions only)

    Responds with HTML results, or JSON/JSONP when ``request.IS_JSON``.
    """
    to_json = JSONRenderer().render
    template = 'search/results.html'

    # 1. Prep request.
    # Redirect to old Advanced Search URLs (?a={1,2}) to the new URL.
    if request.GET.get('a') in ['1', '2']:
        new_url = reverse('search.advanced') + '?' + request.GET.urlencode()
        return HttpResponseRedirect(new_url)

    # 2. Build form.
    search_form = SimpleSearchForm(request.GET, auto_id=False)

    # 3. Validate request.
    if not search_form.is_valid():
        if request.IS_JSON:
            # JSON clients get a 400 instead of the HTML form.
            return HttpResponse(json.dumps(
                {'error': _('Invalid search data.')}),
                content_type=request.CONTENT_TYPE,
                status=400)

        t = 'search/form.html'
        return cache_control(
            render(request, t, {
                'advanced': False,
                'request': request,
                'search_form': search_form
            }), settings.SEARCH_CACHE_PERIOD)

    # 4. Generate search.
    cleaned = search_form.cleaned_data

    # Explicit ?language= wins over the request's language.
    language = locale_or_default(cleaned['language'] or request.LANGUAGE_CODE)
    lang_name = settings.LANGUAGES_DICT.get(language.lower()) or ''

    searcher = generate_simple_search(search_form, language,
                                      with_highlights=True)
    searcher = searcher[:settings.SEARCH_MAX_RESULTS]

    # 5. Generate output.
    pages = paginate(request, searcher, settings.SEARCH_RESULTS_PER_PAGE)

    if pages.paginator.count == 0:
        # No hits: offer fallback results instead.
        fallback_results = _fallback_results(language, cleaned['product'])
        results = []
    else:
        fallback_results = None
        results = build_results_list(pages, request.IS_JSON)

    product = Product.objects.filter(slug__in=cleaned['product'])
    if product:
        product_titles = [
            pgettext('DB: products.Product.title', p.title) for p in product
        ]
    else:
        product_titles = [_('All Products')]

    # FIXME: This is probably bad l10n.
    product_titles = ', '.join(product_titles)

    data = {
        'num_results': pages.paginator.count,
        'results': results,
        'fallback_results': fallback_results,
        'product_titles': product_titles,
        'q': cleaned['q'],
        'w': cleaned['w'],
        'lang_name': lang_name,
        'products': Product.objects.filter(visible=True)
    }

    if request.IS_JSON:
        data['total'] = len(data['results'])
        # Replace the queryset with JSON-serializable dicts.
        data['products'] = [{
            'slug': p.slug,
            'title': p.title
        } for p in data['products']]

        if product:
            data['product'] = product[0].slug

        # Rebind `pages` to the project Paginator wrapper for JSON output.
        pages = Paginator(pages)
        data['pagination'] = dict(
            number=pages.pager.number,
            num_pages=pages.pager.paginator.num_pages,
            has_next=pages.pager.has_next(),
            has_previous=pages.pager.has_previous(),
            max=pages.max,
            span=pages.span,
            dotted_upper=pages.pager.dotted_upper,
            dotted_lower=pages.pager.dotted_lower,
            page_range=pages.pager.page_range,
            url=pages.pager.url,
        )

        if not results:
            data['message'] = _('No pages matched the search criteria')

        json_data = to_json(data)
        if request.JSON_CALLBACK:
            # NOTE(review): JSONRenderer().render returns bytes; on
            # Python 3 this str + bytes concatenation would raise
            # TypeError -- verify the JSONP path still works.
            json_data = request.JSON_CALLBACK + '(' + json_data + ');'
        return HttpResponse(json_data, content_type=request.CONTENT_TYPE)

    # HTML path: hand the queryset/paginator/form to the template.
    data.update({
        'product': product,
        'pages': pages,
        'search_form': search_form,
        'advanced': False,
    })
    resp = cache_control(
        render(request, template, data), settings.SEARCH_CACHE_PERIOD)
    # Remember the query so the form can be pre-filled next time.
    resp.set_cookie(settings.LAST_SEARCH_COOKIE,
                    urlquote(cleaned['q']),
                    max_age=3600,
                    secure=False,
                    httponly=False)
    return resp
def simple_search(request):
    """JSON-only simple search endpoint backed by CompoundSearch.

    Searches the Knowledge Base and/or the Support Forum (selected by
    the ``w`` bitmask in the query string) and always responds with a
    JSON payload, including pagination metadata for instant search.
    """
    form = SimpleSearchForm(request.GET, auto_id=False)
    if not form.is_valid():
        error_body = json.dumps({"error": _("Invalid search data.")})
        return HttpResponse(error_body, content_type="application/json", status=400)

    cleaned = form.cleaned_data

    # Locale to search in: explicit ?language= wins over request language.
    language = locale_or_default(cleaned["language"] or request.LANGUAGE_CODE)
    lang_name = settings.LANGUAGES_DICT.get(language.lower()) or ""

    # Product filter plus the human-readable titles that go with it.
    product, product_titles = _get_product_title(cleaned["product"])

    # Requested page number; anything non-numeric falls back to page 1.
    raw_page = request.GET.get("page", 1)
    try:
        page = int(raw_page)
    except ValueError:
        page = 1

    # Assemble the search from the doctypes the "w" bitmask selects.
    search = CompoundSearch(locale=language, product=product)
    if cleaned["w"] & constants.WHERE_WIKI:
        search.add(WikiSearch)
    if cleaned["w"] & constants.WHERE_SUPPORT:
        search.add(QuestionSearch)

    search.run(cleaned["q"], page=page)
    total = search.total
    results = search.results

    # Offer fallback results only when the search came up empty.
    fallback_results = _fallback_results(language, cleaned["product"]) if total == 0 else None

    visible_products = [
        {"slug": p.slug, "title": pgettext("DB: products.Product.title", p.title)}
        for p in Product.objects.filter(visible=True)
    ]

    # Results dictionary for instant search.
    data = {
        "num_results": total,
        "total": total,
        "results": results,
        "fallback_results": fallback_results,
        "product_titles": product_titles,
        "q": cleaned["q"],
        "w": cleaned["w"],
        "lang_name": lang_name,
        "products": visible_products,
        "pagination": _make_pagination(page, total),
    }
    if product:
        data["product"] = product.slug
    if not results:
        data["message"] = constants.NO_MATCH

    return HttpResponse(JSONRenderer().render(data), content_type="application/json")
def simple_search(request):
    """Elasticsearch-specific simple search view.

    This view is for end user searching of the Knowledge Base and
    Support Forum. Filtering options are limited to:
    * product (`product=firefox`, for example, for only Firefox results)
    * document type (`w=2`, for example, for Support Forum questions only)

    Responds with HTML results, or JSON/JSONP when ``request.IS_JSON``.
    """
    to_json = JSONRenderer().render
    template = "search/results.html"

    # 1. Prep request.
    # Redirect to old Advanced Search URLs (?a={1,2}) to the new URL.
    if request.GET.get("a") in ["1", "2"]:
        new_url = reverse("search.advanced") + "?" + request.GET.urlencode()
        return HttpResponseRedirect(new_url)

    # 2. Build form.
    search_form = SimpleSearchForm(request.GET, auto_id=False)

    # 3. Validate request.
    if not search_form.is_valid():
        if request.IS_JSON:
            # JSON clients get a 400 instead of the HTML form.
            return HttpResponse(
                json.dumps({"error": _("Invalid search data.")}),
                content_type=request.CONTENT_TYPE,
                status=400,
            )

        t = "search/form.html"
        return cache_control(
            render(request, t, {
                "advanced": False,
                "request": request,
                "search_form": search_form
            }),
            settings.SEARCH_CACHE_PERIOD,
        )

    # 4. Generate search.
    cleaned = search_form.cleaned_data

    # Explicit ?language= wins over the request's language.
    language = locale_or_default(cleaned["language"] or request.LANGUAGE_CODE)
    lang_name = settings.LANGUAGES_DICT.get(language.lower()) or ""

    searcher = generate_simple_search(search_form, language,
                                      with_highlights=True)
    searcher = searcher[:settings.SEARCH_MAX_RESULTS]

    # 5. Generate output.
    pages = paginate(request, searcher, settings.SEARCH_RESULTS_PER_PAGE)

    if pages.paginator.count == 0:
        # No hits: offer fallback results instead.
        fallback_results = _fallback_results(language, cleaned["product"])
        results = []
    else:
        fallback_results = None
        results = build_results_list(pages, request.IS_JSON)

    product = Product.objects.filter(slug__in=cleaned["product"])
    if product:
        product_titles = [
            pgettext("DB: products.Product.title", p.title) for p in product
        ]
    else:
        product_titles = [_("All Products")]

    # FIXME: This is probably bad l10n.
    product_titles = ", ".join(product_titles)

    data = {
        "num_results": pages.paginator.count,
        "results": results,
        "fallback_results": fallback_results,
        "product_titles": product_titles,
        "q": cleaned["q"],
        "w": cleaned["w"],
        "lang_name": lang_name,
        "products": Product.objects.filter(visible=True),
    }

    if request.IS_JSON:
        data["total"] = len(data["results"])
        # Replace the queryset with JSON-serializable dicts.
        data["products"] = [{
            "slug": p.slug,
            "title": p.title
        } for p in data["products"]]

        if product:
            data["product"] = product[0].slug

        # Rebind `pages` to the project Paginator wrapper for JSON output.
        pages = Paginator(pages)
        data["pagination"] = dict(
            number=pages.pager.number,
            num_pages=pages.pager.paginator.num_pages,
            has_next=pages.pager.has_next(),
            has_previous=pages.pager.has_previous(),
            max=pages.max,
            span=pages.span,
            dotted_upper=pages.pager.dotted_upper,
            dotted_lower=pages.pager.dotted_lower,
            page_range=pages.pager.page_range,
            url=pages.pager.url,
        )

        if not results:
            data["message"] = constants.NO_MATCH

        json_data = to_json(data)
        if request.JSON_CALLBACK:
            # NOTE(review): JSONRenderer().render returns bytes; on
            # Python 3 this str + bytes concatenation would raise
            # TypeError -- verify the JSONP path still works.
            json_data = request.JSON_CALLBACK + "(" + json_data + ");"
        return HttpResponse(json_data, content_type=request.CONTENT_TYPE)

    # HTML path: hand the queryset/paginator/form to the template.
    data.update({
        "product": product,
        "pages": pages,
        "search_form": search_form,
        "advanced": False,
    })
    resp = cache_control(render(request, template, data),
                         settings.SEARCH_CACHE_PERIOD)
    # Remember the query so the form can be pre-filled next time.
    resp.set_cookie(
        settings.LAST_SEARCH_COOKIE,
        urlquote(cleaned["q"]),
        max_age=3600,
        secure=False,
        httponly=False,
    )
    return resp
def simple_search(request):
    """Simple search over the KB and Support Forum via CompoundSearch.

    Renders HTML results by default; when ``?format=json`` is present,
    responds with a JSON payload (including pagination metadata) for
    instant search. The ``w`` bitmask selects which doctypes to search.
    """
    wants_json = request.GET.get("format") == "json"

    form = SimpleSearchForm(request.GET, auto_id=False)
    if not form.is_valid():
        if not wants_json:
            return render(request, "search/form.html", {"search_form": form})
        error_body = json.dumps({"error": _("Invalid search data.")})
        return HttpResponse(error_body, content_type="application/json", status=400)

    cleaned = form.cleaned_data

    # Locale to search in: explicit ?language= wins over request language.
    language = locale_or_default(cleaned["language"] or request.LANGUAGE_CODE)
    lang_name = settings.LANGUAGES_DICT.get(language.lower()) or ""

    # Product filter plus the human-readable titles that go with it.
    product, product_titles = _get_product_title(cleaned["product"])

    # Assemble the search from the doctypes the "w" bitmask selects.
    search = CompoundSearch()
    if cleaned["w"] & constants.WHERE_WIKI:
        search.add(WikiSearch(query=cleaned["q"], locale=language, product=product))
    if cleaned["w"] & constants.WHERE_SUPPORT:
        search.add(QuestionSearch(query=cleaned["q"], locale=language, product=product))

    # Paginating executes the search for the requested page.
    page = paginate(
        request,
        search,
        per_page=settings.SEARCH_RESULTS_PER_PAGE,
        paginator_cls=SumoSearchPaginator,
    )
    total = search.total
    results = search.results

    # Offer fallback results only when the search came up empty.
    fallback_results = None
    if total == 0:
        fallback_results = _fallback_results(language, cleaned["product"])

    data = {
        "num_results": total,
        "results": results,
        "fallback_results": fallback_results,
        "product_titles": ", ".join(product_titles),
        "q": cleaned["q"],
        "w": cleaned["w"],
        "lang_name": lang_name,
        "products": Product.objects.filter(visible=True),
    }

    if not wants_json:
        # HTML path: hand the queryset/page/form straight to the template.
        data.update({
            "product": product,
            "pages": page,
            "search_form": form,
        })
        return render(request, "search/results.html", data)

    # JSON path: swap the queryset for serializable dicts and add
    # pagination metadata for instant search.
    serializable_products = [
        {"slug": p.slug, "title": pgettext("DB: products.Product.title", p.title)}
        for p in data["products"]
    ]
    data.update({
        "total": total,
        "products": serializable_products,
        "pagination": _make_pagination(page),
    })
    if product:
        data["product"] = product.slug
    if not results:
        data["message"] = constants.NO_MATCH

    return HttpResponse(JSONRenderer().render(data), content_type="application/json")