Example 1
    def test_with_highlights(self):
        form = SimpleSearchForm({'q': 'foo'})
        ok_(form.is_valid())

        s = generate_simple_search(form, 'en-US', with_highlights=True)
        ok_('highlight' in s.build_search())

        s = generate_simple_search(form, 'en-US', with_highlights=False)
        ok_('highlight' not in s.build_search())
Example 2
    def test_with_highlights(self):
        form = SimpleSearchForm({"q": "foo"})
        ok_(form.is_valid())

        s = generate_simple_search(form, "en-US", with_highlights=True)
        ok_("highlight" in s.build_search())

        s = generate_simple_search(form, "en-US", with_highlights=False)
        ok_("highlight" not in s.build_search())
Example 3
def opensearch_suggestions(request):
    """A simple search view that returns OpenSearch suggestions."""
    content_type = 'application/x-suggestions+json'
    search_form = SimpleSearchForm(request.GET, auto_id=False)
    if not search_form.is_valid():
        return HttpResponseBadRequest(content_type=content_type)

    cleaned = search_form.cleaned_data
    language = locale_or_default(cleaned['language'] or request.LANGUAGE_CODE)
    searcher = generate_simple_search(search_form,
                                      language,
                                      with_highlights=False)
    searcher = searcher.values_dict('document_title', 'question_title', 'url')
    results = searcher[:10]

    def urlize(r):
        return '%s://%s%s' % ('https' if request.is_secure() else 'http',
                              request.get_host(), r['url'][0])

    def titleize(r):
        # NB: Elasticsearch returns an array of strings as the value, so we mimic that and
        # then pull out the first (and only) string.
        return r.get('document_title', r.get('question_title',
                                             [_('No title')]))[0]

    try:
        data = [
            cleaned['q'], [titleize(r) for r in results], [],
            [urlize(r) for r in results]
        ]
    except ES_EXCEPTIONS:
        # If we have Elasticsearch problems, we just send back an empty set of results.
        data = []

    return HttpResponse(json.dumps(data), content_type=content_type)
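The view returns its payload in the OpenSearch suggestions format: a four-element JSON array containing the query string, the suggestion titles, an (unused here) descriptions list, and the matching URLs. A sketch of a possible response body for the query 'crash'; the titles and URLs are invented for illustration:

# Illustrative OpenSearch suggestions payload only; titles and URLs are made up.
example_response = [
    'crash',                                            # the original query
    ['Firefox crashes', 'Troubleshoot crash reports'],  # suggestion titles
    [],                                                 # descriptions (unused)
    ['https://support.example.org/kb/firefox-crashes',  # matching document URLs
     'https://support.example.org/kb/crash-reports'],
]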
Example 4
    def test_language_zh_cn(self):
        form = SimpleSearchForm({"q": "foo"})
        ok_(form.is_valid())

        s = generate_simple_search(form, "zh-CN", with_highlights=False)

        s_string = str(s.build_search())
        # Verify locale
        ok_("{'term': {'document_locale': 'zh-CN'}}" in s_string)
        # Verify the Chinese analyzer is used
        ok_("'analyzer': 'chinese'" in s_string)
Example 5
    def test_language_fr(self):
        form = SimpleSearchForm({"q": "foo"})
        ok_(form.is_valid())

        s = generate_simple_search(form, "fr", with_highlights=False)

        s_string = str(s.build_search())
        # Verify locale
        ok_("{'term': {'document_locale': 'fr'}}" in s_string)
        # Verify fr has the right synonym-less analyzer
        ok_("'analyzer': 'snowball-french'" in s_string)
Example 6
    def test_language_en_us(self):
        form = SimpleSearchForm({"q": "foo"})
        ok_(form.is_valid())

        s = generate_simple_search(form, "en-US", with_highlights=False)

        # NB: Comparing bits of big trees is hard, so we serialize it
        # and look for strings.
        s_string = str(s.build_search())
        # Verify locale
        ok_("{'term': {'document_locale': 'en-US'}}" in s_string)
        # Verify en-US has the right synonym-enhanced analyzer
        ok_("'analyzer': 'snowball-english-synonyms'" in s_string)
Example 7
def opensearch_suggestions(request):
    """A simple search view that returns OpenSearch suggestions."""
    content_type = 'application/x-suggestions+json'

    search_form = SimpleSearchForm(request.GET, auto_id=False)
    if not search_form.is_valid():
        return HttpResponseBadRequest(content_type=content_type)

    cleaned = search_form.cleaned_data
    language = locale_or_default(cleaned['language'] or request.LANGUAGE_CODE)
    searcher = generate_simple_search(search_form, language, with_highlights=False)
    searcher = searcher.values_dict('document_title', 'question_title', 'url')
    results = searcher[:10]

    def urlize(r):
        return u'%s://%s%s' % (
            'https' if request.is_secure() else 'http',
            request.get_host(),
            r['url'][0]
        )

    def titleize(r):
        # NB: Elasticsearch returns an array of strings as the value, so we mimic that and
        # then pull out the first (and only) string.
        return r.get('document_title', r.get('question_title', [_('No title')]))[0]

    try:
        data = [
            cleaned['q'],
            [titleize(r) for r in results],
            [],
            [urlize(r) for r in results]
        ]
    except ES_EXCEPTIONS:
        # If we have Elasticsearch problems, we just send back an empty set of results.
        data = []

    return HttpResponse(json.dumps(data), content_type=content_type)
Example 8
def simple_search(request):
    """Elasticsearch-specific simple search view.

    This view is for end user searching of the Knowledge Base and
    Support Forum. Filtering options are limited to:

    * product (`product=firefox`, for example, for only Firefox results)
    * document type (`w=2`, for example, for Support Forum questions only)

    """

    to_json = JSONRenderer().render
    template = 'search/results.html'

    # 1. Prep request.
    # Redirect old Advanced Search URLs (?a={1,2}) to the new URL.
    if request.GET.get('a') in ['1', '2']:
        new_url = reverse('search.advanced') + '?' + request.GET.urlencode()
        return HttpResponseRedirect(new_url)

    # 2. Build form.
    search_form = SimpleSearchForm(request.GET, auto_id=False)

    # 3. Validate request.
    if not search_form.is_valid():
        if request.IS_JSON:
            return HttpResponse(json.dumps(
                {'error': _('Invalid search data.')}),
                                content_type=request.CONTENT_TYPE,
                                status=400)

        t = 'search/form.html'
        return cache_control(
            render(request, t, {
                'advanced': False,
                'request': request,
                'search_form': search_form
            }), settings.SEARCH_CACHE_PERIOD)

    # 4. Generate search.
    cleaned = search_form.cleaned_data

    language = locale_or_default(cleaned['language'] or request.LANGUAGE_CODE)
    lang_name = settings.LANGUAGES_DICT.get(language.lower()) or ''

    searcher = generate_simple_search(search_form,
                                      language,
                                      with_highlights=True)
    searcher = searcher[:settings.SEARCH_MAX_RESULTS]

    # 5. Generate output.
    pages = paginate(request, searcher, settings.SEARCH_RESULTS_PER_PAGE)

    if pages.paginator.count == 0:
        fallback_results = _fallback_results(language, cleaned['product'])
        results = []

    else:
        fallback_results = None
        results = build_results_list(pages, request.IS_JSON)

    product = Product.objects.filter(slug__in=cleaned['product'])
    if product:
        product_titles = [
            pgettext('DB: products.Product.title', p.title) for p in product
        ]
    else:
        product_titles = [_('All Products')]

    # FIXME: This is probably bad l10n.
    product_titles = ', '.join(product_titles)

    data = {
        'num_results': pages.paginator.count,
        'results': results,
        'fallback_results': fallback_results,
        'product_titles': product_titles,
        'q': cleaned['q'],
        'w': cleaned['w'],
        'lang_name': lang_name,
        'products': Product.objects.filter(visible=True)
    }

    if request.IS_JSON:
        data['total'] = len(data['results'])
        data['products'] = [{
            'slug': p.slug,
            'title': p.title
        } for p in data['products']]

        if product:
            data['product'] = product[0].slug

        pages = Paginator(pages)
        data['pagination'] = dict(
            number=pages.pager.number,
            num_pages=pages.pager.paginator.num_pages,
            has_next=pages.pager.has_next(),
            has_previous=pages.pager.has_previous(),
            max=pages.max,
            span=pages.span,
            dotted_upper=pages.pager.dotted_upper,
            dotted_lower=pages.pager.dotted_lower,
            page_range=pages.pager.page_range,
            url=pages.pager.url,
        )
        if not results:
            data['message'] = _('No pages matched the search criteria')

        json_data = to_json(data)
        if request.JSON_CALLBACK:
            json_data = request.JSON_CALLBACK + '(' + json_data + ');'
        return HttpResponse(json_data, content_type=request.CONTENT_TYPE)

    data.update({
        'product': product,
        'pages': pages,
        'search_form': search_form,
        'advanced': False,
    })
    resp = cache_control(render(request, template, data),
                         settings.SEARCH_CACHE_PERIOD)
    resp.set_cookie(settings.LAST_SEARCH_COOKIE,
                    urlquote(cleaned['q']),
                    max_age=3600,
                    secure=False,
                    httponly=False)
    return resp
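A short usage sketch for the view above (not from the original source): the URL name 'search' and the test-client setup are assumptions, while the q, product, and a query parameters follow the docstring and the redirect in step 1. The middleware that sets request.IS_JSON and request.CONTENT_TYPE is assumed to be installed.

# Usage sketch only; 'search' as the URL name is an assumption.
from django.core.urlresolvers import reverse  # django.urls.reverse on newer Django
from django.test import Client

client = Client()

# Plain HTML results page, filtered to a single product.
resp = client.get(reverse('search'), {'q': 'crash', 'product': 'firefox'})
assert resp.status_code == 200

# Old Advanced Search URLs (?a=1 or ?a=2) are redirected to search.advanced.
resp = client.get(reverse('search'), {'a': '1', 'q': 'crash'})
assert resp.status_code == 302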
Example 9
def simple_search(request):
    """Elasticsearch-specific simple search view.

    This view is for end user searching of the Knowledge Base and
    Support Forum. Filtering options are limited to:

    * product (`product=firefox`, for example, for only Firefox results)
    * document type (`w=2`, for example, for Support Forum questions only)

    """

    to_json = JSONRenderer().render
    template = "search/results.html"

    # 1. Prep request.
    # Redirect old Advanced Search URLs (?a={1,2}) to the new URL.
    if request.GET.get("a") in ["1", "2"]:
        new_url = reverse("search.advanced") + "?" + request.GET.urlencode()
        return HttpResponseRedirect(new_url)

    # 2. Build form.
    search_form = SimpleSearchForm(request.GET, auto_id=False)

    # 3. Validate request.
    if not search_form.is_valid():
        if request.IS_JSON:
            return HttpResponse(
                json.dumps({"error": _("Invalid search data.")}),
                content_type=request.CONTENT_TYPE,
                status=400,
            )

        t = "search/form.html"
        return cache_control(
            render(request, t, {
                "advanced": False,
                "request": request,
                "search_form": search_form
            }),
            settings.SEARCH_CACHE_PERIOD,
        )

    # 4. Generate search.
    cleaned = search_form.cleaned_data

    language = locale_or_default(cleaned["language"] or request.LANGUAGE_CODE)
    lang_name = settings.LANGUAGES_DICT.get(language.lower()) or ""

    searcher = generate_simple_search(search_form,
                                      language,
                                      with_highlights=True)
    searcher = searcher[:settings.SEARCH_MAX_RESULTS]

    # 5. Generate output.
    pages = paginate(request, searcher, settings.SEARCH_RESULTS_PER_PAGE)

    if pages.paginator.count == 0:
        fallback_results = _fallback_results(language, cleaned["product"])
        results = []

    else:
        fallback_results = None
        results = build_results_list(pages, request.IS_JSON)

    product = Product.objects.filter(slug__in=cleaned["product"])
    if product:
        product_titles = [
            pgettext("DB: products.Product.title", p.title) for p in product
        ]
    else:
        product_titles = [_("All Products")]

    # FIXME: This is probably bad l10n.
    product_titles = ", ".join(product_titles)

    data = {
        "num_results": pages.paginator.count,
        "results": results,
        "fallback_results": fallback_results,
        "product_titles": product_titles,
        "q": cleaned["q"],
        "w": cleaned["w"],
        "lang_name": lang_name,
        "products": Product.objects.filter(visible=True),
    }

    if request.IS_JSON:
        data["total"] = len(data["results"])
        data["products"] = [{
            "slug": p.slug,
            "title": p.title
        } for p in data["products"]]

        if product:
            data["product"] = product[0].slug

        pages = Paginator(pages)
        data["pagination"] = dict(
            number=pages.pager.number,
            num_pages=pages.pager.paginator.num_pages,
            has_next=pages.pager.has_next(),
            has_previous=pages.pager.has_previous(),
            max=pages.max,
            span=pages.span,
            dotted_upper=pages.pager.dotted_upper,
            dotted_lower=pages.pager.dotted_lower,
            page_range=pages.pager.page_range,
            url=pages.pager.url,
        )
        if not results:
            data["message"] = constants.NO_MATCH

        json_data = to_json(data)
        if request.JSON_CALLBACK:
            json_data = request.JSON_CALLBACK + "(" + json_data + ");"
        return HttpResponse(json_data, content_type=request.CONTENT_TYPE)

    data.update({
        "product": product,
        "pages": pages,
        "search_form": search_form,
        "advanced": False,
    })
    resp = cache_control(render(request, template, data),
                         settings.SEARCH_CACHE_PERIOD)
    resp.set_cookie(
        settings.LAST_SEARCH_COOKIE,
        urlquote(cleaned["q"]),
        max_age=3600,
        secure=False,
        httponly=False,
    )
    return resp
Example 10
def simple_search(request, template=None):
    """Elasticsearch-specific simple search view.

    This view is for end user searching of the Knowledge Base and
    Support Forum. Filtering options are limited to:

    * product (`product=firefox`, for example, for only Firefox results)
    * document type (`w=2`, for example, for Support Forum questions only)

    """

    to_json = JSONRenderer().render

    # 1. Prep request.
    # Redirect old Advanced Search URLs (?a={1,2}) to the new URL.
    if request.GET.get('a') in ['1', '2']:
        new_url = reverse('search.advanced') + '?' + request.GET.urlencode()
        return HttpResponseRedirect(new_url)

    # 2. Build form.
    search_form = SimpleSearchForm(request.GET, auto_id=False)

    # 3. Validate request.
    if not search_form.is_valid():
        if request.IS_JSON:
            return HttpResponse(
                json.dumps({'error': _('Invalid search data.')}),
                content_type=request.CONTENT_TYPE,
                status=400)

        t = template if request.MOBILE else 'search/form.html'
        return cache_control(
            render(request, t, {
                'advanced': False,
                'request': request,
                'search_form': search_form}),
            settings.SEARCH_CACHE_PERIOD)

    # 4. Generate search.
    cleaned = search_form.cleaned_data

    # On mobile, we default to just wiki results.
    if request.MOBILE and cleaned['w'] == constants.WHERE_BASIC:
        cleaned['w'] = constants.WHERE_WIKI

    language = locale_or_default(cleaned['language'] or request.LANGUAGE_CODE)
    lang_name = settings.LANGUAGES_DICT.get(language.lower()) or ''

    searcher = generate_simple_search(search_form, language, with_highlights=True)
    searcher = searcher[:settings.SEARCH_MAX_RESULTS]

    # 5. Generate output.
    pages = paginate(request, searcher, settings.SEARCH_RESULTS_PER_PAGE)

    if pages.paginator.count == 0:
        fallback_results = _fallback_results(language, cleaned['product'])
        results = []

    else:
        fallback_results = None
        results = build_results_list(pages, request.IS_JSON)

    product = Product.objects.filter(slug__in=cleaned['product'])
    if product:
        product_titles = [pgettext('DB: products.Product.title', p.title) for p in product]
    else:
        product_titles = [_('All Products')]

    # FIXME: This is probably bad l10n.
    product_titles = ', '.join(product_titles)

    data = {
        'num_results': pages.paginator.count,
        'results': results,
        'fallback_results': fallback_results,
        'product_titles': product_titles,
        'q': cleaned['q'],
        'w': cleaned['w'],
        'lang_name': lang_name,
        'products': Product.objects.filter(visible=True)}

    if request.IS_JSON:
        data['total'] = len(data['results'])
        data['products'] = [{'slug': p.slug, 'title': p.title}
                            for p in data['products']]

        if product:
            data['product'] = product[0].slug

        pages = Paginator(pages)
        data['pagination'] = dict(
            number=pages.pager.number,
            num_pages=pages.pager.paginator.num_pages,
            has_next=pages.pager.has_next(),
            has_previous=pages.pager.has_previous(),
            max=pages.max,
            span=pages.span,
            dotted_upper=pages.pager.dotted_upper,
            dotted_lower=pages.pager.dotted_lower,
            page_range=pages.pager.page_range,
            url=pages.pager.url,
        )
        if not results:
            data['message'] = _('No pages matched the search criteria')

        json_data = to_json(data)
        if request.JSON_CALLBACK:
            json_data = request.JSON_CALLBACK + '(' + json_data + ');'
        return HttpResponse(json_data, content_type=request.CONTENT_TYPE)

    data.update({
        'product': product,
        'pages': pages,
        'search_form': search_form,
        'advanced': False,
    })
    resp = cache_control(render(request, template, data), settings.SEARCH_CACHE_PERIOD)
    resp.set_cookie(settings.LAST_SEARCH_COOKIE, urlquote(cleaned['q']),
                    max_age=3600, secure=False, httponly=False)
    return resp