Example 1
    def search(self):
        # 'pre_search' plugins may veto the search: the engines only run
        # when no plugin returned False.
        if plugins.call(self.ordered_plugin_list, 'pre_search', self.request, self):
            super(SearchWithPlugins, self).search()

        plugins.call(self.ordered_plugin_list, 'post_search', self.request, self)

        results = self.result_container.get_ordered_results()

        # 'on_result' plugins see each result individually and may modify it.
        for result in results:
            plugins.call(self.ordered_plugin_list, 'on_result', self.request, self, result)

        return self.result_container
Example 2
    def search(self):
        if plugins.call('pre_search', self.request, self):
            super(SearchWithPlugins, self).search()

        plugins.call('post_search', self.request, self)

        results = self.result_container.get_ordered_results()

        for result in results:
            plugins.call('on_result', self.request, self, result)

        return self.result_container
Example 3
    def search(self):
        if plugins.call(self.ordered_plugin_list, "pre_search", self.request, self):
            super().search()

        plugins.call(self.ordered_plugin_list, "post_search", self.request, self)

        results = self.result_container.get_ordered_results()

        for result in results:
            plugins.call(
                self.ordered_plugin_list, "on_result", self.request, self, result
            )

        return self.result_container
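The three snippets above are the caller side of the hook API. For context, a plugin is just a module (or object) that exposes any of the pre_search, post_search and on_result callables that plugins.call dispatches to. The sketch below is a minimal, hypothetical plugin written against the signatures visible in these examples; the module attributes (name, description, default_on) follow searx's plugin convention, but the plugin itself is not taken from the searx repository.

# Hypothetical plugin sketch, matching the hook signatures used above.
name = 'Example plugin'
description = 'Marks every result and never vetoes anything'
default_on = False


def pre_search(request, search):
    # Return False to veto the search before any engine is queried.
    return True


def post_search(request, search):
    # Runs once after the engines have answered.
    return True


def on_result(request, search, result):
    # Runs once per result; the result dict may be modified in place.
    result['example_plugin'] = True
    return True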
Example 4
def index():
    """Render index page.

    Supported outputs: html, json, csv, rss.
    """

    if not request.args and not request.form:
        return render(
            'index.html',
        )

    try:
        search = Search(request)
    except:
        return render(
            'index.html',
        )

    if plugins.call('pre_search', request, locals()):
        search.search(request)

    plugins.call('post_search', request, locals())

    for result in search.result_container.get_ordered_results():

        plugins.call('on_result', request, locals())
        if not search.paging and engines[result['engine']].paging:
            search.paging = True

        if search.request_data.get('format', 'html') == 'html':
            if 'content' in result:
                result['content'] = highlight_content(result['content'],
                                                      search.query.encode('utf-8'))  # noqa
            result['title'] = highlight_content(result['title'],
                                                search.query.encode('utf-8'))
        else:
            if result.get('content'):
                result['content'] = html_to_text(result['content']).strip()
            # removing html content and whitespace duplications
            result['title'] = ' '.join(html_to_text(result['title']).strip().split())

        result['pretty_url'] = prettify_url(result['url'])

        # TODO, check if timezone is calculated right
        if 'publishedDate' in result:
            result['pubdate'] = result['publishedDate'].strftime('%Y-%m-%d %H:%M:%S%z')
            if result['publishedDate'].replace(tzinfo=None) >= datetime.now() - timedelta(days=1):
                timedifference = datetime.now() - result['publishedDate'].replace(tzinfo=None)
                minutes = int((timedifference.seconds / 60) % 60)
                hours = int(timedifference.seconds / 60 / 60)
                if hours == 0:
                    result['publishedDate'] = gettext(u'{minutes} minute(s) ago').format(minutes=minutes)
                else:
                    result['publishedDate'] = gettext(u'{hours} hour(s), {minutes} minute(s) ago').format(hours=hours, minutes=minutes)  # noqa
            else:
                result['publishedDate'] = format_date(result['publishedDate'])

    if search.request_data.get('format') == 'json':
        return Response(json.dumps({'query': search.query,
                                    'results': search.result_container.get_ordered_results()}),
                        mimetype='application/json')
    elif search.request_data.get('format') == 'csv':
        csv = UnicodeWriter(cStringIO.StringIO())
        keys = ('title', 'url', 'content', 'host', 'engine', 'score')
        csv.writerow(keys)
        for row in search.result_container.get_ordered_results():
            row['host'] = row['parsed_url'].netloc
            csv.writerow([row.get(key, '') for key in keys])
        csv.stream.seek(0)
        response = Response(csv.stream.read(), mimetype='application/csv')
        cont_disp = 'attachment;Filename=searx_-_{0}.csv'.format(search.query)
        response.headers.add('Content-Disposition', cont_disp)
        return response
    elif search.request_data.get('format') == 'rss':
        response_rss = render(
            'opensearch_response_rss.xml',
            results=search.result_container.get_ordered_results(),
            q=search.request_data['q'],
            number_of_results=search.result_container.results_length(),
            base_url=get_base_url()
        )
        return Response(response_rss, mimetype='text/xml')

    return render(
        'results.html',
        results=search.result_container.get_ordered_results(),
        q=search.request_data['q'],
        selected_categories=search.categories,
        paging=search.paging,
        pageno=search.pageno,
        base_url=get_base_url(),
        suggestions=search.result_container.suggestions,
        answers=search.result_container.answers,
        infoboxes=search.result_container.infoboxes,
        theme=get_current_theme_name(),
        favicons=global_favicons[themes.index(get_current_theme_name())]
    )
Example 5
def index():
    """Render index page.

    Supported outputs: html, json, csv, rss.
    """

    if not request.args and not request.form:
        return render("index.html")

    try:
        search = Search(request)
    except:
        return render("index.html")

    if plugins.call("pre_search", request, locals()):
        search.search(request)

    plugins.call("post_search", request, locals())

    for result in search.result_container.get_ordered_results():

        plugins.call("on_result", request, locals())
        if not search.paging and engines[result["engine"]].paging:
            search.paging = True

        if search.request_data.get("format", "html") == "html":
            if "content" in result:
                result["content"] = highlight_content(result["content"], search.query.encode("utf-8"))  # noqa
            result["title"] = highlight_content(result["title"], search.query.encode("utf-8"))
        else:
            if result.get("content"):
                result["content"] = html_to_text(result["content"]).strip()
            # removing html content and whitespace duplications
            result["title"] = " ".join(html_to_text(result["title"]).strip().split())

        result["pretty_url"] = prettify_url(result["url"])

        # TODO, check if timezone is calculated right
        if "publishedDate" in result:
            try:  # test if publishedDate >= 1900 (datetime module bug)
                result["pubdate"] = result["publishedDate"].strftime("%Y-%m-%d %H:%M:%S%z")
            except ValueError:
                result["publishedDate"] = None
            else:
                if result["publishedDate"].replace(tzinfo=None) >= datetime.now() - timedelta(days=1):
                    timedifference = datetime.now() - result["publishedDate"].replace(tzinfo=None)
                    minutes = int((timedifference.seconds / 60) % 60)
                    hours = int(timedifference.seconds / 60 / 60)
                    if hours == 0:
                        result["publishedDate"] = gettext(u"{minutes} minute(s) ago").format(minutes=minutes)
                    else:
                        result["publishedDate"] = gettext(u"{hours} hour(s), {minutes} minute(s) ago").format(
                            hours=hours, minutes=minutes
                        )  # noqa
                else:
                    result["publishedDate"] = format_date(result["publishedDate"])

    if search.request_data.get("format") == "json":
        return Response(
            json.dumps({"query": search.query, "results": search.result_container.get_ordered_results()}),
            mimetype="application/json",
        )
    elif search.request_data.get("format") == "csv":
        csv = UnicodeWriter(cStringIO.StringIO())
        keys = ("title", "url", "content", "host", "engine", "score")
        csv.writerow(keys)
        for row in search.result_container.get_ordered_results():
            row["host"] = row["parsed_url"].netloc
            csv.writerow([row.get(key, "") for key in keys])
        csv.stream.seek(0)
        response = Response(csv.stream.read(), mimetype="application/csv")
        cont_disp = "attachment;Filename=searx_-_{0}.csv".format(search.query.encode("utf-8"))
        response.headers.add("Content-Disposition", cont_disp)
        return response
    elif search.request_data.get("format") == "rss":
        response_rss = render(
            "opensearch_response_rss.xml",
            results=search.result_container.get_ordered_results(),
            q=search.request_data["q"],
            number_of_results=search.result_container.results_length(),
            base_url=get_base_url(),
        )
        return Response(response_rss, mimetype="text/xml")

    return render(
        "results.html",
        results=search.result_container.get_ordered_results(),
        q=search.request_data["q"],
        selected_categories=search.categories,
        paging=search.paging,
        pageno=search.pageno,
        base_url=get_base_url(),
        suggestions=search.result_container.suggestions,
        answers=search.result_container.answers,
        infoboxes=search.result_container.infoboxes,
        theme=get_current_theme_name(),
        favicons=global_favicons[themes.index(get_current_theme_name())],
    )
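All of the examples above go through plugins.call, whose implementation is not shown on this page. Judging only from the call sites, it behaves roughly like the sketch below: invoke the named hook on each plugin in order and stop as soon as one of them vetoes. In the newer snippets the plugin list is passed explicitly; in the older ones it is presumably a module-level default. Treat this as an inferred approximation, not the actual searx function.

def call(ordered_plugin_list, plugin_type, request, *args, **kwargs):
    # Inferred behaviour: run the hook named by plugin_type on every
    # plugin that defines it, in order; a falsy return value vetoes
    # the operation and stops the chain.
    ret = True
    for plugin in ordered_plugin_list:
        if hasattr(plugin, plugin_type):
            ret = getattr(plugin, plugin_type)(request, *args, **kwargs)
            if not ret:
                break
    return ret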