Example #1
 def __init__(self, search_query):
     # init vars
     super().__init__()
     self.search_query = search_query
     self.result_container = ResultContainer()
     self.start_time = None
     self.actual_timeout = None
Example #2
 def search(self, search_query: SearchQuery) -> ResultContainer:
     result_container = ResultContainer()
     engineref_category = search_query.engineref_list[0].category
     params = self.processor.get_params(search_query, engineref_category)
     if params is not None:
         self.processor.search(search_query.query, params, result_container, time(), 5)
     return result_container
Example #3
 def search(self, search_query: SearchQuery) -> ResultContainer:
     result_container = ResultContainer()
     engineref_category = search_query.engineref_list[0].category
     params = self.processor.get_params(search_query, engineref_category)
     if params is not None:
         counter_inc('engine', search_query.engineref_list[0].name, 'search', 'count', 'sent')
         self.processor.search(search_query.query, params, result_container, default_timer(), 5)
     return result_container
Example #4
 def test_result_merge(self):
     c = ResultContainer()
     c.extend('wikipedia', [fake_result()])
     c.extend('wikidata',
              [fake_result(),
               fake_result(url='https://example.com/')])
     self.assertEqual(c.results_length(), 2)
Example #5
 def search(self, search_query: SearchQuery) -> ResultContainer:
     result_container = ResultContainer()
     engineref_category = search_query.engineref_list[0].category
     params = self.processor.get_params(search_query, engineref_category)
     if params is not None:
         with threading.RLock():
             self.processor.engine.stats['sent_search_count'] += 1
         self.processor.search(search_query.query, params, result_container, time(), 5)
     return result_container
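Examples #3 and #5 record the same event in two different ways: Example #3 goes through a metrics helper (counter_inc), while Example #5 bumps a per-engine stats dict under a lock (note that the lock in Example #5 is created inline, so each call gets a fresh RLock and the increment is not actually serialized). A minimal sketch, assuming nothing about searx's real metrics module, of the kind of thread-safe counter such a helper could be built on:

import threading
from collections import defaultdict

_counters = defaultdict(int)
_lock = threading.Lock()  # one shared lock, created once at import time

def counter_inc(*parts):
    """Increment the counter identified by the given key parts."""
    # e.g. counter_inc('engine', 'wikipedia', 'search', 'count', 'sent')
    with _lock:
        _counters[parts] += 1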
Example #6
class Search:
    """Search information container"""

    __slots__ = "search_query", "result_container", "start_time", "actual_timeout"

    def __init__(self, search_query):
        # init vars
        super().__init__()
        self.search_query = search_query
        self.result_container = ResultContainer()
        self.start_time = None
        self.actual_timeout = None

    def search_external_bang(self):
        """
        Check if there is an external bang.
        If yes, update self.result_container and return True
        """
        if self.search_query.external_bang:
            self.result_container.redirect_url = get_bang_url(self.search_query)

            # This means there was a valid bang and the
            # rest of the search does not need to be continued
            if isinstance(self.result_container.redirect_url, str):
                return True
        return False

    def search_answerers(self):
        """
        Check if an answerer returns a result.
        If yes, update self.result_container and return True
        """
        answerers_results = ask(self.search_query)

        if answerers_results:
            for results in answerers_results:
                self.result_container.extend('answer', results)
            return True
        return False

    # do search-request
    def _get_requests(self):
        # init vars
        requests = []

        # max of all selected engine timeout
        default_timeout = 0

        # start search-request for all selected engines
        for engineref in self.search_query.engineref_list:
            processor = processors[engineref.name]

            # stop the request now if the engine is suspended
            if processor.extend_container_if_suspended(self.result_container):
                continue

            # set default request parameters
            request_params = processor.get_params(self.search_query, engineref.category)
            if request_params is None:
                continue

            counter_inc('engine', engineref.name, 'search', 'count', 'sent')

            # append request to list
            requests.append((engineref.name, self.search_query.query, request_params))

            # update default_timeout
            default_timeout = max(default_timeout, processor.engine.timeout)

        # adjust timeout
        actual_timeout = default_timeout
        query_timeout = self.search_query.timeout_limit

        if max_request_timeout is None and query_timeout is None:
            # No max, no user query: default_timeout
            pass
        elif max_request_timeout is None and query_timeout is not None:
            # No max, but user query: From user query except if above default
            actual_timeout = min(default_timeout, query_timeout)
        elif max_request_timeout is not None and query_timeout is None:
            # Max, no user query: Default except if above max
            actual_timeout = min(default_timeout, max_request_timeout)
        elif max_request_timeout is not None and query_timeout is not None:
            # Max & user query: From user query except if above max
            actual_timeout = min(query_timeout, max_request_timeout)

        logger.debug("actual_timeout={0} (default_timeout={1}, ?timeout_limit={2}, max_request_timeout={3})"
                     .format(actual_timeout, default_timeout, query_timeout, max_request_timeout))

        return requests, actual_timeout

    def search_multiple_requests(self, requests):
        search_id = uuid4().__str__()

        for engine_name, query, request_params in requests:
            th = threading.Thread(
                target=processors[engine_name].search,
                args=(query, request_params, self.result_container, self.start_time, self.actual_timeout),
                name=search_id,
            )
            th._timeout = False
            th._engine_name = engine_name
            th.start()

        for th in threading.enumerate():
            if th.name == search_id:
                remaining_time = max(0.0, self.actual_timeout - (default_timer() - self.start_time))
                th.join(remaining_time)
                if th.is_alive():
                    th._timeout = True
                    self.result_container.add_unresponsive_engine(th._engine_name, 'timeout')
                    logger.warning('engine timeout: {0}'.format(th._engine_name))

    def search_standard(self):
        """
        Update self.result_container, self.actual_timeout
        """
        requests, self.actual_timeout = self._get_requests()

        # send all search-request
        if requests:
            self.search_multiple_requests(requests)
            start_new_thread(gc.collect, tuple())

        # return results, suggestions, answers and infoboxes
        return True

    # do search-request
    def search(self):
        self.start_time = default_timer()
        if not self.search_external_bang():
            if not self.search_answerers():
                self.search_standard()
        return self.result_container
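The four-branch timeout adjustment in _get_requests above can be read as one small pure function; the sketch below (names are mine, not searx code) reproduces the same logic together with a few sanity checks:

def adjust_timeout(default_timeout, query_timeout, max_request_timeout):
    """Pick the effective timeout; same branches as _get_requests above."""
    if max_request_timeout is None and query_timeout is None:
        # no global max, no per-query limit: keep the engines' default
        return default_timeout
    if max_request_timeout is None:
        # only a per-query limit: honour it, but never exceed the default
        return min(default_timeout, query_timeout)
    if query_timeout is None:
        # only a global max: cap the engines' default at the max
        return min(default_timeout, max_request_timeout)
    # both limits set: the per-query limit wins, capped at the global max
    return min(query_timeout, max_request_timeout)

assert adjust_timeout(3.0, None, None) == 3.0
assert adjust_timeout(3.0, 1.5, None) == 1.5
assert adjust_timeout(3.0, None, 2.0) == 2.0
assert adjust_timeout(5.0, 10.0, 2.0) == 2.0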
Example #7
    def __init__(self, request):
        # init vars
        super(Search, self).__init__()
        self.query = None
        self.engines = []
        self.categories = []
        self.paging = False
        self.pageno = 1
        self.lang = 'all'

        # set blocked engines
        self.blocked_engines = request.preferences.engines.get_disabled()

        self.result_container = ResultContainer()
        self.request_data = {}

        # set specific language if set
        self.lang = request.preferences.get_value('language')

        # set request method
        if request.method == 'POST':
            self.request_data = request.form
        else:
            self.request_data = request.args

        # TODO better exceptions
        if not self.request_data.get('q'):
            raise Exception('noquery')

        # set pagenumber
        pageno_param = self.request_data.get('pageno', '1')
        if not pageno_param.isdigit() or int(pageno_param) < 1:
            pageno_param = 1

        self.pageno = int(pageno_param)

        # parse query, if tags are set, which change
        # the search engine or search-language
        query_obj = Query(self.request_data['q'], self.blocked_engines)
        query_obj.parse_query()

        # set query
        self.query = query_obj.getSearchQuery()

        # get last selected language in query, if possible
        # TODO support search with multiple languages
        if len(query_obj.languages):
            self.lang = query_obj.languages[-1]

        self.engines = query_obj.engines

        self.categories = []

        # if engines are calculated from query,
        # set categories by using that information
        if self.engines and query_obj.specific:
            self.categories = list(
                set(engine['category'] for engine in self.engines))

        # otherwise, using defined categories to
        # calculate which engines should be used
        else:
            # set categories/engines
            load_default_categories = True
            for pd_name, pd in self.request_data.items():
                if pd_name == 'categories':
                    self.categories.extend(
                        categ for categ in map(unicode.strip, pd.split(','))
                        if categ in categories)
                elif pd_name == 'engines':
                    pd_engines = [{
                        'category': engines[engine].categories[0],
                        'name': engine
                    } for engine in map(unicode.strip, pd.split(','))
                                  if engine in engines]
                    if pd_engines:
                        self.engines.extend(pd_engines)
                        load_default_categories = False
                elif pd_name.startswith('category_'):
                    category = pd_name[9:]

                    # if category is not found in list, skip
                    if category not in categories:
                        continue

                    if pd != 'off':
                        # add category to list
                        self.categories.append(category)
                    elif category in self.categories:
                        # remove category from list if property is set to 'off'
                        self.categories.remove(category)

            if not load_default_categories:
                if not self.categories:
                    self.categories = list(
                        set(engine['category'] for engine in self.engines))
                return

            # if no category is specified for this search,
            # use the user-defined default configuration
            # (which is stored in a cookie)
            if not self.categories:
                cookie_categories = request.cookies.get('categories', '')
                cookie_categories = cookie_categories.split(',')
                for ccateg in cookie_categories:
                    if ccateg in categories:
                        self.categories.append(ccateg)

            # if still no category is specified, use 'general'
            # as the default category
            if not self.categories:
                self.categories = ['general']

            # use all engines for that search which are
            # declared under the selected categories
            for categ in self.categories:
                self.engines.extend({
                    'category': categ,
                    'name': engine.name
                } for engine in categories[categ]
                                    if (engine.name,
                                        categ) not in self.blocked_engines)

        # remove suspended engines
        self.engines = [
            e for e in self.engines
            if engines[e['name']].suspend_end_time <= time()
        ]
Example #8
 def test_result_merge(self):
     c = ResultContainer()
     c.extend('wikipedia', [fake_result()])
     c.extend('wikidata', [fake_result(), fake_result(url='https://example.com/')])
     self.assertEqual(c.results_length(), 2)
Example #9
 def test_one_suggestion(self):
     c = ResultContainer()
     c.extend('wikipedia', [fake_result(suggestion=True)])
     self.assertEqual(len(c.suggestions), 1)
     self.assertEqual(c.results_length(), 0)
Example #10
 def test_one_result(self):
     c = ResultContainer()
     c.extend('wikipedia', [fake_result()])
     self.assertEqual(c.results_length(), 1)
Example #11
class Search:
    """Search information container"""
    def __init__(self, search_query):
        # init vars
        super().__init__()
        self.search_query = search_query
        self.result_container = ResultContainer()
        self.start_time = None
        self.actual_timeout = None

    def search_external_bang(self):
        """
        Check if there is an external bang.
        If yes, update self.result_container and return True
        """
        if self.search_query.external_bang:
            self.result_container.redirect_url = get_bang_url(
                self.search_query)

            # This means there was a valid bang and the
            # rest of the search does not need to be continued
            if isinstance(self.result_container.redirect_url, str):
                return True
        return False

    def search_answerers(self):
        """
        Check if an answerer returns a result.
        If yes, update self.result_container and return True
        """
        answerers_results = ask(self.search_query)

        if answerers_results:
            for results in answerers_results:
                self.result_container.extend('answer', results)
            return True
        return False

    def _is_accepted(self, engine_name, engine):
        if not self.search_query.preferences.validate_token(engine):
            return False

        # skip suspended engines
        if engine.suspend_end_time >= time():
            logger.debug('Engine currently suspended: %s', engine_name)
            return False

        # if paging is not supported, skip
        if self.search_query.pageno > 1 and not engine.paging:
            return False

        # if time_range is not supported, skip
        if self.search_query.time_range and not engine.time_range_support:
            return False

        return True

    def _get_params(self, selected_engine, user_agent):
        if selected_engine['name'] not in engines:
            return None, None

        engine = engines[selected_engine['name']]

        if not self._is_accepted(selected_engine['name'], engine):
            return None, None

        # set default request parameters
        request_params = {}
        if not engine.offline:
            request_params = default_request_params()
            request_params['headers']['User-Agent'] = user_agent

            if hasattr(engine, 'language') and engine.language:
                request_params['language'] = engine.language
            else:
                request_params['language'] = self.search_query.lang

            request_params['safesearch'] = self.search_query.safesearch
            request_params['time_range'] = self.search_query.time_range

        request_params['category'] = selected_engine['category']
        request_params['pageno'] = self.search_query.pageno

        return request_params, engine.timeout

    # do search-request
    def _get_requests(self):
        global number_of_searches

        # init vars
        requests = []

        # set default useragent
        # user_agent = request.headers.get('User-Agent', '')
        user_agent = gen_useragent()

        # max of all selected engine timeout
        default_timeout = 0

        # start search-request for all selected engines
        for selected_engine in self.search_query.engines:
            # set default request parameters
            request_params, engine_timeout = self._get_params(
                selected_engine, user_agent)
            if request_params is None:
                continue

            # append request to list
            requests.append((selected_engine['name'], self.search_query.query,
                             request_params))

            # update default_timeout
            default_timeout = max(default_timeout, engine_timeout)

        # adjust timeout
        actual_timeout = default_timeout
        query_timeout = self.search_query.timeout_limit

        if max_request_timeout is None and query_timeout is None:
            # No max, no user query: default_timeout
            pass
        elif max_request_timeout is None and query_timeout is not None:
            # No max, but user query: From user query except if above default
            actual_timeout = min(default_timeout, query_timeout)
        elif max_request_timeout is not None and query_timeout is None:
            # Max, no user query: Default except if above max
            actual_timeout = min(default_timeout, max_request_timeout)
        elif max_request_timeout is not None and query_timeout is not None:
            # Max & user query: From user query except if above max
            actual_timeout = min(query_timeout, max_request_timeout)

        logger.debug(
            "actual_timeout={0} (default_timeout={1}, ?timeout_limit={2}, max_request_timeout={3})"
            .format(actual_timeout, default_timeout, query_timeout,
                    max_request_timeout))

        return requests, actual_timeout

    def search_standard(self):
        """
        Update self.result_container, self.actual_timeout
        """
        requests, self.actual_timeout = self._get_requests()

        # send all search-request
        if requests:
            search_multiple_requests(requests, self.result_container,
                                     self.start_time, self.actual_timeout)
            start_new_thread(gc.collect, tuple())

        # return results, suggestions, answers and infoboxes
        return True

    # do search-request
    def search(self):
        self.start_time = time()

        if not self.search_external_bang():
            if not self.search_answerers():
                self.search_standard()

        return self.result_container
Example #12
 def __init__(self, search_query):
     # init vars
     super(Search, self).__init__()
     self.search_query = search_query
     self.result_container = ResultContainer()
Example #13
class Search(object):

    """Search information container"""

    def __init__(self, search_query):
        # init vars
        super(Search, self).__init__()
        self.search_query = search_query
        self.result_container = ResultContainer()

    # do search-request
    def search(self):
        global number_of_searches

        # start time
        start_time = time()

        # answers?
        answerers_results = ask(self.search_query)

        if answerers_results:
            for results in answerers_results:
                self.result_container.extend('answer', results)
            return self.result_container

        # init vars
        requests = []

        # increase number of searches
        number_of_searches += 1

        # set default useragent
        # user_agent = request.headers.get('User-Agent', '')
        user_agent = gen_useragent()

        search_query = self.search_query

        # max of all selected engine timeout
        timeout_limit = 0

        # start search-request for all selected engines
        for selected_engine in search_query.engines:
            if selected_engine['name'] not in engines:
                continue

            engine = engines[selected_engine['name']]

            # skip suspended engines
            if engine.suspend_end_time >= time():
                logger.debug('Engine currently suspended: %s', selected_engine['name'])
                continue

            # if paging is not supported, skip
            if search_query.pageno > 1 and not engine.paging:
                continue

            # if time_range is not supported, skip
            if search_query.time_range and not engine.time_range_support:
                continue

            # set default request parameters
            request_params = default_request_params()
            request_params['headers']['User-Agent'] = user_agent
            request_params['category'] = selected_engine['category']
            request_params['pageno'] = search_query.pageno

            if hasattr(engine, 'language') and engine.language:
                request_params['language'] = engine.language
            else:
                request_params['language'] = search_query.lang

            # 0 = None, 1 = Moderate, 2 = Strict
            request_params['safesearch'] = search_query.safesearch
            request_params['time_range'] = search_query.time_range

            # append request to list
            requests.append((selected_engine['name'], search_query.query.encode('utf-8'), request_params))

            # update timeout_limit
            timeout_limit = max(timeout_limit, engine.timeout)

        if requests:
            # send all search-request
            search_multiple_requests(requests, self.result_container, start_time, timeout_limit)
            start_new_thread(gc.collect, tuple())

        # return results, suggestions, answers and infoboxes
        return self.result_container
Example #14
class Search(object):

    """Search information container"""

    def __init__(self, search_query):
        # init vars
        super(Search, self).__init__()
        self.search_query = search_query
        self.result_container = ResultContainer()
        self.actual_timeout = None

    # do search-request
    def search(self):
        global number_of_searches

        # start time
        start_time = time()

        # answers?
        answerers_results = ask(self.search_query)

        if answerers_results:
            for results in answerers_results:
                self.result_container.extend('answer', results)
            return self.result_container

        # init vars
        requests = []

        # increase number of searches
        number_of_searches += 1

        # set default useragent
        # user_agent = request.headers.get('User-Agent', '')
        user_agent = gen_useragent()

        search_query = self.search_query

        # max of all selected engine timeout
        default_timeout = 0

        # start search-request for all selected engines
        for selected_engine in search_query.engines:
            if selected_engine['name'] not in engines:
                continue

            engine = engines[selected_engine['name']]

            # skip suspended engines
            if engine.suspend_end_time >= time():
                logger.debug('Engine currently suspended: %s', selected_engine['name'])
                continue

            # if paging is not supported, skip
            if search_query.pageno > 1 and not engine.paging:
                continue

            # if time_range is not supported, skip
            if search_query.time_range and not engine.time_range_support:
                continue

            # set default request parameters
            request_params = {}
            if not engine.offline:
                request_params = default_request_params()
                request_params['headers']['User-Agent'] = user_agent

                if hasattr(engine, 'language') and engine.language:
                    request_params['language'] = engine.language
                else:
                    request_params['language'] = search_query.lang

                request_params['safesearch'] = search_query.safesearch
                request_params['time_range'] = search_query.time_range

            request_params['category'] = selected_engine['category']
            request_params['pageno'] = search_query.pageno

            # append request to list
            requests.append((selected_engine['name'], search_query.query, request_params))

            # update default_timeout
            default_timeout = max(default_timeout, engine.timeout)

        # adjust timeout
        self.actual_timeout = default_timeout
        query_timeout = self.search_query.timeout_limit

        if max_request_timeout is None and query_timeout is None:
            # No max, no user query: default_timeout
            pass
        elif max_request_timeout is None and query_timeout is not None:
            # No max, but user query: From user query except if above default
            self.actual_timeout = min(default_timeout, query_timeout)
        elif max_request_timeout is not None and query_timeout is None:
            # Max, no user query: Default except if above max
            self.actual_timeout = min(default_timeout, max_request_timeout)
        elif max_request_timeout is not None and query_timeout is not None:
            # Max & user query: From user query except if above max
            self.actual_timeout = min(query_timeout, max_request_timeout)

        logger.debug("actual_timeout={0} (default_timeout={1}, ?timeout_limit={2}, max_request_timeout={3})"
                     .format(self.actual_timeout, default_timeout, query_timeout, max_request_timeout))

        # send all search-request
        if requests:
            search_multiple_requests(requests, self.result_container, start_time, self.actual_timeout)
            start_new_thread(gc.collect, tuple())

        # return results, suggestions, answers and infoboxes
        return self.result_container
Example #15
class Search(object):
    """Search information container"""
    def __init__(self, search_query):
        # init vars
        super(Search, self).__init__()
        self.search_query = search_query
        self.result_container = ResultContainer()

    # do search-request
    def search(self):
        global number_of_searches

        answerers_results = ask(self.search_query)

        if answerers_results:
            for results in answerers_results:
                self.result_container.extend('answer', results)
            return self.result_container

        # init vars
        requests = []

        # increase number of searches
        number_of_searches += 1

        # set default useragent
        # user_agent = request.headers.get('User-Agent', '')
        user_agent = gen_useragent()

        search_query = self.search_query

        # start search-request for all selected engines
        for selected_engine in search_query.engines:
            if selected_engine['name'] not in engines:
                continue

            engine = engines[selected_engine['name']]

            # skip suspended engines
            if engine.suspend_end_time >= time():
                logger.debug('Engine currently suspended: %s',
                             selected_engine['name'])
                continue

            # if paging is not supported, skip
            if search_query.pageno > 1 and not engine.paging:
                continue

            # if search-language is set and engine does not
            # provide language-support, skip
            if search_query.lang != 'all' and not engine.language_support:
                continue

            # if time_range is not supported, skip
            if search_query.time_range and not engine.time_range_support:
                continue

            # set default request parameters
            request_params = default_request_params()
            request_params['headers']['User-Agent'] = user_agent
            request_params['category'] = selected_engine['category']
            request_params['started'] = time()
            request_params['pageno'] = search_query.pageno

            if hasattr(engine, 'language') and engine.language:
                request_params['language'] = engine.language
            else:
                request_params['language'] = search_query.lang

            # 0 = None, 1 = Moderate, 2 = Strict
            request_params['safesearch'] = search_query.safesearch
            request_params['time_range'] = search_query.time_range

            # update request parameters dependent on
            # search-engine (contained in engines folder)
            engine.request(search_query.query.encode('utf-8'), request_params)

            if request_params['url'] is None:
                # TODO add support of offline engines
                pass

            # create a callback wrapper for the search engine results
            callback = make_callback(selected_engine['name'], engine.response,
                                     request_params, self.result_container)

            # create a dictionary which contains all
            # information about the request
            request_args = dict(headers=request_params['headers'],
                                hooks=dict(response=callback),
                                cookies=request_params['cookies'],
                                timeout=engine.timeout,
                                verify=request_params['verify'])

            # specific type of request (GET or POST)
            if request_params['method'] == 'GET':
                req = requests_lib.get
            else:
                req = requests_lib.post
                request_args['data'] = request_params['data']

            # ignoring empty urls
            if not request_params['url']:
                continue

            # append request to list
            requests.append((req, request_params['url'], request_args,
                             selected_engine['name']))

        if not requests:
            return self.result_container
        # send all search-request
        threaded_requests(requests)
        start_new_thread(gc.collect, tuple())

        # return results, suggestions, answers and infoboxes
        return self.result_container
Example #16
class Search(object):

    """Search information container"""

    def __init__(self, search_query):
        # init vars
        super(Search, self).__init__()
        self.search_query = search_query
        self.result_container = ResultContainer()

    # do search-request
    def search(self):
        global number_of_searches

        # start time
        start_time = time()

        # answers?
        answerers_results = ask(self.search_query)

        if answerers_results:
            for results in answerers_results:
                self.result_container.extend('answer', results)
            return self.result_container

        # init vars
        requests = []

        # increase number of searches
        number_of_searches += 1

        # set default useragent
        # user_agent = request.headers.get('User-Agent', '')
        user_agent = gen_useragent()

        search_query = self.search_query

        # max of all selected engine timeout
        timeout_limit = 0

        # start search-request for all selected engines
        for selected_engine in search_query.engines:
            if selected_engine['name'] not in engines:
                continue

            engine = engines[selected_engine['name']]

            # skip suspended engines
            if engine.suspend_end_time >= time():
                logger.debug('Engine currently suspended: %s', selected_engine['name'])
                continue

            # if paging is not supported, skip
            if search_query.pageno > 1 and not engine.paging:
                continue

            # if time_range is not supported, skip
            if search_query.time_range and not engine.time_range_support:
                continue

            # set default request parameters
            request_params = default_request_params()
            request_params['headers']['User-Agent'] = user_agent
            request_params['category'] = selected_engine['category']
            request_params['pageno'] = search_query.pageno

            if hasattr(engine, 'language') and engine.language:
                request_params['language'] = engine.language
            else:
                request_params['language'] = search_query.lang

            # 0 = None, 1 = Moderate, 2 = Strict
            request_params['safesearch'] = search_query.safesearch
            request_params['time_range'] = search_query.time_range

            # append request to list
            requests.append((selected_engine['name'], search_query.query, request_params))

            # update timeout_limit
            timeout_limit = max(timeout_limit, engine.timeout)

        if requests:
            # send all search-request
            search_multiple_requests(requests, self.result_container, start_time, timeout_limit)
            start_new_thread(gc.collect, tuple())

        # return results, suggestions, answers and infoboxes
        return self.result_container
Example #17
 def __init__(self, search_query):
     # init vars
     super(Search, self).__init__()
     self.search_query = search_query
     self.result_container = ResultContainer()
Example #18
 def test_empty(self):
     c = ResultContainer()
     self.assertEqual(c.get_ordered_results(), [])
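The test snippets above (Examples #4, #8, #9 and #10) rely on a fake_result helper from searx's test suite that is not shown on this page; a minimal sketch of what such a helper might look like (field names and defaults are assumptions, not the actual test code):

def fake_result(url='https://aa.bb/cc?dd=ee#ff', title='aaa',
                content='bbb', engine='wikipedia', **kwargs):
    """Build a minimal result dict as accepted by ResultContainer.extend()."""
    result = {'url': url, 'title': title, 'content': content, 'engine': engine}
    # extra keyword arguments (e.g. suggestion=True) end up as result fields
    result.update(kwargs)
    return result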