def search_answerers(self):
    """Check whether an answerer returns a result.
    If yes, update self.result_container and return True.
    """
    answerers_results = ask(self.search_query)

    if answerers_results:
        for results in answerers_results:
            self.result_container.extend('answer', results)
        return True
    return False
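# Hedged illustration (not part of searx): the code above and below consumes the
# value returned by ask() as a list of per-answerer result lists, each result being
# a dict with at least an 'answer' key (the autocompleter reads str(answer['answer'])).
# A minimal, hypothetical stand-in showing only that assumed shape:
def _fake_ask(search_query):
    """Hypothetical stand-in for ask(); illustrates the assumed return shape only."""
    if search_query and search_query.query:
        return [[{'answer': 'example answer text'}]]
    return []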
def autocompleter():
    """Return autocompleter results"""
    # run autocompleter
    results = []

    # get disabled engines
    disabled_engines = request.preferences.engines.get_disabled()

    # parse query
    raw_text_query = RawTextQuery(request.form.get('q', ''), disabled_engines)
    sug_prefix = raw_text_query.getQuery()

    # normal autocompletion results only appear if no inner results were returned
    # and there is a query part
    if len(raw_text_query.autocomplete_list) == 0 and len(sug_prefix) > 0:

        # get language from cookie
        language = request.preferences.get_value('language')
        if not language or language == 'all':
            language = 'en'
        else:
            language = language.split('-')[0]

        # run autocompletion
        raw_results = search_autocomplete(
            request.preferences.get_value('autocomplete'), sug_prefix, language)
        for result in raw_results:
            # attention: this loop changes the raw_text_query object, which is
            # why sug_prefix was stored beforehand (see above)
            results.append(raw_text_query.changeQuery(result).getFullQuery())

    if len(raw_text_query.autocomplete_list) > 0:
        for autocomplete_text in raw_text_query.autocomplete_list:
            results.append(raw_text_query.get_autocomplete_full_query(autocomplete_text))

    for answers in ask(raw_text_query):
        for answer in answers:
            results.append(str(answer['answer']))

    if request.headers.get('X-Requested-With') == 'XMLHttpRequest':
        # the suggestion request comes from the searx search form
        suggestions = json.dumps(results)
        mimetype = 'application/json'
    else:
        # the suggestion request comes from the browser's URL bar
        suggestions = json.dumps([sug_prefix, results])
        mimetype = 'application/x-suggestions+json'

    return Response(suggestions, mimetype=mimetype)
def autocompleter():
    """Return autocompleter results"""
    # run autocompleter
    results = []

    # get disabled engines
    disabled_engines = request.preferences.engines.get_disabled()

    # parse query
    raw_text_query = RawTextQuery(request.form.get('q', ''), disabled_engines)

    # normal autocompletion results only appear if no inner results were returned
    # and there is a query part
    if len(raw_text_query.autocomplete_list) == 0 and len(raw_text_query.getQuery()) > 0:

        # get language from cookie
        language = request.preferences.get_value('language')
        if not language or language == 'all':
            language = 'en'
        else:
            language = language.split('-')[0]

        # run autocompletion
        raw_results = search_autocomplete(
            request.preferences.get_value('autocomplete'),
            raw_text_query.getQuery(), language)
        for result in raw_results:
            results.append(raw_text_query.changeQuery(result).getFullQuery())

    if len(raw_text_query.autocomplete_list) > 0:
        for autocomplete_text in raw_text_query.autocomplete_list:
            results.append(raw_text_query.get_autocomplete_full_query(autocomplete_text))

    for answers in ask(raw_text_query):
        for answer in answers:
            results.append(str(answer['answer']))

    # return autocompleter results
    if request.headers.get('X-Requested-With') == 'XMLHttpRequest':
        return Response(json.dumps(results), mimetype='application/json')

    return Response(json.dumps([raw_text_query.query, results]),
                    mimetype='application/x-suggestions+json')
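# For reference: both autocompleter variants above emit one of two payload shapes.
# The plain JSON list is served to the searx search form (XMLHttpRequest), while the
# 'application/x-suggestions+json' variant follows the OpenSearch suggestions
# convention of [query, [completions]] consumed by browser URL bars. The example
# values below are illustrative only:
#
#   X-Requested-With: XMLHttpRequest
#       ["paris weather", "paris wiki"]
#
#   browser URL bar (OpenSearch suggestions)
#       ["paris w", ["paris weather", "paris wiki"]]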
def search(self):
    global number_of_searches

    # start time
    start_time = time()

    # answers?
    answerers_results = ask(self.search_query)

    if answerers_results:
        for results in answerers_results:
            self.result_container.extend('answer', results)
        return self.result_container

    # init vars
    requests = []

    # increase number of searches
    number_of_searches += 1

    # set default useragent
    # user_agent = request.headers.get('User-Agent', '')
    user_agent = gen_useragent()

    search_query = self.search_query

    # max of all selected engine timeouts
    timeout_limit = 0

    # start search requests for all selected engines
    for selected_engine in search_query.engines:
        if selected_engine['name'] not in engines:
            continue

        engine = engines[selected_engine['name']]

        # skip suspended engines
        if engine.suspend_end_time >= time():
            logger.debug('Engine currently suspended: %s', selected_engine['name'])
            continue

        # if paging is not supported, skip
        if search_query.pageno > 1 and not engine.paging:
            continue

        # if time_range is not supported, skip
        if search_query.time_range and not engine.time_range_support:
            continue

        # set default request parameters
        request_params = default_request_params()
        request_params['headers']['User-Agent'] = user_agent
        request_params['category'] = selected_engine['category']
        request_params['pageno'] = search_query.pageno

        if hasattr(engine, 'language') and engine.language:
            request_params['language'] = engine.language
        else:
            request_params['language'] = search_query.lang

        # 0 = None, 1 = Moderate, 2 = Strict
        request_params['safesearch'] = search_query.safesearch
        request_params['time_range'] = search_query.time_range

        # append request to list
        requests.append((selected_engine['name'], search_query.query, request_params))

        # update timeout_limit
        timeout_limit = max(timeout_limit, engine.timeout)

    if requests:
        # send all search requests
        search_multiple_requests(requests, self.result_container, start_time, timeout_limit)
        start_new_thread(gc.collect, tuple())

    # return results, suggestions, answers and infoboxes
    return self.result_container
def search(self):
    global number_of_searches

    # Check if there is an external bang. If so, we can stop because the search will terminate.
    if self.search_query.external_bang:
        self.result_container.redirect_url = get_bang_url(self.search_query)

        # This means there was a valid bang and the
        # rest of the search does not need to be continued
        if isinstance(self.result_container.redirect_url, str):
            return self.result_container

    # start time
    start_time = time()

    # answers?
    answerers_results = ask(self.search_query)

    if answerers_results:
        for results in answerers_results:
            self.result_container.extend('answer', results)
        return self.result_container

    # init vars
    requests = []

    # increase number of searches
    number_of_searches += 1

    # set default useragent
    # user_agent = request.headers.get('User-Agent', '')
    user_agent = gen_useragent()

    search_query = self.search_query

    # max of all selected engine timeouts
    default_timeout = 0

    # start search requests for all selected engines
    for selected_engine in search_query.engines:
        if selected_engine['name'] not in engines:
            continue

        engine = engines[selected_engine['name']]

        if not search_query.preferences.validate_token(engine):
            continue

        # skip suspended engines
        if engine.suspend_end_time >= time():
            logger.debug('Engine currently suspended: %s', selected_engine['name'])
            continue

        # if paging is not supported, skip
        if search_query.pageno > 1 and not engine.paging:
            continue

        # if time_range is not supported, skip
        if search_query.time_range and not engine.time_range_support:
            continue

        # set default request parameters
        request_params = {}
        if not engine.offline:
            request_params = default_request_params()
            request_params['headers']['User-Agent'] = user_agent

            if hasattr(engine, 'language') and engine.language:
                request_params['language'] = engine.language
            else:
                request_params['language'] = search_query.lang

            request_params['safesearch'] = search_query.safesearch
            request_params['time_range'] = search_query.time_range

        request_params['category'] = selected_engine['category']
        request_params['pageno'] = search_query.pageno

        # append request to list
        requests.append((selected_engine['name'], search_query.query, request_params))

        # update default_timeout
        default_timeout = max(default_timeout, engine.timeout)

    # adjust timeout
    self.actual_timeout = default_timeout
    query_timeout = self.search_query.timeout_limit

    if max_request_timeout is None and query_timeout is None:
        # No max, no user query: default_timeout
        pass
    elif max_request_timeout is None and query_timeout is not None:
        # No max, but user query: from user query unless above default
        self.actual_timeout = min(default_timeout, query_timeout)
    elif max_request_timeout is not None and query_timeout is None:
        # Max, no user query: default unless above max
        self.actual_timeout = min(default_timeout, max_request_timeout)
    elif max_request_timeout is not None and query_timeout is not None:
        # Max & user query: from user query unless above max
        self.actual_timeout = min(query_timeout, max_request_timeout)

    logger.debug("actual_timeout={0} (default_timeout={1}, ?timeout_limit={2}, max_request_timeout={3})"
                 .format(self.actual_timeout, default_timeout, query_timeout, max_request_timeout))

    # send all search requests
    if requests:
        search_multiple_requests(requests, self.result_container, start_time, self.actual_timeout)
        start_new_thread(gc.collect, tuple())

    # return results, suggestions, answers and infoboxes
    return self.result_container
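# The four-branch timeout adjustment above can be restated more compactly: without a
# global cap, a user-supplied timeout may only lower the engines' default; with a cap
# configured, the user value (or the default) may not exceed it. A hedged, standalone
# restatement for illustration; _resolve_timeout is not part of searx:
def _resolve_timeout(default_timeout, query_timeout, max_request_timeout):
    """Hypothetical helper mirroring the timeout logic in search() above."""
    if max_request_timeout is None:
        # no global cap: the user value may only lower the engines' default
        return default_timeout if query_timeout is None else min(default_timeout, query_timeout)
    # global cap configured: clamp the user value (or the default) to the cap
    return min(default_timeout if query_timeout is None else query_timeout, max_request_timeout)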
def search(self):
    global number_of_searches

    # start time
    start_time = time()

    # answers?
    answerers_results = ask(self.search_query)

    if answerers_results:
        for results in answerers_results:
            self.result_container.extend('answer', results)
        return self.result_container

    # init vars
    requests = []

    # increase number of searches
    number_of_searches += 1

    # set default useragent
    # user_agent = request.headers.get('User-Agent', '')
    user_agent = gen_useragent()

    search_query = self.search_query

    # max of all selected engine timeouts
    timeout_limit = 0

    # start search requests for all selected engines
    for selected_engine in search_query.engines:
        if selected_engine['name'] not in engines:
            continue

        engine = engines[selected_engine['name']]

        # skip suspended engines
        if engine.suspend_end_time >= time():
            logger.debug('Engine currently suspended: %s', selected_engine['name'])
            continue

        # if paging is not supported, skip
        if search_query.pageno > 1 and not engine.paging:
            continue

        # if time_range is not supported, skip
        if search_query.time_range and not engine.time_range_support:
            continue

        # set default request parameters
        request_params = default_request_params()
        request_params['headers']['User-Agent'] = user_agent
        request_params['category'] = selected_engine['category']
        request_params['pageno'] = search_query.pageno

        if hasattr(engine, 'language') and engine.language:
            request_params['language'] = engine.language
        else:
            request_params['language'] = search_query.lang

        # 0 = None, 1 = Moderate, 2 = Strict
        request_params['safesearch'] = search_query.safesearch
        request_params['time_range'] = search_query.time_range

        # append request to list
        requests.append((selected_engine['name'],
                         search_query.query.encode('utf-8'),
                         request_params))

        # update timeout_limit
        timeout_limit = max(timeout_limit, engine.timeout)

    if requests:
        # send all search requests
        search_multiple_requests(requests, self.result_container, start_time, timeout_limit)
        start_new_thread(gc.collect, tuple())

    # return results, suggestions, answers and infoboxes
    return self.result_container
def search(self):
    global number_of_searches

    answerers_results = ask(self.search_query)

    if answerers_results:
        for results in answerers_results:
            self.result_container.extend('answer', results)
        return self.result_container

    # init vars
    requests = []

    # increase number of searches
    number_of_searches += 1

    # set default useragent
    # user_agent = request.headers.get('User-Agent', '')
    user_agent = gen_useragent()

    search_query = self.search_query

    # start search requests for all selected engines
    for selected_engine in search_query.engines:
        if selected_engine['name'] not in engines:
            continue

        engine = engines[selected_engine['name']]

        # skip suspended engines
        if engine.suspend_end_time >= time():
            logger.debug('Engine currently suspended: %s', selected_engine['name'])
            continue

        # if paging is not supported, skip
        if search_query.pageno > 1 and not engine.paging:
            continue

        # if a search language is set and the engine does not
        # provide language support, skip
        if search_query.lang != 'all' and not engine.language_support:
            continue

        # if time_range is not supported, skip
        if search_query.time_range and not engine.time_range_support:
            continue

        # set default request parameters
        request_params = default_request_params()
        request_params['headers']['User-Agent'] = user_agent
        request_params['category'] = selected_engine['category']
        request_params['started'] = time()
        request_params['pageno'] = search_query.pageno

        if hasattr(engine, 'language') and engine.language:
            request_params['language'] = engine.language
        else:
            request_params['language'] = search_query.lang

        # 0 = None, 1 = Moderate, 2 = Strict
        request_params['safesearch'] = search_query.safesearch
        request_params['time_range'] = search_query.time_range

        # update request parameters dependent on the
        # search engine (contained in the engines folder)
        engine.request(search_query.query.encode('utf-8'), request_params)

        if request_params['url'] is None:
            # TODO add support for offline engines
            pass

        # create a callback wrapper for the search engine results
        callback = make_callback(selected_engine['name'],
                                 engine.response,
                                 request_params,
                                 self.result_container)

        # create a dictionary which contains all
        # information about the request
        request_args = dict(headers=request_params['headers'],
                            hooks=dict(response=callback),
                            cookies=request_params['cookies'],
                            timeout=engine.timeout,
                            verify=request_params['verify'])

        # specific type of request (GET or POST)
        if request_params['method'] == 'GET':
            req = requests_lib.get
        else:
            req = requests_lib.post
            request_args['data'] = request_params['data']

        # ignore empty urls
        if not request_params['url']:
            continue

        # append request to list
        requests.append((req, request_params['url'], request_args, selected_engine['name']))

    if not requests:
        return self.result_container

    # send all search requests
    threaded_requests(requests)
    start_new_thread(gc.collect, tuple())

    # return results, suggestions, answers and infoboxes
    return self.result_container