def run():
    """Run every engine's checker once and publish the aggregated result.

    Returns immediately when another run is already in progress (the
    ``running`` lock is acquired non-blocking); the lock is always
    released on exit.  Publishes ``{'status': 'error'}`` when any
    unexpected exception escapes the check loop.
    """
    if not running.acquire(blocking=False):
        # another check is already in flight — do not start a second one
        return
    try:
        logger.info('Starting checker')
        engine_reports = {}
        for engine_name, engine_processor in processors.items():
            logger.debug('Checking %s engine', engine_name)
            engine_checker = Checker(engine_processor)
            engine_checker.run()
            outcome = engine_checker.test_results
            # NOTE: 'succesfull' (sic) is the attribute name declared by the
            # project's test-results class; do not "correct" the spelling here.
            if outcome.succesfull:
                engine_reports[engine_name] = {'success': True}
            else:
                engine_reports[engine_name] = {
                    'success': False,
                    'errors': outcome.errors,
                }
        _set_result({'status': 'ok', 'engines': engine_reports})
        logger.info('Check done')
    except Exception:
        # broad catch is deliberate: the checker must never crash the host
        # process — record the failure and log the traceback instead
        _set_result({'status': 'error'})
        logger.exception('Error while running the checker')
    finally:
        running.release()
def load_https_rules(rules_path):
    """Load every HTTPS rewrite ruleset found in *rules_path*.

    Scans the directory (non-recursively) for ``*.xml`` files, parses each
    one with ``load_single_https_ruleset()`` and appends every non-empty
    result to the module-level ``https_rules`` list.  Logs an error and
    returns early when the directory does not exist.
    """
    # check if directory exists
    if not isdir(rules_path):
        # lazy %-style args instead of eager string concatenation
        logger.error("directory not found: '%s'", rules_path)
        return

    # collect all xml files stored in the https rule directory
    # (str.endswith is clearer and safer than the f[-4:] slice it replaces)
    xml_files = [
        join(rules_path, f)
        for f in listdir(rules_path)
        if isfile(join(rules_path, f)) and f.endswith('.xml')
    ]

    # load xml files and calculate the rewrite rules
    for ruleset_file in xml_files:
        ruleset = load_single_https_ruleset(ruleset_file)

        # skip if no ruleset was returned (parse failure or empty ruleset)
        if not ruleset:
            continue

        https_rules.append(ruleset)

    logger.info('%d rules loaded', len(https_rules))
def request(query, params):
    """Build the POST search request for this engine.

    URL-encodes the query, strips the first recognized keyword from it
    (case-insensitively) and places the remainder in the request's
    ``filters`` payload.
    """
    # urlencode({'q': ...}) yields 'q=<encoded>'; [2:] drops the 'q=' prefix
    query = urlencode({"q": query})[2:]
    parsed_query = query

    lowered = query.lower()
    matches = [x for x in keywords if x in lowered]
    if matches:
        # BUG FIX: the keyword was matched against the lowered query but
        # removed with a case-sensitive replace(), so a keyword whose case
        # differed from the raw query was never stripped.  Remove it by
        # position instead, which is case-insensitive by construction.
        idx = lowered.find(matches[0])
        parsed_query = query[:idx] + query[idx + len(matches[0]):]

    params["method"] = "POST"
    params["url"] = search_url
    params["data"]["filters"] = {"name": parsed_query}
    # NOTE(review): logging the full params dict at INFO level looks like a
    # debugging leftover — consider demoting to logger.debug
    logger.info(params)
    return params
def initialize():
    """Install the checker's SIGUSR1 handler and schedule periodic runs.

    The checker stays disabled when running in debug mode (unless
    ``checker.off_when_debug`` is set to false) or when no
    ``checker.scheduling`` section is configured; otherwise the first run
    is scheduled after a random delay inside the configured window.
    """
    logger.info('Send SIGUSR1 signal to pid %i to start the checker', os.getpid())
    signal.signal(signal.SIGUSR1, _signal_handler)

    # disabled by default
    _set_result({'status': 'disabled'}, include_timestamp=False)

    # special case when debug is activated
    if searx_debug and settings.get('checker', {}).get('off_when_debug', True):
        logger.info('debug mode: checker is disabled')
        return

    # check value of checker.scheduling now
    scheduling = settings.get('checker', {}).get('scheduling', None)
    # `not scheduling` already covers None — the original
    # `scheduling is None or not scheduling` was redundant
    if not scheduling:
        logger.info('Checker scheduler is disabled')
        return

    # schedule the first check after a random delay inside the configured
    # [start_after[0], start_after[1]] window (defaults to 5-30 minutes)
    start_after = scheduling.get('start_after', (300, 1800))
    start_after = _get_interval(start_after, 'checker.scheduling.start_after is not a int or list')
    delay = random.randint(start_after[0], start_after[1])
    logger.info('Start checker in %i seconds', delay)
    t = threading.Timer(delay, _start_scheduling)
    t.daemon = True
    t.start()
from searx.answerers import ask
from searx.external_bang import get_bang_url
from searx.results import ResultContainer
from searx import logger
from searx.plugins import plugins
from searx.search.models import EngineRef, SearchQuery
from searx.search.processors import processors, initialize as initialize_processors
from searx.search.checker import initialize as initialize_checker
from searx.metrics import initialize as initialize_metrics, counter_inc, histogram_observe_time

logger = logger.getChild('search')

# BUG FIX: the original read .get('max_request_timeout' or None) — the
# `or None` inside the argument is a no-op on a truthy string literal;
# dict.get already defaults to None for a missing key.
max_request_timeout = settings.get('outgoing', {}).get('max_request_timeout')
if max_request_timeout is None:
    logger.info('max_request_timeout={0}'.format(max_request_timeout))
else:
    if isinstance(max_request_timeout, float):
        logger.info('max_request_timeout={0} second(s)'.format(max_request_timeout))
    else:
        # a misconfigured value is fatal: abort at import time
        logger.critical('outgoing.max_request_timeout if defined has to be float')
        import sys
        sys.exit(1)


def initialize(settings_engines=None, enable_checker=False):
    """Initialize the search package: engine processors, per-engine metrics
    and, when *enable_checker* is true, the engine checker."""
    settings_engines = settings_engines or settings['engines']
    initialize_processors(settings_engines)
    initialize_metrics([engine['name'] for engine in settings_engines])
    if enable_checker:
        initialize_checker()
from searx.results import ResultContainer from searx import logger from searx.plugins import plugins from searx.exceptions import ( SearxEngineAccessDeniedException, SearxEngineCaptchaException, SearxEngineTooManyRequestsException, ) from searx.metrology.error_recorder import record_exception, record_error logger = logger.getChild("search") max_request_timeout = settings.get("outgoing", {}).get("max_request_timeout" or None) if max_request_timeout is None: logger.info("max_request_timeout={0}".format(max_request_timeout)) else: if isinstance(max_request_timeout, float): logger.info("max_request_timeout={0} second(s)".format(max_request_timeout)) else: logger.critical("outgoing.max_request_timeout if defined has to be float") import sys sys.exit(1) class EngineRef: __slots__ = "name", "category", "from_bang" def __init__(self, name: str, category: str, from_bang: bool = False):