def test_initialize_engines_include_onions(self):
    """When Tor proxying is on, onion engines are kept and the configured
    extra proxy timeout is added on top of the engine's own timeout."""
    settings['outgoing']['using_tor_proxy'] = True
    settings['outgoing']['extra_proxy_timeout'] = 100.0
    engine_with_onion_url = {
        'engine': 'dummy',
        'name': 'engine1',
        'shortcut': 'e1',
        'categories': 'general',
        'timeout': 20.0,
        'onion_url': 'http://engine1.onion',
    }
    onion_category_engine = {
        'engine': 'dummy',
        'name': 'engine2',
        'shortcut': 'e2',
        'categories': 'onions',
    }
    engines.initialize_engines([engine_with_onion_url, onion_category_engine])

    self.assertEqual(len(engines.engines), 2)
    for expected_name in ('engine1', 'engine2'):
        self.assertIn(expected_name, engines.engines)
    self.assertIn('onions', engines.categories)
    self.assertIn('http://engine1.onion', engines.engines['engine1'].search_url)
    # 20.0 (engine timeout) + 100.0 (extra proxy timeout)
    self.assertEqual(engines.engines['engine1'].timeout, 120.0)
def setUpClass(cls):
    """Register a single dummy engine once, before any test in this class runs."""
    initialize_engines([{'engine': 'dummy', 'name': 'engine1', 'shortcut': 'e1'}])
def initialize(engine_list):
    """Initialize the engines from *engine_list* and register a processor
    for each one in :py:obj:`processors`; engines without a usable
    processor are logged and skipped."""
    engines.initialize_engines(engine_list)
    for name, engine in engines.engines.items():
        processor = get_processor(engine, name)
        if processor is None:
            # keep going: one broken engine must not block the others
            logger.error('Error get processor for engine %s', name)
            continue
        processors[name] = processor
def initialize(engine_list):
    """Initialize all engines and store a processor for each engine in
    :py:obj:`processors`."""
    engines.initialize_engines(engine_list)
    for engine_name in engines.engines:
        engine = engines.engines[engine_name]
        proc = get_processor(engine, engine_name)
        if proc is not None:
            processors[engine_name] = proc
        else:
            logger.error('Error get processor for engine %s', engine_name)
def fetch_supported_languages():
    """Collect the supported languages of every engine and persist them.

    For each initialized engine that implements
    ``fetch_supported_languages``, store its result in the module-level
    ``engines_languages`` mapping, then write the mapping to
    ``engines_languages_file`` as JSON.

    Returns the ``engines_languages`` mapping.  (The ``__main__`` driver
    assigns this function's return value, so an explicit return is
    required — previously it implicitly returned ``None``.)
    """
    initialize_engines(settings['engines'])
    for engine_name in engines:
        if hasattr(engines[engine_name], 'fetch_supported_languages'):
            try:
                engines_languages[engine_name] = engines[engine_name].fetch_supported_languages()
            except Exception as e:
                # best effort: a single failing engine must not abort the run,
                # but report which engine failed, not just the bare exception
                print('%s: %s' % (engine_name, e))

    # write json file
    with io.open(engines_languages_file, "w", encoding="utf-8") as f:
        dump(engines_languages, f, ensure_ascii=False)

    return engines_languages
def fetch_supported_languages():
    """Collect the supported languages of every engine and persist them.

    For each initialized engine that implements
    ``fetch_supported_languages``, store its result in the module-level
    ``engines_languages`` mapping, then write the mapping to
    ``engines_languages_file`` as JSON.

    Fixed for Python 3: the original used the Python 2 ``print e``
    statement and the ``unicode`` builtin (a SyntaxError / NameError
    under Python 3), and passed the ``encoding`` keyword to
    ``json.dumps``, which Python 3 does not accept.
    """
    initialize_engines(settings['engines'])
    for engine_name in engines:
        if hasattr(engines[engine_name], 'fetch_supported_languages'):
            try:
                engines_languages[engine_name] = engines[engine_name].fetch_supported_languages()
            except Exception as e:
                # best effort: a single failing engine must not abort the run
                print(e)
    # write json file
    with io.open(engines_languages_file, "w", encoding="utf-8") as f:
        # dumps() already returns str (unicode) on Python 3
        f.write(dumps(engines_languages, ensure_ascii=False))
def test_initialize_engines_default(self):
    """Both dummy engines from the settings list end up registered."""
    engines.initialize_engines([
        {'engine': 'dummy', 'name': 'engine1', 'shortcut': 'e1'},
        {'engine': 'dummy', 'name': 'engine2', 'shortcut': 'e2'},
    ])
    self.assertEqual(len(engines.engines), 2)
    for expected_name in ('engine1', 'engine2'):
        self.assertIn(expected_name, engines.engines)
def test_initialize_engines_exclude_onions(self):
    """Without a Tor proxy, engines in the 'onions' category are dropped."""
    settings['outgoing']['using_tor_proxy'] = False
    clearnet_engine = {
        'engine': 'dummy',
        'name': 'engine1',
        'shortcut': 'e1',
        'categories': 'general',
    }
    onion_engine = {
        'engine': 'dummy',
        'name': 'engine2',
        'shortcut': 'e2',
        'categories': 'onions',
    }
    engines.initialize_engines([clearnet_engine, onion_engine])

    self.assertEqual(len(engines.engines), 1)
    self.assertIn('engine1', engines.engines)
    self.assertNotIn('onions', engines.categories)
previous_code = current_code  # NOTE(review): looks like the tail of a preceding function — confirm placement/indentation in the original file


# Write languages.py.
def write_languages_file(languages):
    """Generate the ``languages.py`` module from the *languages* mapping.

    Writes a ``language_codes`` tuple of
    ``(code, name, country, english_name)`` entries to ``languages_file``.

    Fixes: the output file is now opened with a context manager (the
    original leaked the handle on any exception between ``open`` and
    ``close``), and the content is assembled with ``str.join`` instead of
    repeated quadratic ``+=`` concatenation.  The bytes written are
    unchanged.
    """
    header = '# -*- coding: utf-8 -*-\n'\
        + '# list of language codes\n'\
        + '# this file is generated automatically by utils/update_search_languages.py\n'\
        + '\nlanguage_codes = ('
    entries = []
    for code in sorted(languages):
        lang = languages[code]
        entries.append(
            '\n (u"' + code + '"'
            + ', u"' + lang['name'].split(' (')[0] + '"'
            + ', u"' + lang.get('country', '') + '"'
            + ', u"' + lang.get('english_name', '').split(' (')[0] + '"),'
        )
    # drop the trailing comma (same slice the original applied)
    file_content = (header + ''.join(entries))[:-1]
    file_content += '\n)\n'
    with open(languages_file, 'wb') as new_file:
        new_file.write(file_content.encode('utf8'))


if __name__ == "__main__":
    initialize_engines(settings['engines'])
    engines_languages = fetch_supported_languages()
    all_languages = join_language_lists(engines_languages)
    filtered_languages = filter_language_list(all_languages)
    assign_country_codes(filtered_languages, all_languages)
    write_languages_file(filtered_languages)
# Module-level Flask application setup.
# NOTE(review): this fragment is truncated mid-expression (the
# `_category_names` tuple is unterminated here) — the remainder lives
# outside this view, so the code is kept verbatim.
# Engines are initialized only in the "real" worker process: when not in
# debug mode, or in werkzeug's reloader child (WERKZEUG_RUN_MAIN), or
# under uwsgi (UWSGI_ORIGINAL_PROC_NAME) — presumably to avoid a double
# init in the reloader parent; confirm against the deployment docs.
# Flask app app = Flask( __name__, static_folder=static_path, template_folder=templates_path ) app.jinja_env.trim_blocks = True app.jinja_env.lstrip_blocks = True app.secret_key = settings['server']['secret_key'] if not searx_debug \ or os.environ.get("WERKZEUG_RUN_MAIN") == "true" \ or os.environ.get('UWSGI_ORIGINAL_PROC_NAME') is not None: initialize_engines(settings['engines']) babel = Babel(app) rtl_locales = ['ar', 'arc', 'bcc', 'bqi', 'ckb', 'dv', 'fa', 'glk', 'he', 'ku', 'mzn', 'pnb', 'ps', 'sd', 'ug', 'ur', 'yi'] # used when translating category names _category_names = (gettext('files'), gettext('general'), gettext('music'), gettext('social media'), gettext('images'), gettext('videos'), gettext('it'), gettext('news'),
# Module-level reloader detection and conditional engine initialization.
# NOTE(review): this fragment is truncated mid-expression (the
# `rtl_locales` list is unterminated here) — the remainder lives outside
# this view, so the code is kept verbatim.
# `werkzeug_reloader` is True when either "flask run" in development mode
# or the debug-mode reloader is active; engines are then initialized only
# in the reloader's child process (WERKZEUG_RUN_MAIN == "true") so the
# parent watcher process skips the expensive init.
# see https://flask.palletsprojects.com/en/1.1.x/cli/ # True if "FLASK_APP=searx/webapp.py FLASK_ENV=development flask run" flask_run_development = (os.environ.get("FLASK_APP") is not None and os.environ.get("FLASK_ENV") == "development" and is_flask_run_cmdline()) # True if reload feature is activated of werkzeug, False otherwise (including uwsgi, etc..) # __name__ != "__main__" if searx.webapp is imported (make test, make docs, uwsgi...) # see run() at the end of this file : searx_debug activates the reload feature. werkzeug_reloader = flask_run_development or (searx_debug and __name__ == "__main__") # initialize the engines except on the first run of the werkzeug server. if not werkzeug_reloader or (werkzeug_reloader and os.environ.get("WERKZEUG_RUN_MAIN") == "true"): initialize_engines(settings["engines"]) babel = Babel(app) rtl_locales = [ "ar", "arc", "bcc", "bqi", "ckb", "dv", "fa", "fa_IR", "glk", "he", "ku",