def load_rule_changes(self):
    """Sync the running rules with the files in rules_folder.

    Compares the stored hash of every loaded rule file against the
    current on-disk hashes and removes, reloads or adds rules so the
    running set matches the folder contents.

    Side effects: mutates self.rules and replaces self.rule_hashes.
    """
    rule_hashes = get_rule_hashes(self.conf)

    # Check each currently loaded rule for deletion or modification.
    # NOTE: .iteritems() was Python-2-only; .items() behaves identically
    # here on both Python 2 and 3.
    for rule_file, hash_value in self.rule_hashes.items():
        if rule_file not in rule_hashes:
            # Rule file was deleted: drop its rule from the running set
            logging.info('Rule file %s not found, stopping rule execution' % (rule_file))
            self.rules = [
                rule for rule in self.rules if rule['rule_file'] != rule_file
            ]
            continue
        if hash_value != rule_hashes[rule_file]:
            # Rule file was changed, reload rule
            try:
                new_rule = load_configuration(
                    os.path.join(self.conf['rules_folder'], rule_file))
            except EAException as e:
                self.handle_error('Could not load rule %s: %s' % (rule_file, e))
                continue
            logging.info("Reloading configuration for rule %s" % (rule_file))
            # Re-initialize only the rule whose file changed; all others
            # pass through untouched
            self.rules = [
                rule if rule['rule_file'] != rule_file else self.init_rule(
                    new_rule, False) for rule in self.rules
            ]

    # Load rule files that appeared since the last scan, unless a single
    # rule was pinned on the command line (--rule)
    if not self.args.rule:
        for rule_file in set(rule_hashes.keys()) - set(
                self.rule_hashes.keys()):
            try:
                new_rule = load_configuration(
                    os.path.join(self.conf['rules_folder'], rule_file))
            except EAException as e:
                self.handle_error('Could not load rule %s: %s' % (rule_file, e))
                continue
            logging.info('Loaded new rule %s' % (rule_file))
            self.rules.append(self.init_rule(new_rule))

    # Remember the hashes we just synced against for the next call
    self.rule_hashes = rule_hashes
def create_app(config_mode):
    """Build and return a configured Flask application instance.

    Loads the settings named by ``config_mode``, wires up the core
    tooling and registers all blueprints before handing the app back.
    """
    flask_app = Flask(
        __name__,
        instance_relative_config=True,
        template_folder='../static/templates',
        static_folder='../static',
    )
    # Honour X-Forwarded-* headers when running behind a reverse proxy
    flask_app.wsgi_app = ProxyFix(flask_app.wsgi_app)
    load_configuration(flask_app, config_mode)
    setup_core_tools(flask_app)
    # celery = make_celery(flask_app)
    _setup_blueprints(flask_app)
    return flask_app
def test_load_configuration_with_empty_envvars(self):
    """All-empty environment variables should yield blank settings."""
    scalar_keys = (
        "QUERIDO_DIARIO_ELASTICSEARCH_HOST",
        "QUERIDO_DIARIO_ELASTICSEARCH_INDEX",
        "QUERIDO_DIARIO_API_ROOT_PATH",
        "QUERIDO_DIARIO_URL_PREFIX",
    )
    list_keys = (
        "QUERIDO_DIARIO_CORS_ALLOW_ORIGINS",
        "QUERIDO_DIARIO_CORS_ALLOW_METHODS",
        "QUERIDO_DIARIO_CORS_ALLOW_HEADERS",
    )
    # Scalar settings fall back to "", list settings to [""], and the
    # CORS credentials flag defaults to True
    expected_config_dict = {key: "" for key in scalar_keys}
    expected_config_dict.update({key: [""] for key in list_keys})
    expected_config_dict["QUERIDO_DIARIO_CORS_ALLOW_CREDENTIALS"] = True
    configuration = load_configuration()
    self.check_configuration_values(configuration, expected_config_dict)
def test_load_configuration_with_envvars_defined(self):
    """Defined environment variables should flow through to the config."""
    expected_config_dict = dict(
        QUERIDO_DIARIO_ELASTICSEARCH_HOST="000.0.0.0",
        QUERIDO_DIARIO_ELASTICSEARCH_INDEX="myindex",
        QUERIDO_DIARIO_API_ROOT_PATH="api/",
        QUERIDO_DIARIO_URL_PREFIX="https://test.com",
        QUERIDO_DIARIO_CORS_ALLOW_ORIGINS=["localhost"],
        QUERIDO_DIARIO_CORS_ALLOW_CREDENTIALS=True,
        QUERIDO_DIARIO_CORS_ALLOW_METHODS=["GET", "POST"],
        QUERIDO_DIARIO_CORS_ALLOW_HEADERS=["X-Test-Test"],
    )
    configuration = load_configuration()
    self.check_configuration_values(configuration, expected_config_dict)
def load_rule_changes(self):
    """Sync the running rules with the files in rules_folder.

    Uses the hashes of the rule config files to detect deleted, changed
    and newly added rules, updating self.rules accordingly.

    Side effects: mutates self.rules and replaces self.rule_hashes.
    """
    rule_hashes = get_rule_hashes(self.conf)

    # Check each current rule for changes.
    # NOTE: .iteritems() was Python-2-only; .items() iterates the same
    # pairs on both Python 2 and 3.
    for rule_file, hash_value in self.rule_hashes.items():
        if rule_file not in rule_hashes:
            # Rule file was deleted: remove the matching running rule
            logging.info('Rule file %s not found, stopping rule execution' % (rule_file))
            self.rules = [rule for rule in self.rules if rule['rule_file'] != rule_file]
            continue
        if hash_value != rule_hashes[rule_file]:
            # Rule file was changed, reload rule
            try:
                new_rule = load_configuration(os.path.join(self.conf['rules_folder'], rule_file))
            except EAException as e:
                self.handle_error('Could not load rule %s: %s' % (rule_file, e))
                continue
            logging.info("Reloading configuration for rule %s" % (rule_file))
            # Re-initialize only the rule that matches rule_file
            self.rules = [rule if rule['rule_file'] != rule_file else self.init_rule(new_rule, False) for rule in self.rules]

    # Load new rule files, unless a single rule was pinned via --rule
    if not self.args.rule:
        for rule_file in set(rule_hashes.keys()) - set(self.rule_hashes.keys()):
            try:
                new_rule = load_configuration(os.path.join(self.conf['rules_folder'], rule_file))
            except EAException as e:
                self.handle_error('Could not load rule %s: %s' % (rule_file, e))
                continue
            logging.info('Loaded new rule %s' % (rule_file))
            self.rules.append(self.init_rule(new_rule))

    # Remember the on-disk hashes for the next sync
    self.rule_hashes = rule_hashes
def run(self):
    """Configure logging, register the bot's command handlers and start
    long-polling Telegram for updates (blocks the calling thread).
    """
    logging.basicConfig(
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        level=logging.INFO)
    config = load_configuration()
    # SECURITY: the previous print(config.token) debug line was removed --
    # the bot token is a secret and must never be written to stdout/logs.
    updater = Updater(token=config.token)
    dispatcher = updater.dispatcher

    # Map command names to their handler callables
    commands = {
        'start': StartCommand().execute,
        'register': RegisterCommand().execute,
        'pidor': PidorCommand().execute,
        'stats': StatsCommand().execute,
    }
    for command, handler in commands.items():
        dispatcher.add_handler(CommandHandler(command, handler))
    dispatcher.add_error_handler(self.error_handler)

    updater.start_polling()
)
from gazettes import (
    create_gazettes_interface,
    create_gazettes_data_gateway,
    create_gazettes_query_builder,
)
from index import create_search_engine_interface
from suggestions import create_suggestion_service
from themed_excerpts import (
    create_themes_database_gateway,
    create_themed_excerpts_data_gateway,
    create_themed_excerpts_interface,
    create_themed_excerpts_query_builder,
)

# Module-level wiring: read the configuration once, then build the search
# engine gateway and the gazettes interface from it.
# NOTE(review): the leading ")" closes an import statement that starts
# before this view -- do not remove it.
configuration = load_configuration()
search_engine = create_search_engine_interface(
    configuration.host, configuration.gazette_index
)
# Query builder is parameterized by the index field names from config
gazettes_query_builder = create_gazettes_query_builder(
    configuration.gazette_content_field,
    configuration.gazette_content_exact_field_suffix,
    configuration.gazette_publication_date_field,
    configuration.gazette_territory_id_field,
)
gazettes_search_engine_gateway = create_gazettes_data_gateway(
    search_engine, gazettes_query_builder, configuration.gazette_index
)
gazettes_interface = create_gazettes_interface(gazettes_search_engine_gateway)
# NOTE(review): `name` and `value` look like they come from an
# option-parsing loop that starts before this view -- confirm the
# enclosing loop and indentation against the full file.
if name == "--seeds":
    seeds = value.split(',')
if name == "--load-limit":
    load_limit = value
if name == "--bootstrap":
    # --bootstrap clears the load limit entirely
    load_limit = None
if name == "--bootstrap-limit":
    bootstrap_limit = value
if name == "--rm-limit":
    rm_limit = value

# Validate the binding address before touching cluster state
host, port = helpers.normalize_addr(bind)
if host is None:
    die('Invalid binding address %s' % bind)
sync.init_cluster_state('%s:%s' % (host, port))

# FIXME avoid adding these seeds in cluster
for seed in seeds:
    r = sync.cluster_state.add_instance(seed)

# Load old configuration
data.set_bootstrap_limit(bootstrap_limit=bootstrap_limit)
config.load_configuration(load_limit=load_limit)

# Remove old files
config.set_rm_limit(rm_limit=rm_limit)
config.rm_old_files()

# Run the server
server.run((host, port))
# print(f"Blocking {website}...") file.write(f"{REDIRECT} {website}") config.save_configuration(True) def unblock(): blacklist = load_blacklist() with open(HOSTS_PATH, "r+") as file: content = file.readlines() file.seek(0) for line in content: if not any(website in line for website in blacklist): file.write(line) file.truncate() config.save_configuration(False) if __name__ == '__main__': is_blocked = config.load_configuration() state = "blocked" if is_blocked else "unblocked" print(f"State: {state}") user_action = input("Block or unblock (b/u)?\n") if user_action == 'b': if not is_blocked: block() elif user_action == 'u': if is_blocked: unblock() else: print("Unknown command")
    # NOTE(review): this first assignment appears to sit inside an
    # `if name == "--seeds":` whose header is outside this view; `name`
    # and `value` presumably come from an option-parsing loop -- confirm
    # against the full file.
    seeds = value.split(',')
if name == "--load-limit":
    load_limit = value
if name == "--bootstrap":
    # --bootstrap clears the load limit entirely
    load_limit = None
if name == "--bootstrap-limit":
    bootstrap_limit = value
if name == "--rm-limit":
    rm_limit = value

# Validate the binding address before touching cluster state
host, port = helpers.normalize_addr(bind)
if host is None:
    die('Invalid binding address %s' % bind)
sync.init_cluster_state('%s:%s' % (host, port))

# FIXME avoid adding these seeds in cluster
for seed in seeds:
    r = sync.cluster_state.add_instance(seed)

# Load old configuration
data.set_bootstrap_limit(bootstrap_limit=bootstrap_limit)
config.load_configuration(load_limit=load_limit)

# Remove old files
config.set_rm_limit(rm_limit=rm_limit)
config.rm_old_files()

# Run the server
server.run((host, port))
        # Tail of a reporting routine whose definition starts before this
        # view -- indentation is a best-effort reconstruction.
        request_result = emon_send(warning)
        logging.info("Warning request result: " + request_result.text)
        mqtt_publish_dict(MQTT_TOPIC, warning)

        # Read the Pi's temperature and ship it to EmonCMS and MQTT
        rpi_temp = temperature_of_raspberry_pi()
        output = {"rpi_temp": rpi_temp}
        request_result = emon_send(output)
        mqtt_publish_dict(MQTT_TOPIC, output)
        logging.info("Temp Request result: " + request_result.text)
    except Exception as e:
        # Best-effort loop: log the failure and keep the process alive
        logging.error("Could not process routine: " + str(e))


if __name__ == '__main__':
    # Load configuration
    configuration = config.load_configuration()
    protocol = configuration["protocol"]
    hostname = configuration["hostname"]
    port = configuration["port"]
    path = configuration["path"]
    # Base URL of the EmonCMS instance assembled from config parts
    EMONCMS_INSTANCE = f"{protocol}://{hostname}:{port}/{path}"
    API_KEY = configuration["api_key"]
    NODE_NAME = configuration["node_name"]
    # MQTT topic is optional; fall back to "" when not configured
    MQTT_TOPIC = configuration[
        "mqtt_topic"] if "mqtt_topic" in configuration else ""

    # Initialize MQTT
    mqtt_setup(configuration)
    ser = serial_init()