def load_config(config_file_path):
    """Build the default configuration and overlay an optional YAML file.

    Args:
        config_file_path: Path to a YAML config file, or ``None`` to use
            only the defaults derived from ``CONFIG_MAP``.

    Returns:
        The merged and validated configuration dict.
    """
    # Typo fix: message previously read "confing".
    _logger.debug("Making a default config...")
    config = make_default_config(CONFIG_MAP)
    if config_file_path is not None:
        # User-supplied values override the generated defaults.
        with open(config_file_path) as config_file:
            typetools.merge_dicts(config, _load_yaml(config_file))
    validate_config(config, CONFIG_MAP)
    return config
def patch_config(pattern):
    """Extend the global ``_config`` with defaults derived from *pattern*.

    Thread-safe via ``_config_lock``.  Must be called after the global
    config has been initialized; raises AssertionError otherwise.
    """
    with _config_lock:
        global _config
        assert _config is not None, "Run setup_config() first"
        # NOTE(review): copy.copy() is shallow — nested dicts are still
        # shared with the old _config, so merge_dicts below can mutate them
        # before validation succeeds; confirm this is intended.
        config = copy.copy(_config)
        defaults = service.make_default_config(pattern)
        # Inner merge overlays the current values onto the pattern defaults;
        # the outer merge writes that result back into the working copy, so
        # existing settings win over the new defaults.
        typetools.merge_dicts(config, typetools.merge_dicts(defaults, config))
        service.validate_config(config, pattern)
        _config = config
def init(name, description, args=None, raw_config=None):
    """Parse CLI options, load the configuration, and set up logging.

    Args:
        name:        Program name passed to argparse (shown in usage/help).
        description: Program description for ``--help``.
        args:        Argument list to parse; ``None`` means ``sys.argv``.
        raw_config:  Pre-parsed config dict used when no ``-c`` file is
                     given; replaced entirely (not merged) if one is.

    Returns:
        The final validated global config object (also stored in ``_config``).

    May call ``sys.exit(0)`` when ``--dump-config`` is requested.
    """
    global _config
    assert _config is None, "init() has already been called"
    args_parser = argparse.ArgumentParser(prog=name, description=description)
    args_parser.add_argument("-v", "--version", action="version", version=tools.get_version())
    args_parser.add_argument("-c", "--config", dest="config_file_path", default=None, metavar="<file>")
    args_parser.add_argument("-l", "--level", dest="log_level", default=None)
    args_parser.add_argument("-m", "--dump-config", dest="dump_config", action="store_true")
    options = args_parser.parse_args(args)

    # Load configs
    raw_config = (raw_config or {})
    # NOTE(review): a -c file *replaces* the raw_config argument rather than
    # merging with it — confirm that is the intended precedence.
    if options.config_file_path is not None:
        raw_config = load_yaml_file(options.config_file_path)
    scheme = _get_config_scheme()
    config = optconf.make_config(raw_config, scheme)

    # Configure logging
    contextlog.patch_logging()
    contextlog.patch_threading()
    logging.setLogRecordFactory(_ClusterLogRecord)
    logging.captureWarnings(True)
    logging_config = raw_config.get("logging")
    if logging_config is None:
        # NOTE(review): yaml.load() without an explicit Loader is deprecated
        # and unsafe on untrusted input; this data ships with the package, but
        # consider yaml.safe_load here.
        logging_config = yaml.load(pkgutil.get_data(__name__, "configs/logging.yaml"))
    if options.log_level is not None:
        # -l overrides the root logger level from the CLI.
        logging_config.setdefault("root", {})
        logging_config["root"]["level"] = _valid_log_level(options.log_level)
    logging.config.dictConfig(logging_config)

    # Update scheme for backend opts
    # The backend is only known after the first make_config() pass, so the
    # scheme is extended and the config rebuilt a second time.
    backend_scheme = backends.get_backend_class(config.core.backend).get_options()
    typetools.merge_dicts(scheme, {"backend": backend_scheme})
    config = optconf.make_config(raw_config, scheme)

    # Update scheme for selected helpers/modules
    for helper_name in config.helpers.configure:
        helper = importlib.import_module(helper_name)
        get_options = getattr(helper, "get_options", None)
        if get_options is None:
            raise RuntimeError("Helper '{}' requires no configuration".format(helper_name))
        typetools.merge_dicts(scheme, {"helpers": get_options()})

    # Provide global configuration for helpers
    _config = optconf.make_config(raw_config, scheme)

    # Print config dump and exit
    if options.dump_config:
        print(make_config_dump(_config, split_by=((), ("helpers",))))
        sys.exit(0)
    return _config
def test_with_include(self):
    """A file pulled in via ``!include`` is merged and, once the backend
    scheme is registered, validated into typed config attributes."""
    included_yaml = "nodes:\n - foo\n - bar\nstart-retries: 1"
    with write_file(included_yaml) as include_path:
        top_yaml = "core:\n backend: zookeeper\nbackend: !include {}".format(include_path)
        with write_file(top_yaml) as main_path:
            raw = yaml.load_file(main_path)
            scheme = {"core": {"backend": optconf.Option(default="noop", type=str, help="")}}
            cfg = optconf.make_config(raw, scheme)
            # The included file's contents land under the "backend" key even
            # before the backend scheme is known.
            assert raw["backend"]["nodes"] == ["foo", "bar"]
            assert cfg.core.backend == "zookeeper"
            # After merging the zookeeper options, the same raw data becomes
            # fully validated config attributes.
            typetools.merge_dicts(scheme, {"backend": zookeeper.Backend.get_options()})
            cfg = optconf.make_config(raw, scheme)
            assert cfg.backend.nodes == ["foo", "bar"]
            assert cfg.backend.timeout == 10.0
def test_with_update(self, scheme):
    """Merging backend options into an existing scheme preserves the old
    keys and adds the backend defaults under a new section."""
    typetools.merge_dicts(scheme, {"backend": zookeeper.Backend.get_options()})
    cfg = optconf.make_config({}, scheme)
    # The pre-existing scheme values survive the merge untouched.
    assert cfg.key1 == 1
    assert cfg.key2 == 2
    assert cfg.section1.key11 == 11.0
    assert cfg.section1.key12 == 12.0
    assert cfg.section1.section2.key21 == "21"
    # The zookeeper defaults appear under the freshly merged "backend" section.
    backend = cfg.backend
    assert backend.nodes == ["localhost:2181"]
    assert backend.timeout == 10.0
    assert backend.start_timeout == 10.0
    assert backend.start_retries == 1
    assert backend.randomize_hosts is True
    assert backend.chroot is None
O_MAX_INPUT_QUEUE_SIZE: (50000, _valid_number_min_1), }, S_GIT: { O_REPO_URL: ("http://example.com", str), O_REPO_DIR: ("/tmp/rules.git", str), O_REVISIONS: (10, lambda arg: validators.common.valid_number(arg, 1)), }, S_LOGGING: {O_VERSION: (1, validators.common.valid_number)}, S_METERS: {}, S_SPLITTER: dict(_DAEMON_MAP), S_WORKER: dict(_DAEMON_MAP), S_COLLECTOR: typetools.merge_dicts( { O_POLL_INTERVAL: (10, _valid_number_min_1), O_ACQUIRE_DELAY: (5, _valid_number_min_1), O_RECYCLED_PRIORITY: (0, _valid_number_min_0), O_GARBAGE_LIFETIME: (0, _valid_number_min_0), }, dict(_DAEMON_MAP), ), S_API: {}, S_CHERRY: { "global": { "server.socket_host": ("0.0.0.0", lambda arg: validators.network.valid_ip_or_host(arg)[0]), "server.socket_port": (7887, validators.network.valid_port), } }, S_BACKDOOR: { O_ENABLED: (False, validators.common.valid_bool), O_PORT: (12309, validators.network.valid_port), O_LISTEN: (5, _valid_number_min_1),