def write(self, custom_config: dict = None):
    """Persist the configuration to ``self.config_path`` as YAML.

    When ``custom_config`` is given, only that user-level configuration is
    merged over the file's current on-disk contents; otherwise the full
    configuration from ``self.toDict()`` is written.

    :param custom_config: optional user config dict to merge and write.
    :returns: the dict that was written to disk.
    :raises ConfigNotFound: when the file cannot be read or written.
    """
    # Make sure the target file exists before reading or writing it.
    if not os.path.exists(self.config_path):
        with open(self.config_path, "a"):
            os.utime(self.config_path, None)
    try:
        if custom_config:
            # Only write user config, not the whole thing: overlay the
            # provided values on top of what is currently on disk.
            merged = anyconfig.load([str(self.config_path)])
            anyconfig.merge(merged, self.toDict(custom_config))
            backplane_config = merged
        else:
            # Round-trip through JSON to obtain a plain dict of the config.
            backplane_config = anyconfig.loads(
                json.dumps(self.toDict()), ac_parser="json"
            )
        # Save config as YAML.
        with open(self.config_path, "w+") as writer:
            writer.write(anyconfig.dumps(backplane_config, ac_parser="yaml"))
        return backplane_config
    except OSError as e:
        raise ConfigNotFound(e)
def merge_sources(self):
    """Filter, index and merge all loaded sources into ``self.data``.

    Each non-errored source is optionally post-processed via
    ``self.src_post_proc`` (giving the caller a chance to shape things up
    or set the src path), nested under its root path, unflattened, and
    remapped through the filter/index hook.  Sources whose prepared dicts
    fail to merge are collected into ``failed`` instead of aborting.
    """
    dlist = []
    for s in self.sources:
        if s.error:
            # Skip sources that failed to load.
            continue
        if self.src_post_proc:
            # Caller-provided hook runs before filter/index/merge.
            self.src_post_proc(s)
        d = {s.root_path: s.contents} if s.root_path else s.contents
        e = iterutils.unflatten(d, self.unflatten_separator)
        dlist.append(
            iterutils.remap(
                e,
                reraise_visit=True,
                enter=lambda p, k, v: self.__filter_and_index(s, p, k, v),
            )
        )

    # Merge everything, putting the content under the root path for
    # each source.
    self.data = {}
    failed = []
    for d in dlist:
        try:
            anyconfig.merge(self.data, d)
        except Exception:
            # Fix: was a bare ``except:`` that also swallowed
            # KeyboardInterrupt/SystemExit; narrowed to Exception while
            # keeping the best-effort "collect failures" behavior.
            failed.append(d)
def merge_dicts(a, b):
    """Merge ``b`` into ``a`` in place and return the mutated ``a``.

    Values from ``b`` win over values in ``a``; nested dicts are combined
    according to ``MERGE_STRATEGY``.  E.g. merging
    ``{'a': 1, 'b': [{'c': 3}], 'd': {'e': 'bbb'}}`` into
    ``{'b': [{'c': 0}], 'd': {'e': 'aaa', 'f': 3}}`` yields
    ``{'a': 1, 'b': [{'c': 3}], 'd': {'e': 'bbb', 'f': 3}}``.

    :param a: the target dictionary
    :param b: the dictionary to import
    :return: dict
    """
    target = a
    anyconfig.merge(target, b, ac_merge=MERGE_STRATEGY)
    return target
def load(path=None, with_defaults=False, validate=False):
    """
    Load the configuration.

    :param str path: configuration file path. If set to `None`, it makes the
        configuration file optional, meaning that either only the defaults
        will be loaded or that the configuration will be empty. Otherwise the
        loading will fail if the file is not found.
    :param bool with_defaults: if `True`, loads the default values when they
        are not specified in the configuration file
    :param bool validate: if `True`, validates the configuration. If an error
        is detected, a `SyntaxError` will be raised. The error message should
        indicate which part of the configuration file was invalid.

    :returns: (dict) A dictionary representing the configuration.
    """
    import copy  # local import keeps the fix self-contained

    # Fix: the original aliased CONFIGURATION_DEFAULTS directly, so the
    # anyconfig.merge below mutated the module-level defaults dict and
    # corrupted them for every later call. Deep-copy instead.
    conf = copy.deepcopy(CONFIGURATION_DEFAULTS) if with_defaults else {}

    # Load the configuration file if specified.
    conf_from_file = {} if path is None else anyconfig.load(path)

    # Merge the configuration into the dictionary containing the defaults.
    # If `with_defaults` is False, this step simply loads the configuration file.
    anyconfig.merge(conf, conf_from_file)

    # Validate the configuration.
    if validate:
        (rc, err) = anyconfig.validate(conf, CONFIGURATION_SCHEMA)
        if not rc:
            raise SyntaxError(err)

    return conf
def _merge_default(self, default_file: str) -> None:
    """Overlay the currently loaded config on top of a defaults file.

    :param default_file: path to the defaults file (must exist).
    :raises IOError: when ``default_file`` does not exist.
    """
    if not os.path.isfile(default_file):
        raise IOError("Your default ({}) does not exist".format(default_file))
    # Load the defaults first, then merge the current config over them so
    # explicitly loaded settings win over defaults.
    merged = anyconfig.load(default_file)
    anyconfig.merge(merged, self.loaded_config)
    self.loaded_config = merged
def add(self, d, root_path):
    """Merge dict ``d`` into ``self.data``.

    ``d`` is nested under ``root_path`` when one is given, unflattened
    using ``self.unflatten_separator``, and each node is passed through
    the filter/index hook before merging.
    """
    nested = {root_path: d} if root_path else d
    expanded = iterutils.unflatten(nested, self.unflatten_separator)
    remapped = iterutils.remap(
        expanded, lambda p, k, v: self.__filter_and_index(p, k, v))
    anyconfig.merge(self.data, remapped)
def _populate_item(self, item):
    """Merge every ``(key, value)`` pair of ``item`` into ``self._all_items``.

    Dict values are merged recursively (``MS_DICTS``); a failed merge
    aborts the program with a readable message.
    """
    for key, value in item:
        try:
            anyconfig.merge(
                self._all_items[key], value, ac_merge=anyconfig.MS_DICTS)
        except ValueError as err:
            self.log.sysexit_with_message(
                "Unable to merge annotation values:\n{}".format(err))
def init_config():
    """Build the effective configuration: defaults overridden by user files.

    :returns: the merged configuration mapping.
    """
    log = getLogger(__name__)
    log.debug('Loading default configuration')
    cfg = load(_config_defaults, ignore_missing=False)

    log.debug(f"Loading user configuration overrides {' '.join(_config_search)}")
    # User files are optional and replace (not append to) default values.
    overrides = load(_config_search, ac_merge=MS_REPLACE, ignore_missing=True)
    merge(cfg, overrides)

    log.debug(f"Configuration complete, dumping value")
    log.debug(cfg)
    return cfg
def parse_config(config: dict) -> dict:
    """Recursively resolve the ``base`` config files listed in ``config``.

    Each file named in ``config['base']`` is loaded (recursing when it has
    a ``base`` of its own) and merged left-to-right, so later base files
    win over earlier ones and ``config`` itself wins over all bases.

    :param config: configuration dict; its ``base`` key is consumed (popped).
    :returns: the fully merged configuration dict.
    """
    import anyconfig
    base_file_list = config.pop('base')
    base_config = {}
    for base_file in base_file_list:
        # Fix: the original passed a bare ``open(...)`` to anyconfig.load,
        # leaking the file handle; the context manager closes it promptly.
        with open(base_file, 'rb') as stream:
            tmp_config = anyconfig.load(stream)
        if 'base' in tmp_config:
            tmp_config = parse_config(tmp_config)
        # Previously accumulated bases are merged under the newer file.
        anyconfig.merge(tmp_config, base_config)
        base_config = tmp_config
    # The caller's own settings take final precedence.
    anyconfig.merge(base_config, config)
    return base_config
def __process_vault_keys(self, config):
    """
    Takes the configuration loaded by AnyConfig, performs Vault secret
    loading, and removes the vault_secrets section.

    Args:
    - config: configuration dictionary from AnyConfig

    Returns: configuration dictionary
    """
    secrets = self.__vault_keys_retrieve(config)
    merge(config, secrets)
    # The bookkeeping section must not leak into the final configuration.
    config.pop("vault_secrets", None)
    return config
def make(cls):
    """Makes the conf object, merging in the following order:

    - ENV
    - default config: default.yml
    - stage config: {stage}.yml
    - remote config: remote_settings
    """
    anyconfig.merge(cls.conf, os.environ.copy())
    stage = cls.conf.get("stage", None)
    config_dir = cls.conf.get("project_config_dir", ".")

    # Layer in the project-wide defaults, then the stage-specific file;
    # each merge overrides what came before.
    for file_name in ("default.yml", f"{stage}.yml"):
        candidate = os.path.join(config_dir, file_name)
        if os.path.exists(candidate):
            anyconfig.merge(cls.conf, anyconfig.load(candidate))

    # Optionally pull parameters from a remote settings store.
    if cls.conf.get("use_remote_settings", None):
        project_name = cls.conf.get("project_name", None)
        parameters_path = f"/{project_name}/{stage}/"
        remote_settings_class = cls.create_remote_settings_class()
        remote_conf = remote_settings_class.get_remote_params(parameters_path)
        anyconfig.merge(cls.conf, remote_conf)
def _populate_doc_data(self):
    """Generate the documentation data object."""
    tags = defaultdict(dict)
    # Collect the details of every automatically discovered annotation.
    for name in self.config.get_annotations_names(automatic=True):
        self.logger.info("Finding annotations for: @" + name)
        annotation_obj = Annotation(
            name=name, files_registry=self._files_registry
        )
        self._annotation_objs[name] = annotation_obj
        tags[name] = annotation_obj.get_details()

    try:
        anyconfig.merge(self._data, tags, ac_merge=anyconfig.MS_DICTS)
    except ValueError as e:
        self.log.sysexit_with_message("Unable to merge annotation values:\n{}".format(e))
def _get_config(self):
    """Assemble the effective configuration dictionary.

    Starts from built-in defaults, merges every existing config file
    (the explicit config file plus ``.later``/``.later.yml``/``.later.yaml``
    in the cwd), then CLI options, auto-registers custom Ansible modules
    found in a local ``library`` directory, and normalizes rule-standards
    paths.

    :returns: dict of merged settings
    """
    defaults = self._get_defaults()
    source_files = []
    source_files.append(self.config_file)
    source_files.append(os.path.join(os.getcwd(), ".later"))
    source_files.append(os.path.join(os.getcwd(), ".later.yml"))
    source_files.append(os.path.join(os.getcwd(), ".later.yaml"))
    cli_options = self.args

    # Merge each existing config file over the defaults; later files in
    # the list win because each merge overlays the previous result.
    for config in source_files:
        if config and os.path.exists(config):
            with utils.open_file(config) as stream:
                s = stream.read()
                sdict = utils.safe_load(s)
                if self._validate(sdict):
                    anyconfig.merge(defaults, sdict, ac_merge=anyconfig.MS_DICTS)
                    # Logging level is kept upper-case after every merge.
                    defaults["logging"]["level"] = defaults["logging"][
                        "level"].upper()

    # CLI options take the highest precedence.
    if cli_options and self._validate(cli_options):
        anyconfig.merge(defaults, cli_options, ac_merge=anyconfig.MS_DICTS)

    # Auto-register non-hidden files under ./library as custom modules.
    library = os.path.relpath(
        os.path.normpath(os.path.join(os.getcwd(), "library")))
    autodetect = []
    if os.path.exists(library):
        autodetect = [
            os.path.splitext(f)[0] for f in os.listdir(library)
            if os.path.isfile(os.path.join(library, f))
            and not f.startswith(".")
        ]
    for f in autodetect:
        if f not in defaults["ansible"]["custom_modules"]:
            defaults["ansible"]["custom_modules"].append(f)

    # Optionally include the built-in rule standards ("buildin" spelling
    # is the established config key), then normalize all standards paths.
    if defaults["rules"]["buildin"]:
        defaults["rules"]["standards"].append(
            os.path.join(resource_filename("ansiblelater", "rules")))
    defaults["rules"]["standards"] = [
        os.path.relpath(os.path.normpath(p))
        for p in defaults["rules"]["standards"]
    ]

    return defaults
def load(self):
    """Load the file at ``self.config_path`` over the current config values.

    :returns: the merged config dict when the file exists, else ``None``.
    :raises ConfigNotFound: when the file type is unknown or the file
        vanishes between the existence check and the load.
    """
    try:
        if self.config_path.exists() and self.config_path.is_file():
            merged = self.toDict()
            anyconfig.merge(merged, anyconfig.load([str(self.config_path)]))
            # Replace this object's attributes wholesale with the result.
            self.__dict__ = merged
            return merged
        # A missing file is tolerated: fall through and return None.
    except (anyconfig.globals.UnknownFileTypeError, FileNotFoundError) as e:
        raise ConfigNotFound(e)
def readConfig(config_path: str, backplane):
    """Return ``backplane`` with the config file at ``config_path`` merged in.

    Exits the process with an error message when the file cannot be read.

    :param config_path: path to the optional config file.
    :param backplane: base configuration to overlay the file onto.
    """
    try:
        result = backplane
        if os.path.exists(config_path):
            # Overlay the on-disk settings onto the provided defaults.
            anyconfig.merge(result, anyconfig.load([config_path]))
        return result
    except OSError as e:
        typer.secho(
            f"Couldn't read backplane config at {config_path}: {e}",
            err=True,
            fg=typer.colors.RED,
        )
        sys.exit(1)
def test_20_dump_and_multi_load(self):
    """multi_load with MS_DICTS must equal an explicit merge of the dumps."""
    obj_diff = dict(a=2, b=dict(b=[1, 2, 3, 4, 5], d='D'))

    with tempfile.TemporaryDirectory() as tmpdir:
        paths = [pathlib.Path(tmpdir) / name for name in ('a.json', 'b.json')]
        # Dump both objects and confirm the files landed on disk.
        for obj, path in zip((self.obj, obj_diff), paths):
            TT.dump(obj, path)
            self.assertTrue(path.exists())

        loaded = TT.multi_load(paths, ac_merge=TT.MS_DICTS)

        expected = copy.copy(self.obj)
        TT.merge(expected, obj_diff, ac_merge=TT.MS_DICTS)
        self.assertEqual(loaded, expected)
def __init__(self):
    """Build the test configuration.

    Loads the built-in ``DEFAULT_CONFIG`` YAML, overlays the file named by
    the ``LOCUST_CONFIG`` environment variable, applies per-setting
    environment overrides, and normalizes size values to byte counts.

    :raises Exception: when ``LOCUST_CONFIG`` is unset or not a file.
    """
    self.config = anyconfig.loads(self.DEFAULT_CONFIG, ac_parser='yaml')
    file_name = os.environ.get('LOCUST_CONFIG')
    if not file_name or not os.path.isfile(file_name):
        raise Exception('invalid test configuration for locust, '
                        'check LOCUST_CONFIG environment variable.')
    anyconfig.merge(self.config,
                    anyconfig.load(file_name, ac_parser='yaml'))

    # Environment variables override individual S3 connection settings.
    self.config['s3']['endpoint'] = os.getenv(
        'S3_ENDPOINT', self.config['s3']['endpoint'])
    # Endpoint may be a comma-separated list; normalize to a list of URLs.
    self.config['s3']['endpoint'] = [
        u.strip() for u in self.config['s3']['endpoint'].split(',')
    ]
    self.config['s3']['access_key'] = os.getenv(
        'S3_ACCESS_KEY', self.config['s3']['access_key'])
    self.config['s3']['access_secret'] = os.getenv(
        'S3_ACCESS_SECRET', self.config['s3']['access_secret'])

    # Optional cache-server settings, also overridable via environment.
    if 'cache' in self.config:
        self.config['cache']['server'] = os.getenv(
            'LT_CACHE_SERVER', self.config['cache'].get('server'))
        self.config['cache']['port'] = os.getenv(
            'LT_CACHE_SERV_PORT', self.config['cache'].get('port'))
        self.config['cache']['db'] = os.getenv(
            'LT_CACHE_SERV_DB', self.config['cache'].get('db'))

    # Convert human-readable size bounds to numbers via parse_size;
    # both sections below are optional, hence the KeyError guards.
    try:
        config_weights = self.config['data']['weights']
        for value in config_weights.values():
            value['LOW'] = parse_size(str(value['LOW']))
            value['HIGH'] = parse_size(str(value['HIGH']))
    except KeyError:
        pass
    try:
        self.config['ops']['put_object']['limit'][
            'size_limit'] = parse_size(
                str(self.config['ops']['put_object']['limit']
                    ['size_limit']))
    except KeyError:
        pass
def __init__(self, file_path=FILENAME, spec_file_path=SPEC_FILENAME):
    """Load configuration from env vars and ``file_path``, then validate it
    against the schema in ``spec_file_path``.

    Precedence (lowest to highest): built-in defaults, ``radar_iot_*``
    environment variables, values from the configuration file.

    :raises AttributeError: when the merged config fails schema validation.
    """
    import copy
    import re

    # Fix: the original aliased DEFAULT_CONF directly, so the merges below
    # mutated the shared module constant and corrupted the defaults for
    # every later instance. Deep-copy instead.
    self.config = copy.deepcopy(DEFAULT_CONF)
    conf_ = anyconfig.load(file_path)
    config_spec = anyconfig.load(spec_file_path)

    # Collect environment overrides of the form radar_iot_<key>=<value>,
    # stripping the prefix from the key.
    os_environ = environ.copy()
    re_exp = re.compile('radar_iot_.*')
    allowed_keys = filter(re_exp.match, os_environ.keys())
    environ_vars = {
        re.sub("radar_iot_", "", key, count=1): os_environ[key]
        for key in allowed_keys
    }
    anyconfig.merge(self.config, environ_vars)
    # File values win over environment values.
    anyconfig.merge(self.config, conf_)

    (rc, err) = anyconfig.validate(self.config, config_spec)
    if rc is False or err != '':
        raise AttributeError('Invalid configuration', err)
    else:
        # NOTE(review): logs self.FILENAME even when a custom file_path
        # was supplied — confirm whether the actual path should be logged.
        self.logger.info(
            f'Successfully loaded configuration from {self.FILENAME}')
def __vault_keys_retrieve(self, config):
    """
    Connects to the Vault to retrieve keys specified in the config dictionary

    Args:
    - config: configuration dict

    Returns: Updated vault configuration pieces
    """
    vault_config_parts = {}

    # In passthrough mode no Vault connection exists, so configured
    # secrets cannot be resolved; warn and return nothing.
    if self.pass_through_flag:
        warn(
            "VaultAnyconfig is set to Passthrough mode, but secrets are configured in configuration. These secrets will not be loaded.",
            UserWarning,
        )
        return vault_config_parts

    for secret, path in config.get("vault_secrets", {}).items():
        # Dotted secret name gives the nesting path inside the config.
        config_key_path = secret.split(".")
        secret_path = path

        # Optionally map the key in the configuration to a different key in the Vault
        secret_path_split = secret_path.split(".")
        if len(secret_path_split) > 1:
            # "path.key" form: the last dotted component names the Vault key.
            secret_path = ".".join(secret_path_split[:-1])
            secret_key = secret_path_split[-1]
        else:
            # Otherwise reuse the last component of the config key path.
            secret_key = config_key_path[-1]

        read_vault_secret = self.__process_response(
            self.read(secret_path), secret_key)
        # Re-nest the secret value under its config key path and merge it
        # into the accumulated result.
        config_part = self.__get_nested_config(config_key_path, read_vault_secret)
        merge(vault_config_parts, config_part)

    return vault_config_parts
def cast_config(obj, required=False):
    """Normalize ``obj`` into a configuration mapping.

    Accepts ``None`` (becomes ``{}``), a mapping (returned as-is), a file
    path string (loaded via anyconfig when it exists), or a list of any of
    these (merged in order with ``MS_DICTS``).

    :param obj: the value to coerce.
    :param required: when True, a missing config raises instead of
        yielding ``{}``.
    :raises Exception: for a missing-but-required config or an
        unsupported type.
    """
    if obj is None:
        if required:
            raise Exception("Config required")
        return {}
    if isinstance(obj, collections.abc.Mapping):
        return obj
    if isinstance(obj, str):
        if os.path.exists(obj):
            return anyconfig.load(obj)
        if required:
            raise Exception("Config file not found {}".format(obj))
        return {}
    if isinstance(obj, list):
        merged = {}
        for item in obj:
            anyconfig.merge(merged, cast_config(item), ac_merge=anyconfig.MS_DICTS)
        return merged
    raise Exception("Unknown config type")
def merge_dicts(a, b):
    """Merge ``b`` into ``a`` (mutating it) and return the resulting dict.

    Uses the same merge strategy as ``config._combine``: scalars and lists
    from ``b`` replace those in ``a``, while nested dicts merge key-wise.
    For example, merging ``{'a': 1, 'b': [{'c': 3}], 'd': {'e': 'bbb'}}``
    into ``{'b': [{'c': 0}], 'd': {'e': 'aaa', 'f': 3}}`` produces
    ``{'a': 1, 'b': [{'c': 3}], 'd': {'e': 'bbb', 'f': 3}}``.

    :param a: the target dictionary
    :param b: the dictionary to import
    :return: dict
    """
    anyconfig.merge(a, b, ac_merge=MERGE_STRATEGY)
    return a
def start(): opts = DEFAULT_OPTIONS # Remove default (None) values ops = { k: v for k, v in vars(parse_prelim_arguments()[0]).items() if v is not None } # Check if workdir is set, otherwise assume cwd if ops['workdir'] == "": ops['workdir'] = os.getcwd() else: ops['workdir'] = os.path.abspath(ops['workdir']) # Merge base config and file config (giving priority to file config) anyconfig.merge(opts, parse_config_file_arguments(ops['workdir']), ac_merge=anyconfig.MS_DICTS_AND_LISTS, ac_parse_value=True) # Merge release into config to overwrite for release mode if ('release' in ops and ops['release']) or ('config' in opts and 'release' in opts['config'] and opts['config']['release']): anyconfig.merge(opts['config'], opts['release'], ac_merge=anyconfig.MS_DICTS_AND_LISTS) # Merge CLI args anyconfig.merge(opts, {'config': ops}, ac_merge=anyconfig.MS_DICTS_AND_LISTS) opts = opts['config'] opts = pythonify(opts) # Validate the config schema = anyconfig.load( __file__.replace("main.py", "") + "/concept/config_schema.json") anyconfig.validate(opts, schema, ac_schema_safe=False) # schema = anyconfig.gen_schema(opts, ac_schema_type="strict") # schema_s = anyconfig.dumps(schema, "json") # # with open(__file__.replace("main.py", "") + "/concept/config_schema.json", "w") as file: # file.write(schema_s) if opts['print_options']: print(json.dumps(opts, sort_keys=True, indent=4)) opts = munchify(opts) main(opts)
def _set_config(self):
    """Assemble ``self.config`` from defaults, config files, environment
    variables and CLI arguments (listed lowest to highest precedence)."""
    args = self._get_args(self._args)
    envs = self._get_envs()
    defaults = self._get_defaults()

    # preset config file path: env values first, CLI args override them.
    if envs.get("config_file"):
        self.config_file = self._normalize_path(envs.get("config_file"))
    if envs.get("role_dir"):
        self.role_dir = self._normalize_path(envs.get("role_dir"))
    if args.get("config_file"):
        self.config_file = self._normalize_path(args.get("config_file"))
    if args.get("role_dir"):
        self.role_dir = self._normalize_path(args.get("role_dir"))

    source_files = []
    source_files.append(self.config_file)
    source_files.append(os.path.join(os.getcwd(), ".ansibledoctor"))
    source_files.append(os.path.join(os.getcwd(), ".ansibledoctor.yml"))
    source_files.append(os.path.join(os.getcwd(), ".ansibledoctor.yaml"))

    # Merge every existing config file over the defaults; later files win.
    for config in source_files:
        if config and os.path.exists(config):
            with open(config, "r", encoding="utf8") as stream:
                s = stream.read()
                try:
                    file_dict = ruamel.yaml.safe_load(s)
                except (ruamel.yaml.composer.ComposerError,
                        ruamel.yaml.scanner.ScannerError) as e:
                    # Surface YAML syntax problems with file context.
                    message = "{} {}".format(e.context, e.problem)
                    raise ansibledoctor.exception.ConfigError(
                        "Unable to read config file {}".format(config),
                        message)

                if self._validate(file_dict):
                    anyconfig.merge(defaults, file_dict, ac_merge=anyconfig.MS_DICTS)
                    defaults["logging"]["level"] = defaults["logging"][
                        "level"].upper()

    # Environment variables, then CLI args, take precedence over files.
    if self._validate(envs):
        anyconfig.merge(defaults, envs, ac_merge=anyconfig.MS_DICTS)
    if self._validate(args):
        anyconfig.merge(defaults, args, ac_merge=anyconfig.MS_DICTS)

    # Normalize path-like options.
    fix_files = ["output_dir", "template_dir", "custom_header"]
    for file in fix_files:
        if defaults[file] and defaults[file] != "":
            defaults[file] = self._normalize_path(defaults[file])

    # These keys were only used to locate files; drop them from the result.
    if "config_file" in defaults:
        defaults.pop("config_file")
    if "role_dir" in defaults:
        defaults.pop("role_dir")

    defaults["logging"]["level"] = defaults["logging"]["level"].upper()

    self.config = defaults
def _load_config(self):
    """
    Load configuration from configuration files and environment variables.

    Search order, latest has presedence:

    1. hard coded defaults
    2. `/etc/holmscan.yaml`
    3. `/etc/holmscan.d/*.yaml`
    4. `~/.config/holmscan.yaml`
    5. `~/.config/holmscan.d/*.yaml`
    6. environment variables
    """
    environ = os.environ.copy()

    log.debug("Loading configuration defaults")
    # Deep copy so later merges never mutate the DEFAULTS constant.
    conf = copy.deepcopy(DEFAULTS)

    # Force appdirs to resolve the site config dir under /etc.
    os.environ["XDG_CONFIG_DIRS"] = "/etc"

    # Only keys listed in CONFIGURABLES are accepted from any layer below.
    site_conf_file = os.path.join(
        appdirs.site_config_dir("holmscan") + ".yaml")
    log.debug("Loading configuration file: {0}".format(site_conf_file))
    anyconfig.merge(
        conf,
        {
            k: v
            for k, v in dict(
                anyconfig.load(site_conf_file, ignore_missing=True)).items(
                ) if k in CONFIGURABLES
        },
    )

    site_conf_dir = os.path.join(
        appdirs.site_config_dir("holmscan") + ".d", "*.yaml")
    log.debug("Loading configuration files: {0}".format(site_conf_dir))
    anyconfig.merge(
        conf,
        {
            k: v
            for k, v in dict(anyconfig.load(site_conf_dir)).items()
            if k in CONFIGURABLES
        },
    )

    user_conf_file = os.path.join(
        appdirs.user_config_dir("holmscan")) + ".yaml"
    log.debug("Loading configuration file: {0}".format(user_conf_file))
    anyconfig.merge(
        conf,
        {
            k: v
            for k, v in dict(
                anyconfig.load(user_conf_file, ignore_missing=True)).items(
                ) if k in CONFIGURABLES
        },
    )

    user_conf_dir = os.path.join(
        appdirs.user_config_dir("holmscan") + ".d", "*.yaml")
    log.debug("Loading configuration files: {0}".format(user_conf_dir))
    anyconfig.merge(
        conf,
        {
            k: v
            for k, v in dict(anyconfig.load(user_conf_dir)).items()
            if k in CONFIGURABLES
        },
    )

    # Environment variables win over every file-based layer.
    log.debug("Loading configuration from environment")
    anyconfig.merge(
        conf, {k: v for k, v in environ.items() if k in CONFIGURABLES})

    return conf
def merge_dicts(a, b):
    """Merge ``b`` into ``a`` in place (using MERGE_STRATEGY) and return
    the mutated ``a``."""
    result = a
    anyconfig.merge(result, b, ac_merge=MERGE_STRATEGY)
    return result
msg = "Failed to validate.\n\n{}".format(errors) util.sysexit_with_message(msg) >>>>>>> 0fa82e7a3daa84ebd03d8af67403c6551113d3e4:molecule/config.py ======= >>>>>>> b1eb06d375fd544a849fcf5c39f51dc334b87338:Rake/molecule/__GEMS_.py/__GEMS_.py/apt-py.git/commandinit.yaml/init.yml/config.py >>>>>>> e91355cf081d9dcd78efe38cdcc6f0353a1aa3ac {'a': 1, 'b': [{'c': 3}], 'd': {'e': "bbb", 'f': 3}} :param a: the target dictionary :param b: the dictionary to import :return: dict """ conf = a anyconfig.merge(a, b, ac_merge=MERGE_STRATEGY) return conf def molecule_directory(path): return os.path.join(path, MOLECULE_DIRECTORY) def molecule_file(path): return os.path.join(path, MOLECULE_FILE) def molecule_drivers(): return [ azure.Azure(None).name,