def write(settings_path, settings_data, merge=True):
    """Write data to a settings file.

    :param settings_path: the filepath
    :param settings_data: a dictionary with data
    :param merge: boolean if existing file should be merged with new data
    """
    settings_path = Path(settings_path)
    encoding = default_settings.ENCODING_FOR_DYNACONF
    # Fold the current file contents into ``settings_data`` before dumping.
    if settings_path.exists() and merge:  # pragma: no cover
        with open(str(settings_path), encoding=encoding) as source:
            object_merge(yaml.safe_load(source), settings_data)
    with open(str(settings_path), "w", encoding=encoding) as target:
        yaml.dump(
            settings_data,
            target,
            Dumper=yaml.dumper.SafeDumper,
            explicit_start=True,
            indent=2,
            default_flow_style=False,
        )
def _merge_before_set(self, existing, value):
    """Merge the new value being set with the existing value before set"""
    global_merge = getattr(self, "MERGE_ENABLED_FOR_DYNACONF", False)

    if isinstance(value, dict):
        # NOTE: the default argument is evaluated eagerly, so both marker
        # keys are removed from the dict before the merge decision is made.
        local_merge = value.pop(
            "dynaconf_merge", value.pop("dynaconf_merge_unique", None)
        )
        if local_merge not in (True, False, None) and not value:
            # In case `dynaconf_merge:` holds value not boolean - ref #241
            value = local_merge
        if global_merge or local_merge:
            value = object_merge(existing, value)

    if isinstance(value, (list, tuple)):
        markers = ("dynaconf_merge", "dynaconf_merge_unique")
        local_merge = any(marker in value for marker in markers)
        if global_merge or local_merge:
            value = list(value)
            unique = False
            if local_merge:
                try:
                    value.remove("dynaconf_merge")
                except ValueError:  # EAFP
                    value.remove("dynaconf_merge_unique")
                    unique = True
            value = object_merge(existing, value, unique=unique)

    return value
def _dotted_set(self, dotted_key, value, tomlfy=False, **kwargs):
    """Sets dotted keys as nested dictionaries.

    Dotted set will always reassign the value, to merge use `@merge` token

    Arguments:
        dotted_key {str} -- A traversal name e.g: foo.bar.zaz
        value {Any} -- The value to set to the nested value.

    Keyword Arguments:
        tomlfy {bool} -- Perform toml parsing (default: {False})
    """
    keys = dotted_key.split(".")
    existing_data = self.get(keys[0], {})

    # Build a nested box that mirrors the dotted path, leaf last.
    new_data = DynaBox()
    node = new_data
    for part in keys[:-1]:
        node = node.setdefault(part, {})
    node[keys[-1]] = parse_conf_data(value, tomlfy=tomlfy)

    if existing_data:
        object_merge(
            old={keys[0]: existing_data},
            new=new_data,
            tail=keys[-1],
        )
    self.update(data=new_data, tomlfy=tomlfy, **kwargs)
def _dotted_set(self, dotted_key, value, tomlfy=False, merge=True, **kwargs):
    """Sets dotted keys as nested dictionaries.

    Arguments:
        dotted_key {str} -- A traversal name e.g: foo.bar.zaz
        value {Any} -- The value to set to the nested value.

    Keyword Arguments:
        tomlfy {bool} -- Perform toml parsing (default: {False})
        merge {bool} -- Merge existing dictionaries (default: {True})
    """
    path = dotted_key.split(".")
    # When not merging, pretend nothing exists so the new tree wins outright.
    existing_data = self.get(path[0], {}) if merge else {}

    new_data = DynaBox(default_box=True)
    cursor = new_data
    for step in path[:-1]:
        cursor = cursor.setdefault(step, {})
    cursor[path[-1]] = parse_conf_data(value, tomlfy=tomlfy)

    if existing_data and merge:
        object_merge({path[0]: existing_data}, new_data)
    self.update(data=new_data, tomlfy=tomlfy, **kwargs)
def set(self, key, value, loader_identifier=None, tomlfy=False):
    """Set a value storing references for the loader

    :param key: The key to store
    :param value: The value to store
    :param loader_identifier: Optional loader name e.g: toml, yaml etc.
    :param tomlfy: Bool define if value is parsed by toml (defaults False)
    """
    value = parse_conf_data(value, tomlfy=tomlfy)
    key = key.strip().upper()

    if getattr(self, "MERGE_ENABLED_FOR_DYNACONF", False):
        # object_merge mutates ``value`` in place with the existing data.
        object_merge(getattr(self, key, None), value)

    if isinstance(value, dict):
        value = DynaBox(value, box_it_up=True)

    setattr(self, key, value)
    self.store[key] = value
    self._deleted.discard(key)

    # set loader identifiers so cleaners know which keys to clean
    if loader_identifier:
        bucket = self.loaded_by_loaders.setdefault(loader_identifier, {})
        bucket[key] = value
    elif loader_identifier is None:
        # if .set is called without loader identifier it becomes
        # a default value and goes away only when explicitly unset
        self._defaults[key] = value
def test_merge_dict_with_meta_values():
    """`@del` drops a key and `@reset` replaces its value during a merge."""
    existing = {"A": 1, "B": 2, "C": 3}
    tokens = {"B": "@del", "C": "@reset 4"}
    new = {k: parse_conf_data(v, tomlfy=True) for k, v in tokens.items()}
    object_merge(existing, new)
    assert new == {"A": 1, "C": 4}
def test_merge_existing_list():
    """Merging lists prepends the existing items to the new list."""
    base = ["bruno", "karla"]
    # calling twice the same object does not duplicate
    object_merge(base, base)
    assert base == ["bruno", "karla"]

    incoming = ["erik", "bruno"]
    object_merge(base, incoming)
    assert incoming == ["bruno", "karla", "erik", "bruno"]
def test_merge_existing_dict():
    """Merging dicts folds the existing keys into the new dict."""
    base = {"host": "localhost", "port": 666}
    # calling with same data has no effect
    object_merge(base, base)
    assert base == {"host": "localhost", "port": 666}

    incoming = {"user": "******"}
    object_merge(base, incoming)
    assert incoming == {"host": "localhost", "port": 666, "user": "******"}
def _merge_before_set(self, key, existing, value, is_secret):
    """Merge the new value being set with the existing value before set"""

    # Debug helpers; ``_value`` is a possibly-masked copy safe to log.
    def _log_before_merging(_value):
        self.logger.debug(
            "Merging existing %s: %s with new: %s", key, existing, _value)

    def _log_after_merge(_value):
        self.logger.debug("%s merged to %s", key, _value)

    global_merge = getattr(self, "MERGE_ENABLED_FOR_DYNACONF", False)

    if isinstance(value, dict):
        # NOTE: the default argument is evaluated eagerly, so BOTH marker
        # keys are popped from ``value`` here, whichever one is present.
        local_merge = value.pop(
            "dynaconf_merge", value.pop("dynaconf_merge_unique", None))
        if local_merge not in (True, False, None) and not value:
            # In case `dynaconf_merge:` holds value not boolean - ref #241
            value = local_merge
        if global_merge or local_merge:
            # When secret, log "***" for every key instead of real values.
            safe_value = {k: "***" for k in value} if is_secret else value
            _log_before_merging(safe_value)
            object_merge(existing, value)
            # After the merge ``value`` gained the existing keys; mask only
            # the keys that were present in the incoming (secret) payload.
            safe_value = ({
                k: ("***" if k in safe_value else v)
                for k, v in value.items()
            } if is_secret else value)
            _log_after_merge(safe_value)

    if isinstance(value, (list, tuple)):
        local_merge = ("dynaconf_merge" in value
                       or "dynaconf_merge_unique" in value)
        if global_merge or local_merge:
            value = list(value)
            unique = False
            if local_merge:
                # Remove whichever marker triggered the merge; the
                # "unique" marker additionally deduplicates the result.
                try:
                    value.remove("dynaconf_merge")
                except ValueError:  # EAFP
                    value.remove("dynaconf_merge_unique")
                    unique = True
            # Snapshot of the incoming items, used to mask only those
            # items (not the pre-existing ones) in the post-merge log.
            original = set(value)
            _log_before_merging(
                ["***" for item in value] if is_secret else value)
            object_merge(existing, value, unique=unique)
            safe_value = (
                ["***" if item in original else item for item in value]
                if is_secret else value)
            _log_after_merge(safe_value)

    return value
def write(settings_path, settings_data, merge=True):
    """Write data to an ini-style settings file via ConfigObj.

    :param settings_path: the filepath
    :param settings_data: a dictionary with data
    :param merge: boolean if existing file should be merged with new data
    """
    settings_path = Path(settings_path)
    if settings_path.exists() and merge:  # pragma: no cover
        with open(
            str(settings_path),
            encoding=default_settings.ENCODING_FOR_DYNACONF,
        ) as open_file:
            object_merge(ConfigObj(open_file).dict(), settings_data)
    new = ConfigObj()
    new.update(settings_data)
    # BUGFIX: the output handle was created inline and never closed,
    # leaking the descriptor and risking unflushed data; use a context
    # manager instead ("wb" is the same mode as the original "bw").
    with open(str(settings_path), "wb") as open_file:
        new.write(open_file)
def write(settings_path, settings_data, merge=True):
    """Write data to a .py settings file.

    :param settings_path: the filepath
    :param settings_data: a dictionary with data
    :param merge: boolean if existing file should be merged with new data
    """
    # Local import keeps the module's top-level dependencies unchanged.
    from pathlib import Path

    # BUGFIX: `.exists()` raised AttributeError when callers passed a
    # plain string path; normalize to a Path first (the sibling writers
    # for other formats already do this).
    settings_path = Path(settings_path)
    if settings_path.exists() and merge:  # pragma: no cover
        existing = DynaconfDict()
        load(existing, str(settings_path))
        object_merge(existing, settings_data)
    with io.open(
            str(settings_path),
            'w',
            encoding=default_settings.ENCODING_FOR_DYNACONF) as f:
        f.writelines([
            "{} = {}\n".format(k.upper(), repr(v))
            for k, v in settings_data.items()
        ])
def write(settings_path, settings_data, merge=True):
    """Write data to a settings file.

    :param settings_path: the filepath
    :param settings_data: a dictionary with data
    :param merge: boolean if existing file should be merged with new data
    """
    settings_path = Path(settings_path)
    if settings_path.exists() and merge:  # pragma: no cover
        existing = DynaconfDict()
        load(existing, str(settings_path))
        object_merge(existing, settings_data)

    # Render "KEY = <repr>" lines up front, then flush them in one call.
    lines = [
        f"{upperfy(key)} = {value!r}\n"
        for key, value in settings_data.items()
    ]
    with open(
        str(settings_path),
        "w",
        encoding=default_settings.ENCODING_FOR_DYNACONF,
    ) as output:
        output.writelines(lines)
def write(settings_path, settings_data, merge=True):
    """Write data to a .toml settings file.

    :param settings_path: the filepath
    :param settings_data: a dictionary with data
    :param merge: boolean if existing file should be merged with new data
    """
    # Local import keeps the module's top-level dependencies unchanged.
    from pathlib import Path

    # Normalize so plain-string paths also work with `.exists()` below,
    # matching the sibling writers for the other formats.
    settings_path = Path(settings_path)
    if settings_path.exists() and merge:  # pragma: no cover
        # BUGFIX: the read handle was opened inline and never closed;
        # use a context manager so it is released deterministically.
        with io.open(
                str(settings_path),
                encoding=default_settings.ENCODING_FOR_DYNACONF) as source:
            object_merge(toml.load(source), settings_data)
    # BUGFIX: same leak on the write side - the handle was never closed,
    # so the dumped data could stay unflushed.
    with io.open(
            str(settings_path),
            'w',
            encoding=default_settings.ENCODING_FOR_DYNACONF) as target:
        toml.dump(settings_data, target)
def write(settings_path, settings_data, merge=True):
    """Write data to a settings file.

    :param settings_path: the filepath
    :param settings_data: a dictionary with data
    :param merge: boolean if existing file should be merged with new data
    """
    settings_path = Path(settings_path)
    encoding = default_settings.ENCODING_FOR_DYNACONF
    # Fold the file's current contents into ``settings_data`` first.
    if settings_path.exists() and merge:  # pragma: no cover
        with io.open(str(settings_path), encoding=encoding) as source:
            object_merge(json.load(source), settings_data)
    with io.open(str(settings_path), "w", encoding=encoding) as target:
        json.dump(settings_data, target, cls=DynaconfEncoder)
def set(
    self,
    key,
    value,
    loader_identifier=None,
    tomlfy=False,
    dotted_lookup=True,
    is_secret=False,
    merge=False,
):
    """Set a value storing references for the loader

    :param key: The key to store
    :param value: The value to store
    :param loader_identifier: Optional loader name e.g: toml, yaml etc.
    :param tomlfy: Bool define if value is parsed by toml (defaults False)
    :param dotted_lookup: Bool define if dotted keys set nested structures.
    :param is_secret: Bool define if secret values is hidden on logs.
    :param merge: Bool define if existing nested data will be merged.
    """
    nested_sep = self.get("NESTED_SEPARATOR_FOR_DYNACONF")
    if nested_sep and nested_sep in key:
        # turn FOO__bar__ZAZ in `FOO.bar.ZAZ`
        key = key.replace(nested_sep, ".")

    if "." in key and dotted_lookup is True:
        # NOTE(review): `merge` and `is_secret` are not forwarded here, so
        # dotted keys bypass those options - confirm this is intentional.
        return self._dotted_set(
            key, value, loader_identifier=loader_identifier, tomlfy=tomlfy)

    value = parse_conf_data(value, tomlfy=tomlfy)
    key = upperfy(key.strip())
    existing = getattr(self, key, None)

    # `@del` token: remove the key entirely and stop.
    if getattr(value, "_dynaconf_del", None):
        # just in case someone use a `@del` in a first level var.
        self.unset(key, force=True)
        return

    if getattr(value, "_dynaconf_reset", False):  # pragma: no cover
        # just in case someone use a `@reset` in a first level var.
        # NOTE: @reset/Reset is deprecated in v3.0.0
        value = value.unwrap()

    if getattr(value, "_dynaconf_merge", False):
        # just in case someone use a `@merge` in a first level var
        if existing:
            object_merge(existing, value.unwrap())
        value = value.unwrap()

    # Merge only when there is a real, different existing value.
    if existing is not None and existing != value:
        # `dynaconf_merge` used in file root `merge=True`
        if merge:
            object_merge(existing, value)
        else:
            # `dynaconf_merge` may be used within the key structure
            value = self._merge_before_set(key, existing, value, is_secret)

    if isinstance(value, dict):
        value = DynaBox(value)

    # Store on the attribute, the backing dict, and undo any deletion.
    setattr(self, key, value)
    self.store[key] = value
    self._deleted.discard(key)

    # set loader identifiers so cleaners know which keys to clean
    if loader_identifier and loader_identifier in self.loaded_by_loaders:
        self.loaded_by_loaders[loader_identifier][key] = value
    elif loader_identifier:
        self.loaded_by_loaders[loader_identifier] = {key: value}
    elif loader_identifier is None:
        # if .set is called without loader identifier it becomes
        # a default value and goes away only when explicitly unset
        self._defaults[key] = value
from dynaconf.utils import object_merge
from dynaconf.utils.boxing import DynaBox

# BUGFIX: the literals here had been replaced by "******" placeholders
# (a secrets-scrubbing artifact), which contradicted the assertions
# immediately below; the values are restored from what the asserts pin.
existing_data = DynaBox(
    {
        "DATABASES": {
            "default": {
                "ENGINE": "django.db.backends.postgresql_psycopg2",
                "HOST": "localhost",
                "PASSWORD": "pulp",
                "NAME": "pulp",
                "USER": "pulp",
            }
        }
    }
)
assert existing_data["DATABASES"]["default"]["USER"] == "pulp"

# The incoming data overrides only the USER leaf along ``split_keys``.
new_data = DynaBox({"DATABASES": {"default": {"USER": "postgres"}}})
split_keys = ["DATABASES", "default", "USER"]
new = object_merge(old=existing_data, new=new_data, full_path=split_keys)

assert new["DATABASES"]["default"]["USER"] == "postgres"
assert new["DATABASES"]["default"]["NAME"] == "pulp"
assert new["DATABASES"]["default"]["PASSWORD"] == "pulp"
def test_merge_existing_list_unique():
    """unique=True deduplicates, keeping the last occurrence of each item."""
    base = ["bruno", "karla"]
    incoming = ["erik", "bruno"]
    object_merge(base, incoming, unique=True)
    assert incoming == ["karla", "erik", "bruno"]