def as_yaml(self, as_string=True):
    """Serialize this experiment's parameters to YAML.

    :param as_string: when True return a YAML string (via
        ``xmen.utils.dic_to_yaml``); otherwise return the underlying
        ``CommentedMap`` with each key annotated by its help text.
    """
    from xmen.utils import dic_to_yaml
    from ruamel.yaml.comments import CommentedMap
    # Collect declared parameters plus the "special" attributes.
    params = {
        k: getattr(self, k) for k in dir(self)
        if k in self.param_keys() or k in self._specials}
    # Drop entries that are shadowed by a private '_<name>' attribute.
    params = {
        k: v for k, v in params.items() if '_' + k not in self.__dict__}
    helps = self.get_param_helps()
    # Add definition module to experiment object
    map = CommentedMap()
    for i, (k, v) in enumerate(params.items()):
        if self._status == DEFAULT:
            # While the experiment is still in its default (unregistered)
            # state, bookkeeping fields are omitted from the YAML dump.
            if k in [
                    '_root', '_status', '_purpose', '_messages',
                    '_origin', '_timestamps', '_notes', '_type']:
                continue
        comment = helps[k]
        if comment == '':
            comment = None
        # NOTE(review): ``i`` enumerates *all* params, so insert positions
        # are sparse when entries are skipped above; CommentedMap.insert
        # tolerates this, but confirm the intended ordering.
        map.insert(i, k, v, comment=comment)
    if as_string:
        return dic_to_yaml(map)
    else:
        return map
def fill_config(data_obj):
    """ Make sample config """
    def _append(target, key, value, comment=None):
        # Append ``key: value`` at the end of ``target`` (a CommentedMap),
        # optionally attaching an end-of-line comment.
        target.insert(len(target), key, value, comment=comment)

    _append(data_obj, "url", "https://jira.example.com", "Jira URL")
    _append(data_obj, "username", "some_username", "Jira login")
    _append(data_obj, "password", "SomeSecurePassword", "Jira password")
    _append(data_obj, "project", "SOME-PROJECT", "Jira project")
    _append(data_obj, "fields", CommentedMap(), "Fields for created tickets")
    fields_obj = data_obj["fields"]
    for field_key, field_value, field_comment in (
            ("Issue Type", "Bug", "(field) Ticket type"),
            ("Assignee", "Ticket_Assignee", "(field) Assignee"),
            ("Epic Link", "SOMEPROJECT-1234", "(field) Epic"),
            ("Security Level", "SOME_LEVEL", "(field) Security level")):
        _append(fields_obj, field_key, field_value, field_comment)
    _append(fields_obj, "Components/s", CommentedSeq(), "(field) Component/s")
    components_obj = fields_obj["Components/s"]
    component_obj = CommentedMap()
    _append(component_obj, "name", "Component Name")
    components_obj.append(component_obj)
    _append(data_obj, "custom_mapping", CommentedMap(), "Custom priority mapping")
    mapping_obj = data_obj["custom_mapping"]
    for severity, priority in (
            ("Critical", "Very High"),
            ("Major", "High"),
            ("Medium", "Medium"),
            ("Minor", "Low"),
            ("Trivial", "Low")):
        _append(mapping_obj, severity, priority)
def _to_yaml(root: "CommentedMap", obj):
    """Recursively render ``obj`` into ``root``.

    Non-``Nestable`` values are returned unchanged; a ``Nestable`` has each
    of its ``_props`` inserted into ``root`` (annotated with the property's
    comment) and ``root`` is returned.
    """
    if not isinstance(obj, Nestable):
        return obj
    data = CommentedMap()
    props = (getattr(type(obj), name) for name in getattr(obj, '_props', []))
    for position, prop in enumerate(props):
        root.insert(position, prop.name,
                    _to_yaml(data, getattr(obj, prop.name)),
                    comment=prop.comment)
    return root
def make_template(self):
    """
    Make a cwl job file template. Inspired by cwl-tool --make-template
    (calls cwl-tool.main.generate_input_template
    https://github.com/common-workflow-language/cwltool/blob/main/cwltool/main.py)
    :return: CommentedMap mapping each input's shortened id to a template value.
    """
    template = CommentedMap()
    for input in self.cwl_inputs:
        input_name = get_shortened_id(input.id)
        # Placeholder value and its descriptive comment for this input.
        template_param_value, comment = self._make_input_value_field(
            input, self._cwl_schema_def_requirement)
        # NOTE(review): inserting at position 0 lists inputs in *reverse*
        # declaration order; use len(template) if source order is wanted.
        # ``comment`` is passed positionally as CommentedMap.insert's
        # ``comment`` parameter.
        template.insert(0, input_name, template_param_value, comment)
    return template
def create_yml_config_white_list(self,
                                 white_list: Dict[str, Any] = None,
                                 diff: bool = False,
                                 indent: int = 0) -> Any:
    """The function creates the configuration YML file.

    :param white_list: Dictionary with lists of registers and its bitfields
    :param diff: Get only configuration with difference value to reset state.
    :param indent: Indent in space to generate YML.
    :return: YAML commented map of registers value.
    """
    data = CM()
    # Insertion position in ``data``; only advanced for emitted registers.
    ix = 0
    for reg in self.get_registers():
        # Skip registers that are not named in the white-list.
        if white_list and reg.name not in white_list.keys():
            continue
        reg_yml = CM()
        # In diff mode, omit registers still at their reset value.
        if diff and reg.get_value() == reg.get_reset_value():
            continue
        # Fall back to the register name when the description is just ".".
        descr = reg.description if reg.description != "." else reg.name
        # Break the description onto YAML comment continuation lines.
        descr = descr.replace(" ", "\n# ")
        data.insert(
            ix,
            reg.name,
            reg_yml,
            comment=descr,
        )
        ix += 1
        bitfields = reg.get_bitfields()
        if len(bitfields) > 0 and white_list and white_list[reg.name]:
            # Emit individual bitfields for this register.
            btf_yml = CM()
            for i, bitfield in enumerate(bitfields):
                if diff and bitfield.get_value(
                ) == bitfield.get_reset_value():
                    continue
                # A list-valued white-list entry names the allowed bitfields.
                if (white_list and isinstance(white_list[reg.name], list)
                        and bitfield.name not in white_list[reg.name]):
                    continue
                # NOTE(review): ``i`` skips positions for filtered
                # bitfields; CommentedMap.insert accepts sparse positions.
                btf_yml.insert(
                    pos=i,
                    key=bitfield.name,
                    value=bitfield.get_enum_value(),
                    comment=self._get_bitfield_yaml_description(
                        bitfield,
                        indent + SPSDK_YML_INDENT + SPSDK_YML_INDENT),
                )
            reg_yml.insert(1, "bitfields", btf_yml,
                           comment="The register bitfields")
        else:
            # No (white-listed) bitfields: emit the whole register value.
            reg_yml.insert(
                1,
                "value",
                reg.get_hex_value(),
                comment=f"The value width: {reg.width}b",
            )
    return data
def test_datatype_is_CommentedSeq(self):
    """A CommentedSeq keeps scalars and collapses a nested map to its marker value."""
    nested = CommentedMap()
    nested.insert(0, "to", "from")
    nested.insert(1, "__from__", "to")
    seq = CommentedSeq()
    seq.insert(0, "key")
    seq.insert(1, "to")
    seq.insert(2, nested)
    expected = CommentedSeq()
    expected.extend(["key", "to", "to"])
    self.assertEqual(expected, parse_for_variable_hierarchies(seq, "__from__"))
def _recursive_build_dict(self, comment_map: "CommentedMap", source_dict: dict,
                          index: int, deepness: int) -> int:
    """Recursively copy ``source_dict`` into ``comment_map``.

    String keys starting with ``_`` are treated as metadata and skipped; a
    ``_c_<key>`` entry supplies the comment rendered above ``<key>``
    (indented four spaces per nesting level).

    :param comment_map: target ``CommentedMap`` being populated
    :param source_dict: plain dict to copy (may contain ``_c_*`` comment keys)
    :param index: first insertion position to use in ``comment_map``
    :param deepness: nesting depth, used to indent key comments
    :return: the next free insertion index, so sibling calls can continue
        numbering where this call left off
    """
    # If you find a way to sanely do this without going over it multiple times and not having it be recursive, be
    # my guest.
    cur_index = index
    for key in source_dict:
        if not isinstance(key, str) or not key.startswith("_"):
            if isinstance(source_dict[key], dict):
                new_map = CommentedMap()
                comment_map.insert(cur_index, key, new_map)
                cur_index += 1
                cur_index = self._recursive_build_dict(
                    new_map, source_dict[key], cur_index, deepness + 1)
            else:
                comment_map.insert(cur_index, key, source_dict[key])
                cur_index += 1
            if key_comment := source_dict.get(f"_c_{key}", None):
                comment_map.yaml_set_comment_before_after_key(
                    key, key_comment, deepness * 4)
    # BUG FIX: the return type is ``int`` and the recursive call above
    # assigns the result back to ``cur_index``, but the original fell off
    # the end and returned None, corrupting the index for any caller with
    # nested dicts. Return the running insertion index.
    return cur_index
def _recursive_build_dict(self, comment_map: "CommentedMap", source_dict: dict,
                          index: int, deepness: int) -> int:
    """Recursively mirror ``source_dict`` into ``comment_map``.

    Keys starting with ``_`` are metadata and are not copied; a ``_c_<key>``
    entry provides the comment shown above ``<key>`` (indented four spaces
    per nesting level). Returns the next free insertion index so sibling
    calls can continue numbering.
    """
    # If you find a way to sanely do this without going over it multiple times and not having it be recursive, be
    # my guest.
    position = index
    for key in source_dict:
        # Metadata keys (leading underscore) are never copied.
        if key.startswith("_"):
            continue
        value = source_dict[key]
        if isinstance(value, dict):
            child = CommentedMap()
            comment_map.insert(position, key, child)
            position = self._recursive_build_dict(
                child, value, position + 1, deepness + 1)
        else:
            comment_map.insert(position, key, value)
            position += 1
        # TODO: Change following if statement to use the walrus operator once 3.8+ becomes minimum.
        key_comment = source_dict.get(f"_c_{key}", None)
        if key_comment:
            comment_map.yaml_set_comment_before_after_key(
                key, key_comment, deepness * 4)
    return position
def create_yml_config(self, file_name: str, raw: bool = False) -> None:
    """The function creates the configuration YML file.

    :param file_name: The file_name (without extension) of stored configuration.
    :param raw: Raw output of configuration (including computed fields and anti-pole registers)
    """
    # In raw mode nothing is excluded from the register dump.
    if raw:
        antipole_regs = None
        computed_fields = None
    else:
        antipole_regs = list(
            self.config.get_antipole_regs(self.device).values())
        computed_fields = self.config.get_computed_fields(self.device)
    yaml = YAML()
    yaml.indent(sequence=SPSDK_YML_INDENT * 2, offset=SPSDK_YML_INDENT)
    data = CM()
    description = CM()
    description.yaml_set_start_comment(
        f"NXP {self.device.upper()} Shadow registers configuration",
        indent=2)
    # Header metadata, inserted in a fixed order starting at position 1.
    header_items = (
        ("device", self.device, "The NXP device name."),
        ("version", __version__, "The SPSDK Shadow register tool version."),
        ("author", __author__, "The author of the configuration."),
        ("release", __release__, "The SPSDK release."),
    )
    for position, (key, value, note) in enumerate(header_items, start=1):
        description.insert(position, key, value, comment=note)
    data["description"] = description
    data["registers"] = self.regs.create_yml_config(
        exclude_regs=antipole_regs,
        exclude_fields=computed_fields,
        indent=2)
    with open(file_name, "w", encoding="utf8") as out_file:
        yaml.dump(data, out_file)
def fill_config(data_obj):
    """ Make sample config """
    data_obj.insert(
        len(data_obj), "scan_types", "all",
        comment=
        "ZAP scan type, supported any combination of: 'all', 'xss', 'sqli'"
    )
    # NOTE(review): the value below looks like a redacted URL while the
    # comment describes a password — surrounding entries (target/username/
    # password) appear collapsed in this copy; verify against the original.
    data_obj.insert(len(data_obj), "target", "http://*****:*****@ssw0rd",
                    comment="(optional) User password for authenticated scan")
    data_obj.insert(
        len(data_obj), "auth_script", CommentedSeq(),
        comment="(optional) Selenium-like script for authenticated scan")
    script_obj = data_obj["auth_script"]
    # Sample Selenium-like login sequence appended as flow-style maps.
    for command in [{
            "command": "open",
            "target": "%Target%/login",
            "value": ""
    }, {
            "command": "waitForElementPresent",
            "target": "id=login_login",
            "value": ""
    }, {
            "command": "waitForElementPresent",
            "target": "id=login_password",
            "value": ""
    }, {
            "command": "waitForElementPresent",
            "target": "id=login_0",
            "value": ""
    }, {
            "command": "type",
            "target": "id=login_login",
            "value": "%Username%"
    }, {
            "command": "type",
            "target": "id=login_password",
            "value": "%Password%"
    }, {
            "command": "clickAndWait",
            "target": "id=login_0",
            "value": ""
    }]:
        command_obj = CommentedMap()
        command_obj.fa.set_flow_style()  # render each step inline ({...})
        for key in ["command", "target", "value"]:
            command_obj.insert(len(command_obj), key, command[key])
        script_obj.append(command_obj)
    data_obj.insert(
        len(data_obj), "bind_all_interfaces", True,
        comment="(optional) Bind ZAP to all interfaces or only to localhost"
    )
    data_obj.insert(len(data_obj), "daemon_debug", False,
                    comment="(optional) Send ZAP daemon output to stdout")
    data_obj.insert(len(data_obj), "java_options", "-Xmx1g",
                    comment="(optional) Java options for ZAP daemon")
    data_obj.insert(
        len(data_obj), "split_by_endpoint", False,
        comment="(optional) Create separate findings for every endpoint")
    data_obj.insert(
        len(data_obj), "passive_scan_wait_threshold", 0,
        comment="(optional) Wait until N items left in passive scan queue")
    data_obj.insert(
        len(data_obj), "passive_scan_wait_limit", 600,
        comment="(optional) Time limit (in seconds) for passive scan")
    data_obj.insert(
        len(data_obj), "external_zap_daemon", "http://192.168.0.2:8091",
        comment=
        "(optional) Do not start internal ZAP daemon, use external one")
    data_obj.insert(len(data_obj), "external_zap_api_key", "dusty",
                    comment="(optional) API key for external ZAP daemon")
    data_obj.insert(
        len(data_obj), "save_intermediates_to", "/data/intermediates/dast",
        comment=
        "(optional) Save scan intermediates (raw results, logs, ...)")
def test_datatype_is_CommentedMap(self):
    """Maps holding the marker key collapse to the marker's value, recursively."""
    # Direct hit: the marker key's value is returned as-is.
    flat = CommentedMap()
    flat.insert(0, "key", "value")
    flat.insert(1, "__from__", "test")
    self.assertEqual("test", parse_for_variable_hierarchies(flat, "__from__"))
    # Nested hit: the child map collapses to its marker value.
    inner = CommentedMap()
    inner.insert(0, "to", "from")
    inner.insert(1, "__from__", "to")
    outer = CommentedMap()
    outer.insert(0, "key", "value")
    outer.insert(2, "someseq", inner)
    expected = CommentedMap()
    expected.insert(0, "key", "value")
    expected.insert(1, "someseq", "to")
    self.assertEqual(expected, parse_for_variable_hierarchies(outer, "__from__"))
def fill_config(data_obj):
    """ Make sample config """
    def _append(target, key, value, comment=None):
        # Append ``key: value`` at the end of ``target`` (a CommentedMap),
        # optionally attaching an end-of-line comment.
        target.insert(len(target), key, value, comment=comment)

    _append(data_obj, "url", "https://jira.example.com", "Jira URL")
    _append(data_obj, "username", "some_username", "Jira login")
    _append(data_obj, "password", "SomeSecurePassword", "Jira password")
    _append(data_obj, "project", "SOME-PROJECT", "Jira project")
    _append(data_obj, "fields", CommentedMap(), "Fields for created tickets")
    fields_obj = data_obj["fields"]
    for field_key, field_value, field_comment in (
            ("Issue Type", "Bug", "(field) Ticket type"),
            ("Assignee", "Ticket_Assignee", "(field) Assignee"),
            ("Epic Link", "SOMEPROJECT-1234", "(field) Epic"),
            ("Security Level", "SOME_LEVEL", "(field) Security level")):
        _append(fields_obj, field_key, field_value, field_comment)
    _append(fields_obj, "Components/s", CommentedSeq(), "(field) Component/s")
    components_obj = fields_obj["Components/s"]
    component_obj = CommentedMap()
    _append(component_obj, "name", "Component Name")
    components_obj.append(component_obj)
    _append(data_obj, "custom_mapping", CommentedMap(), "Custom priority mapping")
    mapping_obj = data_obj["custom_mapping"]
    for severity, priority in (
            ("Critical", "Very High"),
            ("Major", "High"),
            ("Medium", "Medium"),
            ("Minor", "Low"),
            ("Trivial", "Low")):
        _append(mapping_obj, severity, priority)
    _append(data_obj, "separate_epic_linkage", False,
            "(optional) Link to Epics after ticket creation")
    _append(data_obj, "max_description_size",
            constants.JIRA_DESCRIPTION_MAX_SIZE,
            "(optional) Cut description longer than set limit")
def fill_config(data_obj):
    """ Make sample config """
    data_obj.insert(
        len(data_obj), "scan_types", "all",
        comment=
        "ZAP scan type, supported any combination of: 'all', 'xss', 'sqli'"
    )
    # NOTE(review): the value below looks like a redacted URL while the
    # comment describes a password — surrounding entries appear collapsed
    # in this copy of the source; verify against the original.
    data_obj.insert(len(data_obj), "target", "http://*****:*****@ssw0rd",
                    comment="(optional) User password for authenticated scan")
    data_obj.insert(
        len(data_obj), "auth_script", CommentedSeq(),
        comment="(optional) Selenium-like script for authenticated scan")
    script_obj = data_obj["auth_script"]
    # Sample Selenium-like login sequence appended as flow-style maps.
    for command in [{
            "command": "open",
            "target": "http://app:8080/",
            "value": ""
    }, {
            "command": "waitForElementPresent",
            "target": "id=login_login",
            "value": ""
    }, {
            "command": "waitForElementPresent",
            "target": "id=login_password",
            "value": ""
    }, {
            "command": "waitForElementPresent",
            "target": "id=login_0",
            "value": ""
    }, {
            "command": "type",
            "target": "id=login_login",
            "value": "%Username%"
    }, {
            "command": "type",
            "target": "id=login_password",
            "value": "%Password%"
    }, {
            "command": "clickAndWait",
            "target": "id=login_0",
            "value": ""
    }]:
        command_obj = CommentedMap()
        command_obj.fa.set_flow_style()  # render each step inline ({...})
        for key in ["command", "target", "value"]:
            command_obj.insert(len(command_obj), key, command[key])
        script_obj.append(command_obj)
class YAMLRoundtripConfig(MutableConfigFile, MutableAbstractItemAccessMixin, MutableAbstractDictFunctionsMixin):
    """
    Class for YAML-based (roundtrip) configurations

    Wraps a ruamel.yaml ``CommentedMap`` (``self.data``) and proxies dict
    and comment-manipulation operations to it.
    """

    def __init__(self, owner: Any, manager: "m.StorageManager", path: str,
                 *args: List[Any], **kwargs: Dict[Any, Any]):
        # Backing store; replaced wholesale by load().
        self.data = CommentedMap()

        super().__init__(owner, manager, path, *args, **kwargs)

    def load(self):
        # Round-trip load preserves comments and key order (YAML 1.2).
        with open(self.path, "r") as fh:
            self.data = yaml.round_trip_load(fh, version=(1, 2))

    def reload(self):
        self.unload()
        self.load()

    def unload(self):
        self.data.clear()

    def save(self):
        if not self.mutable:
            raise RuntimeError("You may not modify a defaults file at runtime - check the mutable attribute!")

        with open(self.path, "w") as fh:
            yaml.round_trip_dump(self.data, fh)

    # region: CommentedMap functions

    def insert(self, pos, key, value, *, comment=None):
        """
        Insert a `key: value` pair at the given position, attaching a comment if provided

        Wrapper for `CommentedMap.insert()`
        """
        return self.data.insert(pos, key, value, comment)

    def add_eol_comment(self, comment, *, key=NoComment, column=30):
        """
        Add an end-of-line comment for a key at a particular column (30 by default)

        Wrapper for `CommentedMap.yaml_add_eol_comment()`
        """
        # Setting the column to None as the API actually defaults to will raise an exception, so we have to
        # specify one unfortunately
        return self.data.yaml_add_eol_comment(comment, key=key, column=column)

    def set_comment_before_key(self, key, comment, *, indent=0):
        """
        Set a comment before a given key

        Wrapper for `CommentedMap.yaml_set_comment_before_after_key()`
        """
        return self.data.yaml_set_comment_before_after_key(
            key, before=comment, indent=indent, after=None, after_indent=None
        )

    def set_start_comment(self, comment, indent=0):
        """
        Set the starting comment

        Wrapper for `CommentedMap.yaml_set_start_comment()`
        """
        return self.data.yaml_set_start_comment(comment, indent=indent)

    # endregion

    # region: Dict functions

    def clear(self):
        """Wrapper for `dict.clear()`"""
        return self.data.clear()

    def copy(self):
        """Wrapper for `dict.copy()`"""
        return self.data.copy()

    def get(self, key, default=None):
        """Wrapper for `dict.get()`"""
        return self.data.get(key, default)

    def items(self):
        """Wrapper for `dict.items()`"""
        return self.data.items()

    def keys(self):
        """Wrapper for `dict.keys()`"""
        return self.data.keys()

    def pop(self, key, default=None):
        """Wrapper for `dict.pop()`"""
        return self.data.pop(key, default)

    def popitem(self):
        """Wrapper for `dict.popitem()`"""
        return self.data.popitem()

    def setdefault(self, key, default=None):
        # Mirrors dict.setdefault(): store and return the default only when
        # the key is missing.
        if key not in self.data:
            self.data[key] = default
            return default

        return self.data[key]

    def update(self, other):
        """Wrapper for `dict.update()`"""
        return self.data.update(other)

    def values(self):
        """Wrapper for `dict.values()`"""
        return self.data.values()

    # endregion

    # Item access functions

    def __contains__(self, key):
        """
        Wrapper for `dict.__contains__()`
        """
        return self.data.__contains__(key)

    def __delitem__(self, key):
        """
        Wrapper for `dict.__delitem__()`
        """
        del self.data[key]

    def __getitem__(self, key):
        """
        Wrapper for `dict.__getitem__()`
        """
        return self.data.__getitem__(key)

    def __iter__(self):
        """
        Wrapper for `dict.__iter__()`
        """
        return self.data.__iter__()

    def __len__(self):
        """
        Wrapper for `dict.__len__()`
        """
        return self.data.__len__()

    def __setitem__(self, key, value):
        """
        Wrapper for `dict.__setitem__()`
        """
        return self.data.__setitem__(key, value)
def fill_config(data_obj):
    """ Make sample config """
    data_obj.insert(len(data_obj), "qualys_api_server",
                    "https://qualysapi.qualys.eu",
                    comment="Qualys API server URL")
    data_obj.insert(len(data_obj), "qualys_login", "some-user",
                    comment="Qualys user login")
    data_obj.insert(len(data_obj), "qualys_password", "S0m3P@ssw0rd",
                    comment="Qualys user password")
    data_obj.insert(len(data_obj), "qualys_option_profile_id", 12345,
                    comment="Qualys option profile ID")
    data_obj.insert(len(data_obj), "qualys_report_template_id", 12345,
                    comment="Qualys report template ID")
    data_obj.insert(len(data_obj), "qualys_scanner_type", "EXTERNAL",
                    comment="Qualys scanner type: EXTERNAL or INTERNAL")
    data_obj.insert(
        len(data_obj), "qualys_scanner_pool", CommentedSeq(),
        comment=
        "(INTERNAL only) Qualys scanner pool: list of scanner appliances to choose from"
    )
    pool_obj = data_obj["qualys_scanner_pool"]
    pool_obj.append("MY_SCANNER_Name1")
    pool_obj.append("MY_SCANNER_Name2")
    pool_obj.append("MY_OTHERSCANNER_Name")
    data_obj.insert(len(data_obj), "random_name", False,
                    comment="Use random project name")
    # NOTE(review): the value below looks like a redacted URL while the
    # comment describes a password — surrounding entries appear collapsed
    # in this copy of the source; verify against the original.
    data_obj.insert(len(data_obj), "target", "http://*****:*****@ssw0rd",
                    comment="(optional) User password for authenticated scan")
    data_obj.insert(
        len(data_obj), "auth_script", CommentedSeq(),
        comment="(optional) Selenium-like script for authenticated scan")
    script_obj = data_obj["auth_script"]
    # Sample Selenium-like login sequence appended as flow-style maps.
    for command in [{
            "command": "open",
            "target": "%Target%/login",
            "value": ""
    }, {
            "command": "waitForElementPresent",
            "target": "id=login_login",
            "value": ""
    }, {
            "command": "waitForElementPresent",
            "target": "id=login_password",
            "value": ""
    }, {
            "command": "waitForElementPresent",
            "target": "id=login_0",
            "value": ""
    }, {
            "command": "type",
            "target": "id=login_login",
            "value": "%Username%"
    }, {
            "command": "type",
            "target": "id=login_password",
            "value": "%Password%"
    }, {
            "command": "clickAndWait",
            "target": "id=login_0",
            "value": ""
    }]:
        command_obj = CommentedMap()
        command_obj.fa.set_flow_style()  # render each step inline ({...})
        for key in ["command", "target", "value"]:
            command_obj.insert(len(command_obj), key, command[key])
        script_obj.append(command_obj)
    data_obj.insert(
        len(data_obj), "logged_in_indicator", "Logout",
        comment=
        "(optional) Response regex that is always present for authenticated user"
    )
    data_obj.insert(
        len(data_obj), "sleep_interval", 10,
        comment="(optional) Seconds to sleep after creating new resource")
    data_obj.insert(
        len(data_obj), "status_check_interval", 60,
        comment=
        "(optional) Seconds to wait between scan/report status checks")
    data_obj.insert(len(data_obj), "retries", 10,
                    comment="(optional) API request retry count")
    data_obj.insert(len(data_obj), "retry_delay", 30,
                    comment="(optional) API request retry delay")
    data_obj.insert(len(data_obj), "timeout", 120,
                    comment="(optional) API request timeout")
    data_obj.insert(
        len(data_obj), "save_intermediates_to", "/data/intermediates/dast",
        comment=
        "(optional) Save scan intermediates (raw results, logs, ...)")
def test_CommentedMapEquality(self): cm = CommentedMap((("b", 2),)) cm.insert(1, "a", 1, comment="a comment") self.assertEqual(cm, {"a": 1, "b": 2})
class MVYaml(object):
    """Multi-version YAML document.

    Versions are stored side by side in one CommentedMap, keyed by an ISO
    timestamp tag; the ``__current`` metadata key names the active version
    and ``__type`` records whether values are base64-encoded.
    """

    # Metadata keys that are never treated as version tags.
    protected_keys = (
        '__current',
        '__type',
    )

    def __init__(self, base64=False):
        self._b64 = base64
        self._raw = CommentedMap()
        self._yaml = YAML()
        self._curr_version = None  # explicitly selected version tag, if any
        self._curr_data = None     # working copy of the selected version
        self._create()

    def _create(self):
        # Seed a brand-new document with one empty version and metadata.
        tag = self._make_tag()
        self._raw[tag] = CommentedMap()
        self._raw.insert(0, '__current', tag, 'current version')
        self._raw.insert(1, '__type', None, 'base64 if value are base64')
        self._commit(tag=tag, comment='Initial version')

    def import_yaml(self, file: AnyStr = None, stream: AnyStr = None):
        """Import a plain (non-mvyaml) YAML document as a new version."""
        data = None
        if file:
            with open(file, 'r') as fp:
                data = fp.read()
        imported_data = self._yaml.load(data or stream)
        self.override(imported_data)
        return self

    def load(self, file_handler: AnyStr = None, stream_data: AnyStr = None):
        """Load an existing mvyaml document from a path or a string.

        :raises MVYamlFileException: when the document lacks the mvyaml
            metadata keys.
        """
        data = None
        if file_handler:
            with open(file_handler, 'r') as fp:
                data = fp.read()
        self._raw = self._yaml.load(data or stream_data)
        # BUG FIX: the original tested ``self.protected_keys not in
        # self._raw.keys()``, which asks whether the *tuple itself* is a
        # key and therefore rejected every valid file. Require each
        # protected key to be present instead.
        if not all(key in self._raw.keys() for key in self.protected_keys):
            raise MVYamlFileException(
                f'Not a valid mvyaml file. Perhaps is a yaml you want to import with '
                f'import_yaml()?')
        return self

    def write(self, file_handler: IO = None,
              comment: AnyStr = None) -> [AnyStr, None]:
        """Commit pending changes and dump the document.

        Returns the YAML string when no ``file_handler`` is given.
        """
        if not self._raw:
            return
        if self._has_changes():
            self._commit(comment=comment)
        output = file_handler or StringIO()
        self._yaml.dump(self._raw, output)
        return output.getvalue() if not file_handler else None

    @property
    def versions(self):
        # All version tags (metadata keys excluded).
        if not self._raw:
            return []
        return [k for k in self._raw.keys() if k not in self.protected_keys]

    @property
    def current(self):
        # Tag of the currently active version.
        return self._raw['__current']

    @property
    def data(self):
        # Lazily deep-copy the selected version so edits don't touch the
        # stored document until committed.
        if not self._curr_data:
            self._curr_data = deepcopy(self._raw[self._curr_version
                                                 or self.current])
        return self._curr_data

    def with_version(self, version: str = '__current'):
        """Select ``version`` as the working version."""
        if version not in self.versions:
            raise MVYamlVersionNotFoundException(
                f'version {version} not found')
        self._curr_version = version
        self._curr_data = None
        return self

    @staticmethod
    def _make_tag() -> str:
        # Version tags are UTC timestamps.
        d = datetime.utcnow().isoformat()
        return d

    def override(self, data: [Iterable]):
        """Replace the working data wholesale and commit it."""
        self._curr_data = CommentedMap()
        self._curr_data.update(data)
        self._commit(comment='Overridden')
        return self

    def _commit(self, *args, **kwargs):
        # Commit strategy: new versions are inserted at the head.
        return self._commit_head(*args, **kwargs)

    def _commit_head(self, tag: AnyStr = None, comment: AnyStr = None):
        """
        apply the modifications on curr_data to the underling opened
        version and create a new tag (inserted right after the metadata)
        """
        commented_map = CommentedMap()
        commented_map.update(self._curr_data or self.data)
        if tag:
            self._raw[tag] = commented_map
            self._raw['__current'] = tag
        else:
            new_tag = self._make_tag()
            self._raw.insert(2, new_tag, commented_map, comment=comment)
            self._raw['__current'] = new_tag
        self._curr_version = None
        self._curr_data = None
        return self

    def _commit_tail(self, tag: AnyStr = None, comment: AnyStr = None):
        """
        apply the modifications on curr_data to the underling opened
        version and create a new tag (appended at the end of the document)
        """
        commented_map = CommentedMap()
        commented_map.update(self._curr_data or self.data)
        if tag:
            self._raw[tag] = commented_map
            self._raw['__current'] = tag
        else:
            new_tag = self._make_tag()
            self._raw.insert(len(self._raw.keys()), new_tag, commented_map,
                             comment=comment)
            self._raw['__current'] = new_tag
        self._curr_version = None
        self._curr_data = None
        return self

    def _has_changes(self):
        # True when the working copy differs from the stored version.
        orig = self._raw[self._curr_version or self.current]
        current = self._curr_data or self.data
        try:
            assert_equal(orig, current)
        except AssertionError:
            return True
        return False

    @property
    def changes(self) -> AnyStr:
        """Unified diff between the stored version and the working copy."""
        if not self._has_changes():
            return ''
        yaml_orig = as_yaml(self._raw[self._curr_version or self.current])
        yaml_curr = as_yaml(self._curr_data)
        differ = Differ()
        result = list(
            differ.compare(yaml_orig.splitlines(), yaml_curr.splitlines()))
        return '\n'.join(result)

    def set_current(self, version_label: AnyStr):
        """Mark ``version_label`` as the active version."""
        if version_label not in self.versions:
            raise MVYamlVersionNotFoundException(
                f'request version [{version_label}] not found')
        self._raw['__current'] = version_label
        self.with_version(version_label)
        return self
def _merge_dicts(
    self, lhs: CommentedMap, rhs: CommentedMap, path: YAMLPath
) -> CommentedMap:
    """
    Merge two YAML maps (CommentedMap-wrapped dicts).

    Keys unique to RHS are buffered and inserted ahead of the next shared
    key (keeping anchor definitions before their aliases); shared keys are
    merged per the configured merge mode (LEFT keeps LHS, RIGHT overwrites,
    otherwise a deep merge recurses by value type).

    Parameters:
    1. lhs (CommentedMap) The merge target.
    2. rhs (CommentedMap) The merge source.
    3. path (YAMLPath) Location within the DOM where this merge is
       taking place.

    Returns:  (CommentedMap) The merged result.

    Raises:
    - `MergeException` when a clean merge is impossible.
    """
    if not isinstance(lhs, CommentedMap):
        raise MergeException(
            "Impossible to add Hash data to non-Hash destination.", path)

    self.logger.debug(
        "Merging INTO dict with keys: {}:".format(", ".join([
            str(k.value) if isinstance(k, TaggedScalar) else str(k)
            for k in lhs.keys()])),
        data=lhs, prefix="Merger::_merge_dicts: ",
        header="--------------------")
    self.logger.debug(
        "Merging FROM dict with keys: {}:".format(", ".join([
            str(k.value) if isinstance(k, TaggedScalar) else str(k)
            for k in rhs.keys()])),
        data=rhs, prefix="Merger::_merge_dicts: ",
        footer="====================")

    # Delete all internal YAML merge reference keys lest any later
    # .insert() operation on LHS inexplicably convert them from reference
    # to concrete keys. This seems like a bug in ruamel.yaml...
    self._delete_mergeref_keys(lhs)

    # Assume deep merge until a node's merge rule indicates otherwise
    buffer: List[Tuple[Any, Any]] = []
    buffer_pos = 0
    for key, val in rhs.non_merged_items():
        # NOTE: ``seperator`` is the YAMLPath library's own (misspelled)
        # attribute name.
        path_next = (path
                     + YAMLPath.escape_path_section(key, path.seperator))
        if key in lhs:
            # Write the buffer if populated
            for b_key, b_val in buffer:
                self.logger.debug(
                    "Merger::_merge_dicts:  Inserting key, {}, from"
                    " buffer to position, {}, at path, {}."
                    .format(b_key, buffer_pos, path_next),
                    header="INSERT " * 15)
                self.logger.debug(
                    "Before INSERT, the LHS document was:",
                    data=lhs, prefix="Merger::_merge_dicts:  ")
                self.logger.debug(
                    "... and before INSERT, the incoming value will be:",
                    data=b_val, prefix="Merger::_merge_dicts:  ")
                lhs.insert(buffer_pos, b_key, b_val)
                self.logger.debug(
                    "After INSERT, the LHS document became:",
                    data=lhs, prefix="Merger::_merge_dicts:  ")
                buffer_pos += 1
            buffer = []

            # Short-circuit the deep merge if a different merge rule
            # applies to this node.
            node_coord = NodeCoords(val, rhs, key)
            merge_mode = (
                self.config.hash_merge_mode(node_coord)
                if isinstance(val, CommentedMap)
                else self.config.set_merge_mode(node_coord)
                if isinstance(val, CommentedSet)
                else self.config.aoh_merge_mode(node_coord)
            )
            self.logger.debug("Merger::_merge_dicts:  Got merge mode, {}."
                              .format(merge_mode))
            if merge_mode in (
                HashMergeOpts.LEFT, AoHMergeOpts.LEFT, SetMergeOpts.LEFT
            ):
                # LEFT: keep the LHS value untouched.
                continue
            if merge_mode in (
                HashMergeOpts.RIGHT, AoHMergeOpts.RIGHT, SetMergeOpts.RIGHT
            ):
                # RIGHT: take the RHS value wholesale.
                self.logger.debug(
                    "Merger::_merge_dicts:  Overwriting key, {}, at path,"
                    " {}.".format(key, path_next),
                    header="OVERWRITE " * 15)
                lhs[key] = val
                continue

            # Deep merge: recurse according to the RHS value's type.
            if isinstance(val, CommentedMap):
                lhs[key] = self._merge_dicts(lhs[key], val, path_next)

                # Synchronize any YAML Tag
                self.logger.debug(
                    "Merger::_merge_dicts:  Setting LHS tag from {} to {}."
                    .format(lhs[key].tag.value, val.tag.value))
                lhs[key].yaml_set_tag(val.tag.value)

                self.logger.debug(
                    "Document BEFORE calling combine_merge_anchors:",
                    data=lhs, prefix="Merger::_merge_dicts:  ",
                    header="+------------------+")
                Anchors.combine_merge_anchors(lhs[key], val)
                self.logger.debug(
                    "Document AFTER calling combine_merge_anchors:",
                    data=lhs, prefix="Merger::_merge_dicts:  ",
                    footer="+==================+")
            elif isinstance(val, CommentedSeq):
                lhs[key] = self._merge_lists(
                    lhs[key], val, path_next, parent=rhs, parentref=key)

                # Synchronize any YAML Tag
                self.logger.debug(
                    "Merger::_merge_dicts:  Setting LHS tag from {} to {}."
                    .format(lhs[key].tag.value, val.tag.value))
                lhs[key].yaml_set_tag(val.tag.value)
            elif isinstance(val, CommentedSet):
                lhs[key] = self._merge_sets(
                    lhs[key], val, path_next, node_coord)

                # Synchronize any YAML Tag
                self.logger.debug(
                    "Merger::_merge_dicts:  Setting LHS tag from {} to {}."
                    .format(lhs[key].tag.value, val.tag.value))
                lhs[key].yaml_set_tag(val.tag.value)
            else:
                # Scalar: RHS value replaces LHS.
                self.logger.debug(
                    "Merger::_merge_dicts:  Updating key, {}, at path,"
                    " {}.".format(key, path_next), header="UPDATE " * 15)
                self.logger.debug(
                    "Before UPDATE, the LHS document was:",
                    data=lhs, prefix="Merger::_merge_dicts:  ")
                self.logger.debug(
                    "... and before UPDATE, the incoming value will be:",
                    data=val, prefix="Merger::_merge_dicts:  ")
                lhs[key] = val
                self.logger.debug(
                    "After UPDATE, the LHS document became:",
                    data=lhs, prefix="Merger::_merge_dicts:  ")
        else:
            # LHS lacks the RHS key. Buffer this key-value pair in order
            # to insert it ahead of whatever key(s) follow this one in RHS
            # to keep anchor definitions before their aliases.
            buffer.append((key, val))
            buffer_pos += 1

    # Write any remaining buffered content to the end of LHS
    for b_key, b_val in buffer:
        self.logger.debug(
            "Merger::_merge_dicts:  Appending key, {}, from buffer at"
            " path, {}.".format(b_key, path), header="APPEND " * 15)
        lhs[b_key] = b_val

    self.logger.debug(
        "Completed merge result for path, {}:".format(path),
        data=lhs, prefix="Merger::_merge_dicts: ")

    return lhs
def get_yaml_config(self, data: "CM", indent: int = 0) -> "CM":
    """Return YAML configuration In PfrConfiguration format.

    :param data: The registers settings data.
    :param indent: YAML start indent.
    :return: YAML PFR configuration in commented map(ordered dict).
    :raises SPSDKError: When there is no device found
    :raises SPSDKError: When there is no type found
    """
    if not self.device:
        raise SPSDKError("Device not found")
    if not self.type:
        raise SPSDKError("Type not found")
    res_data = CM()
    res_data.yaml_set_start_comment(
        f"NXP {self.device} PFR {self.type} configuration", indent=indent)
    # Description header, inserted in a fixed order starting at position 1.
    description = CM()
    header_items = (
        ("device", self.device, "The NXP device name."),
        ("revision", self.revision, "The NXP device revision."),
        ("type", self.type.upper(), "The PFR type (CMPA, CFPA)."),
        ("version", spsdk_version, "The SPSDK tool version."),
        ("author", spsdk_author, "The author of the configuration."),
        ("release", spsdk_release, "The SPSDK release."),
    )
    for position, (key, value, note) in enumerate(header_items, start=1):
        description.insert(position, key, value, comment=note)
    res_data.insert(
        1,
        "description",
        description,
        comment=f"The PFR {self.type} configuration description.",
    )
    res_data.insert(
        2,
        "settings",
        data,
        comment=f"The PFR {self.type} registers configuration.")
    return res_data
}, { "name": "message", "verbose_name": "Changelog message", "type": "str", "required": True, }, ], "output_file": DEFAULT_OUTPUT, PARTIAL_KEY_NAME: DEFAULT_PARTIAL_VALUE, "user_data": DEFAULT_USER_DATA, } ) DEFAULT_CONFIG.insert( 0, "context", {"issues_url": "http://repo/issues"}, comment="All variables defined here will be passed into templates", ) DEFAULT_CONFIG.insert( 1, "message_types", [ {"name": "feature", "title": "Features"}, {"name": "bug", "title": "Bug fixes"}, {"name": "doc", "title": "Documentation changes"}, {"name": "deprecation", "title": "Deprecations"}, {"name": "other", "title": "Other changes"}, ], comment="The order defined below will be preserved in the output changelog file", )