def _merge_simple_lists(self, lhs: CommentedSeq, rhs: CommentedSeq, path: YAMLPath, node_coord: NodeCoords) -> CommentedSeq:
    """
    Merge two CommentedSeq-wrapped lists of Scalars or CommentedSeqs.

    Parameters:
    1. lhs (CommentedSeq) The merge target.
    2. rhs (CommentedSeq) The merge source.
    3. path (YAMLPath) Location within the DOM where this merge is
       taking place.
    4. node_coord (NodeCoords) The RHS root node, its parent, and
       reference within its parent; used for config lookups.

    Returns:  (list) The merged result.

    Raises:
    - `MergeException` when a clean merge is impossible.
    """
    if not isinstance(lhs, CommentedSeq):
        raise MergeException(
            "Impossible to add Array data to non-Array destination.",
            path)

    merge_mode = self.config.array_merge_mode(node_coord)
    if merge_mode is ArrayMergeOpts.LEFT:
        return lhs
    if merge_mode is ArrayMergeOpts.RIGHT:
        return rhs

    tagless_lhs = Nodes.tagless_elements(lhs)
    for rhs_idx, rhs_ele in enumerate(rhs):
        path_next = path + "[{}]".format(rhs_idx)
        self.logger.debug("Processing element {} at {}.".format(
            rhs_idx, path_next),
            prefix="Merger::_merge_simple_lists:  ",
            data=rhs_ele)

        if merge_mode is not ArrayMergeOpts.UNIQUE:
            # ALL mode:  every RHS element is appended verbatim.
            lhs.append(rhs_ele)
            continue

        # UNIQUE mode:  compare the (tag-stripped) RHS value against the
        # tag-stripped LHS elements.
        cmp_val = (rhs_ele.value
                   if isinstance(rhs_ele, TaggedScalar)
                   else rhs_ele)
        self.logger.debug(
            "Looking for comparison value, {}, in:".format(cmp_val),
            prefix="Merger::_merge_simple_lists:  ",
            data=tagless_lhs)

        if cmp_val in tagless_lhs:
            # Replace every matching LHS element with the RHS element so
            # the RHS value (and its tag, if any) wins.
            replacements = []
            for lhs_ele in lhs:
                is_match = (
                    lhs_ele == cmp_val
                    or (isinstance(lhs_ele, TaggedScalar)
                        and lhs_ele.value == cmp_val))
                replacements.append(rhs_ele if is_match else lhs_ele)
            lhs = CommentedSeq(replacements)
        else:
            lhs.append(rhs_ele)

    return lhs
def test_update_tag(self):
    """Verify apply_yaml_tag swaps the tag but keeps the wrapped value."""
    original_tag = "!tagged"
    replacement_tag = "!changed"
    scalar = ry.scalarstring.PlainScalarString("tagged value")
    wrapped = ry.comments.TaggedScalar(scalar, tag=original_tag)

    retagged = Nodes.apply_yaml_tag(wrapped, replacement_tag)

    assert retagged.tag.value == replacement_tag
    assert retagged.value == scalar
def _insert_scalar(
    self, insert_at: YAMLPath, lhs: Any, lhs_proc: Processor, rhs: Any
) -> bool:
    """Insert an RHS scalar into the LHS document."""
    if isinstance(lhs, CommentedSeq):
        self.logger.debug(
            "Merger::_insert_scalar:  Merging a scalar into a list.")
        Nodes.append_list_element(lhs, rhs)
        return True

    if isinstance(lhs, CommentedSet):
        self.logger.debug(
            "Merger::_insert_scalar:  Merging a scalar into a set.")
        self._merge_sets(
            lhs, CommentedSet([rhs]), insert_at,
            NodeCoords(rhs, None, None))
        return True

    if isinstance(lhs, CommentedMap):
        # A bare scalar cannot be merged into a map; it has no key.
        ex_message = (
            "Impossible to add Scalar value, {}, to a Hash without"
            " a key.  Change the value to a 'key: value' pair, a"
            " '{{key: value}}' Hash, or change the merge target to"
            " an Array or other Scalar value."
        ).format(rhs)
        if len(str(rhs)) < 1 and not sys.stdin.isatty():
            # An empty scalar usually means an accidental open STDIN.
            ex_message += (
                " You may be seeing this because your workflow"
                " inadvertently opened a STDIN handle to {}.  If"
                " this may be the case, try adding --nostdin or -S"
                " so as to block unintentional STDIN reading."
            ).format(basename(sys.argv[0]))
        raise MergeException(ex_message, insert_at)

    # The target is itself a Scalar; overwrite it in place.
    lhs_proc.set_value(insert_at, rhs)
    return True
def _merge_sets(
    self, lhs: CommentedSet, rhs: CommentedSet, path: YAMLPath,
    node_coord: NodeCoords
) -> CommentedSet:
    """
    Merge two YAML sets (CommentedSet-wrapped sets).

    Parameters:
    1. lhs (CommentedSet) The merge target.
    2. rhs (CommentedSet) The merge source.
    3. path (YAMLPath) Location within the DOM where this merge is
       taking place.
    4. node_coord (NodeCoords) The RHS root node, its parent, and
       reference within its parent; used for config lookups.

    Returns:  (CommentedSet) The merged result.

    Raises:
    - `MergeException` when a clean merge is impossible.
    """
    merge_mode = self.config.set_merge_mode(node_coord)
    if merge_mode is SetMergeOpts.LEFT:
        return lhs
    if merge_mode is SetMergeOpts.RIGHT:
        return rhs

    # Compare RHS entries against tag-stripped LHS entries so a tagged
    # and an untagged copy of the same value are treated as equal.
    tagless_lhs = Nodes.tagless_elements(list(lhs))
    for rhs_ele in rhs:
        path_next = (
            path + YAMLPath.escape_path_section(rhs_ele, path.seperator))
        self.logger.debug(
            "Processing set element {} at {}.".format(rhs_ele, path_next),
            prefix="Merger::_merge_sets:  ",
            data=rhs_ele)

        cmp_val = (rhs_ele.value
                   if isinstance(rhs_ele, TaggedScalar)
                   else rhs_ele)
        self.logger.debug(
            "Looking for comparison value, {}, in:".format(cmp_val),
            prefix="Merger::_merge_sets:  ",
            data=tagless_lhs)

        if cmp_val not in tagless_lhs:
            lhs.add(rhs_ele)

    return lhs
def test_aoh_is_inconsistent(self):
    """A list mixing dicts and None is not a consistent AoH."""
    mixed_list = [{"key": "value"}, None]
    assert Nodes.node_is_aoh(mixed_list) is False
def test_dict_to_str(self):
    """A literal '{}' string must survive make_new_node unchanged."""
    result = Nodes.make_new_node("", "{}", YAMLValueFormats.DEFAULT)
    assert result == "{}"
def test_aoh_node_is_none(self):
    """None is never an Array-of-Hashes."""
    assert Nodes.node_is_aoh(None) is False
def test_aoh_node_is_not_list(self):
    """A bare dict is never an Array-of-Hashes."""
    plain_map = {"key": "value"}
    assert Nodes.node_is_aoh(plain_map) is False
def clone_node(*args):
    """Pass all arguments through to the Nodes.clone_node static method."""
    relay_target = Nodes.clone_node
    return relay_target(*args)
def search_matches(method: PathSearchMethods, needle: str, haystack: Any) -> bool:
    """
    Perform a search comparison.

    NOTE:  For less-than, greater-than and related operations, the test
    is whether `haystack` is less/greater-than `needle`.

    Parameters:
    1. method (PathSearchMethods) The search method to employ
    2. needle (str) The value to look for.
    3. haystack (Any) The value to look in.

    Returns:  (bool) True = comparison passes; False = comparison fails.

    Raises:
    - `NotImplementedError` when method is unknown.
    """
    typed_haystack = Nodes.typed_value(haystack)
    typed_needle = Nodes.typed_value(needle)
    needle_type = type(typed_needle)

    def ordered_compare(compare):
        # Shared ladder for the four ordering operators:  a numeric
        # haystack (bool counts as int, preserving the original
        # isinstance behavior) compares natively against a numeric
        # needle and can never match a non-numeric needle; any other
        # haystack falls back to comparing against the RAW (untyped)
        # needle string, exactly as the original branches did.
        if isinstance(typed_haystack, (int, float)):
            if isinstance(typed_needle, (int, float)):
                return compare(typed_haystack, typed_needle)
            return False
        return compare(str(typed_haystack), str(needle))

    matches: bool = False
    if method is PathSearchMethods.EQUALS:
        # Equality demands matching types before matching values; the
        # bool check precedes int because bool is a subclass of int.
        if isinstance(typed_haystack, bool) and needle_type is bool:
            matches = typed_haystack == typed_needle
        elif isinstance(typed_haystack, int) and needle_type is int:
            matches = typed_haystack == typed_needle
        elif isinstance(typed_haystack, float) and needle_type is float:
            matches = typed_haystack == typed_needle
        else:
            matches = str(typed_haystack) == str(needle)
    elif method is PathSearchMethods.STARTS_WITH:
        matches = str(typed_haystack).startswith(needle)
    elif method is PathSearchMethods.ENDS_WITH:
        matches = str(typed_haystack).endswith(needle)
    elif method is PathSearchMethods.CONTAINS:
        matches = needle in str(typed_haystack)
    elif method is PathSearchMethods.GREATER_THAN:
        matches = ordered_compare(lambda hay, ndl: hay > ndl)
    elif method is PathSearchMethods.LESS_THAN:
        matches = ordered_compare(lambda hay, ndl: hay < ndl)
    elif method is PathSearchMethods.GREATER_THAN_OR_EQUAL:
        matches = ordered_compare(lambda hay, ndl: hay >= ndl)
    elif method is PathSearchMethods.LESS_THAN_OR_EQUAL:
        matches = ordered_compare(lambda hay, ndl: hay <= ndl)
    elif method == PathSearchMethods.REGEX:
        matcher = re.compile(needle)
        matches = matcher.search(str(typed_haystack)) is not None
    else:
        raise NotImplementedError

    return matches
def append_list_element(*args):
    """Pass all arguments through to Nodes.append_list_element."""
    relay_target = Nodes.append_list_element
    return relay_target(*args)
def test_tag_map(self):
    """Verify apply_yaml_tag attaches a tag to a map node."""
    applied_tag = "!something"
    source_map = ry.comments.CommentedMap({"key": "value"})
    tagged_map = Nodes.apply_yaml_tag(source_map, applied_tag)
    assert tagged_map.tag.value == applied_tag
def min(data: Any, invert: bool, parameters: List[str], yaml_path: YAMLPath, **kwargs: Any) -> Generator[NodeCoords, None, None]:
    """
    Find whichever nodes/elements have a minimum value.

    Tracks the running minimum (`match_value`) plus every node tied with
    it (`match_nodes`); all other evaluated nodes go to `discard_nodes`.
    Inverting the search yields the discards instead of the matches.

    Parameters:
    1. data (Any) The data to evaluate
    2. invert (bool) Invert the evaluation
    3. parameters (List[str]) Parsed parameters
    4. yaml_path (YAMLPath) YAML Path begetting this operation

    Keyword Arguments:
    * parent (ruamel.yaml node) The parent node from which this query
      originates
    * parentref (Any) The Index or Key of data within parent
    * relay_segment (PathSegment) YAML Path segment presently under
      evaluation
    * translated_path (YAMLPath) YAML Path indicating precisely which node
      is being evaluated
    * ancestry (List[AncestryEntry]) Stack of ancestors preceding the
      present node under evaluation

    Returns:  (Generator[NodeCoords, None, None]) each result as it is
    generated

    Raises:
    - `YAMLPathException` when parameters are inappropriate for the
      shape of the evaluated data.
    """
    parent: Any = kwargs.pop("parent", None)
    parentref: Any = kwargs.pop("parentref", None)
    translated_path: YAMLPath = kwargs.pop("translated_path", YAMLPath(""))
    ancestry: List[AncestryEntry] = kwargs.pop("ancestry", [])
    relay_segment: PathSegment = kwargs.pop("relay_segment", None)

    # There may be 0 or 1 parameters
    param_count = len(parameters)
    if param_count > 1:
        raise YAMLPathException(
            ("Invalid parameter count to {}([NAME]); up to {} permitted, "
             " got {} in YAML Path").format(
                PathSearchKeywords.MIN, 1, param_count),
            str(yaml_path))

    scan_node = parameters[0] if param_count > 0 else None
    match_value: Any = None
    match_nodes: List[NodeCoords] = []
    discard_nodes: List[NodeCoords] = []
    unwrapped_data: Any = NodeCoords.unwrap_node_coords(data)
    if Nodes.node_is_aoh(unwrapped_data, accept_nulls=True):
        # Array-of-Hashes:  compare records by their scan_node attribute.
        # A named child node is mandatory.
        if scan_node is None:
            raise YAMLPathException((
                "The {}([NAME]) Search Keyword requires a key name to scan"
                " when evaluating an Array-of-Hashes in YAML Path").format(
                    PathSearchKeywords.MIN),
                str(yaml_path))

        for idx, wrapped_ele in enumerate(data):
            ele = NodeCoords.unwrap_node_coords(wrapped_ele)
            next_path = translated_path + "[{}]".format(idx)
            next_ancestry = ancestry + [(data, idx)]
            if ele is not None and scan_node in ele:
                eval_val = ele[scan_node]
                # A strictly-lower value resets the match set and demotes
                # all prior matches to discards.
                if (match_value is None
                        or Searches.search_matches(
                            PathSearchMethods.LESS_THAN, match_value,
                            eval_val)):
                    match_value = eval_val
                    discard_nodes.extend(match_nodes)
                    match_nodes = [
                        NodeCoords(ele, data, idx, next_path,
                                   next_ancestry, relay_segment)
                    ]
                    continue

                # A tie joins the current match set.
                if (match_value is None
                        or Searches.search_matches(
                            PathSearchMethods.EQUALS, match_value,
                            eval_val)):
                    match_nodes.append(
                        NodeCoords(ele, data, idx, next_path,
                                   next_ancestry, relay_segment))
                    continue

            # Null records and non-matching records are discards.
            discard_nodes.append(
                NodeCoords(ele, data, idx, next_path, next_ancestry,
                           relay_segment))

    elif isinstance(data, dict):
        # Hash of Hashes:  compare child maps by their scan_node attribute.
        # A named child node is mandatory.
        if scan_node is None:
            raise YAMLPathException((
                "The {}([NAME]) Search Keyword requires a key name to scan"
                " when comparing Hash/map/dict children in YAML Path"
            ).format(PathSearchKeywords.MIN),
            str(yaml_path))

        for key, val in data.items():
            next_ancestry = ancestry + [(data, key)]
            next_path = (translated_path + YAMLPath.escape_path_section(
                key, translated_path.seperator))
            if isinstance(val, dict):
                if val is not None and scan_node in val:
                    eval_val = val[scan_node]
                    # Strictly lower:  reset the match set.
                    if (match_value is None
                            or Searches.search_matches(
                                PathSearchMethods.LESS_THAN, match_value,
                                eval_val)):
                        match_value = eval_val
                        discard_nodes.extend(match_nodes)
                        match_nodes = [
                            NodeCoords(val, data, key, next_path,
                                       next_ancestry, relay_segment)
                        ]
                        continue

                    # Tie:  join the match set.
                    if (match_value is None
                            or Searches.search_matches(
                                PathSearchMethods.EQUALS, match_value,
                                eval_val)):
                        match_nodes.append(
                            NodeCoords(val, data, key, next_path,
                                       next_ancestry, relay_segment))
                        continue
            elif scan_node in data:
                # The user probably meant to operate against the parent
                raise YAMLPathException(
                    ("The {}([NAME]) Search Keyword operates against"
                     " collections of data which share a common attribute"
                     " yet there is only a single node to consider. Did"
                     " you mean to evaluate the parent of the selected"
                     " node? Please review your YAML Path").format(
                        PathSearchKeywords.MIN),
                    str(yaml_path))

            # NOTE(review):  child values that are not dicts (and dict
            # children lacking scan_node) always land here as discards.
            discard_nodes.append(
                NodeCoords(val, data, key, next_path, next_ancestry,
                           relay_segment))

    elif isinstance(data, list):
        # Plain list:  compare the elements to one another directly.
        # A named child node is useless.
        if scan_node is not None:
            raise YAMLPathException(
                ("The {}([NAME]) Search Keyword cannot utilize a key name"
                 " when comparing Array/sequence/list elements to one"
                 " another in YAML Path").format(PathSearchKeywords.MIN),
                str(yaml_path))

        for idx, ele in enumerate(data):
            next_path = translated_path + "[{}]".format(idx)
            next_ancestry = ancestry + [(data, idx)]
            # Strictly lower:  reset the match set.
            if (ele is not None
                    and (match_value is None
                         or Searches.search_matches(
                             PathSearchMethods.LESS_THAN, match_value,
                             ele))):
                match_value = ele
                discard_nodes.extend(match_nodes)
                match_nodes = [
                    NodeCoords(ele, data, idx, next_path, next_ancestry,
                               relay_segment)
                ]
                continue

            # Tie:  join the match set.
            if (ele is not None
                    and Searches.search_matches(
                        PathSearchMethods.EQUALS, match_value, ele)):
                match_nodes.append(
                    NodeCoords(ele, data, idx, next_path, next_ancestry,
                               relay_segment))
                continue

            discard_nodes.append(
                NodeCoords(ele, data, idx, next_path, next_ancestry,
                           relay_segment))

    else:
        # Non-complex data is always its own minimum and does not invert
        match_value = data
        match_nodes = [
            NodeCoords(data, parent, parentref, translated_path, ancestry,
                       relay_segment)
        ]

    yield_nodes = discard_nodes if invert else match_nodes
    for node_coord in yield_nodes:
        yield node_coord
def _merge_arrays_of_hashes(
    self, lhs: CommentedSeq, rhs: CommentedSeq, path: YAMLPath,
    node_coord: NodeCoords
) -> CommentedSeq:
    """
    Merge two Arrays-of-Hashes.

    This is a deep merge operation.  Each dict is treated as a record
    with an identity key.  RHS records are merged with LHS records for
    which the identity key matches.  As such, an identity key is
    required in both LHS and RHS records.  This key is configurable.
    When there is no LHS match for an RHS key, the RHS record is
    appended to the LHS list.

    Parameters:
    1. lhs (CommentedSeq) The merge target.
    2. rhs (CommentedSeq) The merge source.
    3. path (YAMLPath) Location within the DOM where this merge is
       taking place.
    4. node_coord (NodeCoords) The RHS root node, its parent, and
       reference within its parent; used for config lookups.

    Returns:  (CommentedSeq) The merged result.

    Raises:
    - `MergeException` when a clean merge is impossible.
    """
    if not isinstance(lhs, CommentedSeq):
        raise MergeException(
            "Impossible to add Array-of-Hash data to non-Array"
            " destination."
            , path)

    self.logger.debug(
        "Merging {} Hash(es) at {}.".format(len(rhs), path),
        prefix="Merger::_merge_arrays_of_hashes:  ",
        data=rhs)

    # The identity key is looked up once, from the first RHS record;
    # assumes all records in the list share the same identity key.
    id_key: str = ""
    if len(rhs) > 0 and isinstance(rhs[0], CommentedMap):
        id_key = self.config.aoh_merge_key(
            NodeCoords(rhs[0], rhs, 0), rhs[0])
        self.logger.debug(
            "Merger::_merge_arrays_of_hashes:  RHS AoH yielded id_key:"
            "  {}.".format(id_key))

    merge_mode = self.config.aoh_merge_mode(node_coord)
    for idx, ele in enumerate(rhs):
        path_next = path + "[{}]".format(idx)
        self.logger.debug(
            "Processing element #{} at {}.".format(idx, path_next),
            prefix="Merger::_merge_arrays_of_hashes:  ",
            data=ele)

        if merge_mode is AoHMergeOpts.DEEP:
            # DEEP:  match records by identity key, then deep-merge the
            # matched pair; unmatched RHS records are appended.
            if id_key in ele:
                id_val = Nodes.tagless_value(ele[id_key])
            else:
                raise MergeException(
                    "Mandatory identity key, {}, not present in Hash with"
                    " keys:  {}."
                    .format(id_key, ", ".join(ele.keys()))
                    , path_next
                )

            merged_hash = False
            # Only the FIRST LHS record whose identity value matches is
            # merged (note the break below).
            for lhs_hash in (
                lhs_hash for lhs_hash in lhs
                if isinstance(lhs_hash, CommentedMap)
                and id_key in lhs_hash
                and Nodes.tagless_value(lhs_hash[id_key]) == id_val
            ):
                self._merge_dicts(lhs_hash, ele, path_next)
                merged_hash = True

                # Synchronize YAML Tags
                lhs_hash.yaml_set_tag(ele.tag.value)
                break
            if not merged_hash:
                # No LHS match; append the RHS record (preserving its
                # anchor, when present).
                Nodes.append_list_element(lhs, ele,
                    ele.anchor.value if hasattr(ele, "anchor") else None)
        elif merge_mode is AoHMergeOpts.UNIQUE:
            # UNIQUE:  append only records not already present verbatim.
            if ele not in lhs:
                Nodes.append_list_element(
                    lhs, ele,
                    ele.anchor.value if hasattr(ele, "anchor") else None)
        else:
            # ALL:  append every RHS record unconditionally.
            Nodes.append_list_element(lhs, ele,
                ele.anchor.value if hasattr(ele, "anchor") else None)
    return lhs
def _has_anchored_child(
    data: Any, invert: bool, parameters: List[str], yaml_path: YAMLPath,
    **kwargs: Any) -> Generator[NodeCoords, None, None]:
    """
    Indicate whether data has an anchored child.

    Parameters:
    1. data (Any) The data to evaluate
    2. invert (bool) Invert the evaluation
    3. parameters (List[str]) Parsed parameters
    4. yaml_path (YAMLPath) YAML Path begetting this operation

    Keyword Arguments:
    * parent (ruamel.yaml node) The parent node from which this query
      originates
    * parentref (Any) The Index or Key of data within parent
    * relay_segment (PathSegment) YAML Path segment presently under
      evaluation
    * translated_path (YAMLPath) YAML Path indicating precisely which node
      is being evaluated
    * ancestry (List[AncestryEntry]) Stack of ancestors preceding the
      present node under evaluation

    Returns:  (Generator[NodeCoords, None, None]) each result as it is
    generated
    """
    parent: Any = kwargs.pop("parent", None)
    parentref: Any = kwargs.pop("parentref", None)
    translated_path: YAMLPath = kwargs.pop("translated_path", YAMLPath(""))
    ancestry: List[AncestryEntry] = kwargs.pop("ancestry", [])
    relay_segment: PathSegment = kwargs.pop("relay_segment", None)
    match_key = parameters[0]
    # Accept the anchor name with or without its leading "&" sigil.
    anchor_name = match_key[1:] if match_key[0] == "&" else match_key

    if isinstance(data, CommentedMap):
        # Look for YAML Merge Keys by the Anchor name; the anchor is
        # resolved against the whole document (the oldest ancestor) so
        # merge-key references to nodes outside `data` are found.
        all_data = ancestry[0][0] if len(ancestry) > 0 else data
        all_anchors: Dict[str, Any] = {}
        Anchors.scan_for_anchors(all_data, all_anchors)
        compare_node = (all_anchors[anchor_name]
                        if anchor_name in all_anchors
                        else None)
        # Only a dict-valued anchor can be a YAML Merge Key source.
        is_ymk_anchor = (compare_node is not None
                         and isinstance(compare_node, dict))

        if is_ymk_anchor:
            child_present = False
            if hasattr(data, "merge") and len(data.merge) > 0:
                # Ignore comparison if there is no source
                for (idx, merge_node) in data.merge:
                    if merge_node == compare_node:
                        child_present = True
                        break

            # XOR of presence and inversion decides whether to yield.
            if ((invert and not child_present)
                    or (child_present and not invert)):
                yield NodeCoords(
                    data, parent, parentref, translated_path, ancestry,
                    relay_segment)
            return

        # Look for Anchored keys; include merged nodes
        else:
            child_present = False
            for (key, val) in data.items():
                key_anchor = Anchors.get_node_anchor(key)
                val_anchor = Anchors.get_node_anchor(val)
                if key_anchor and key_anchor == anchor_name:
                    child_present = True
                    break
                if val_anchor and val_anchor == anchor_name:
                    child_present = True
                    break

            if ((invert and not child_present)
                    or (child_present and not invert)):
                yield NodeCoords(
                    data, parent, parentref, translated_path, ancestry,
                    relay_segment)

    elif Nodes.node_is_aoh(data, accept_nulls=True):
        # Array-of-Hashes:  recurse into each non-null record as though
        # the search were performed directly against it.
        for idx, ele in enumerate(data):
            if ele is None:
                continue

            next_path = translated_path.append("[{}]".format(str(idx)))
            next_ancestry = ancestry + [(data, idx)]
            for aoh_match in KeywordSearches._has_anchored_child(
                ele, invert, parameters, yaml_path,
                parent=data, parentref=idx,
                translated_path=next_path,
                ancestry=next_ancestry
            ):
                yield aoh_match

    elif isinstance(data, list):
        # Plain list:  any element bearing the anchor counts.
        child_present = False
        for ele in data:
            ele_anchor = Anchors.get_node_anchor(ele)
            if ele_anchor and ele_anchor == anchor_name:
                child_present = True
                break

        if ((invert and not child_present)
                or (child_present and not invert)):
            yield NodeCoords(
                data, parent, parentref, translated_path, ancestry,
                relay_segment)
def _has_concrete_child(
    data: Any, invert: bool, parameters: List[str], yaml_path: YAMLPath,
    **kwargs: Any) -> Generator[NodeCoords, None, None]:
    """
    Indicate whether data has a named child.

    Parameters:
    1. data (Any) The data to evaluate
    2. invert (bool) Invert the evaluation
    3. parameters (List[str]) Parsed parameters
    4. yaml_path (YAMLPath) YAML Path begetting this operation

    Keyword Arguments:
    * parent (ruamel.yaml node) The parent node from which this query
      originates
    * parentref (Any) The Index or Key of data within parent
    * relay_segment (PathSegment) YAML Path segment presently under
      evaluation
    * translated_path (YAMLPath) YAML Path indicating precisely which node
      is being evaluated
    * ancestry (List[AncestryEntry]) Stack of ancestors preceding the
      present node under evaluation

    Returns:  (Generator[NodeCoords, None, None]) each result as it is
    generated

    Raises:
    - `YAMLPathException` when data is a Scalar (has no child nodes).
    """
    parent: Any = kwargs.pop("parent", None)
    parentref: Any = kwargs.pop("parentref", None)
    translated_path: YAMLPath = kwargs.pop("translated_path", YAMLPath(""))
    ancestry: List[AncestryEntry] = kwargs.pop("ancestry", [])
    relay_segment: PathSegment = kwargs.pop("relay_segment", None)
    match_key = parameters[0]

    # Against a map, this will return nodes which have an immediate
    # child key exactly named as per parameters.  When inverted, only
    # parents with no such key are yielded.
    if isinstance(data, dict):
        child_present = data is not None and match_key in data
        if ((invert and not child_present) or
                (child_present and not invert)):
            yield NodeCoords(
                data, parent, parentref, translated_path, ancestry,
                relay_segment)

    # Against a list, this will merely require an exact match between
    # parameters and any list elements.  When inverted, every
    # non-matching element is yielded.
    elif isinstance(data, list):
        # Against an AoH, this will scan each element's immediate children,
        # treating and yielding as if this search were performed directly
        # against each map in the list.
        if Nodes.node_is_aoh(data):
            for idx, ele in enumerate(data):
                next_path = translated_path.append("[{}]".format(str(idx)))
                # NOTE(review):  unlike _has_anchored_child, this
                # recursion does not forward ancestry -- confirm whether
                # that omission is intentional.
                for aoh_match in KeywordSearches._has_concrete_child(
                    ele, invert, parameters, yaml_path,
                    parent=data, parentref=idx, translated_path=next_path
                ):
                    yield aoh_match
            return

        child_present = match_key in data
        if ((invert and not child_present) or
                (child_present and not invert)):
            yield NodeCoords(
                data, parent, parentref, translated_path, ancestry,
                relay_segment)

    elif data is None:
        # A null node has no children; it matches only when inverted.
        if invert:
            yield NodeCoords(
                data, parent, parentref, translated_path, ancestry,
                relay_segment)

    else:
        raise YAMLPathException(
            ("{} data has no child nodes in YAML Path").format(type(data)),
            str(yaml_path))
def merge_with(self, rhs: Any) -> None:
    """
    Merge this document with another.

    Parameters:
    1. rhs (Any) The document to merge into this one.

    Returns:  N/A

    Raises:
    - `MergeException` when a clean merge is impossible.
    """
    # Do nothing when RHS is None (empty document)
    if rhs is None:
        return

    # Remove all comments (no sensible way to merge them)
    Parsers.delete_all_comments(rhs)

    # When LHS is None (empty document), just dump all of RHS into it,
    # honoring any --mergeat|-m location as best as possible.
    insert_at = self.config.get_insertion_point()
    if self.data is None:
        self.logger.debug(
            "Replacing None data with:", prefix="Merger::merge_with:  ",
            data=rhs, data_header="     *****")
        self.data = Nodes.build_next_node(insert_at, 0, rhs)
        self.logger.debug(
            "Merged document is now:", prefix="Merger::merge_with:  ",
            data=self.data, footer="     ***** ***** *****")
        if isinstance(rhs, (dict, list)):
            # Only Scalar values need further processing
            return

    # Resolve any anchor conflicts
    self._resolve_anchor_conflicts(rhs)

    # Prepare the merge rules
    self.config.prepare(rhs)

    # Identify a reasonable default should a DOM need to be built up to
    # receive the RHS data.
    default_val = rhs
    if isinstance(rhs, CommentedMap):
        default_val = {}
    elif isinstance(rhs, CommentedSeq):
        default_val = []

    # Loop through all insertion points and the elements in RHS.  The
    # target nodes are collected first because merging may mutate the
    # DOM the Processor is iterating.
    merge_performed = False
    nodes: List[NodeCoords] = []
    lhs_proc = Processor(self.logger, self.data)
    for node_coord in lhs_proc.get_nodes(
            insert_at, default_value=default_val):
        nodes.append(node_coord)

    for node_coord in nodes:
        # Merge into the container itself; when the matched node is a
        # Scalar, fall back to its parent container.
        target_node = (node_coord.node
                       if isinstance(node_coord.node,
                                     (CommentedMap, CommentedSeq))
                       else node_coord.parent)

        # Adopt the target's flow/block style for the incoming data.
        Parsers.set_flow_style(
            rhs, (target_node.fa.flow_style()
                  if hasattr(target_node, "fa")
                  else None))

        if isinstance(rhs, CommentedMap):
            # The RHS document root is a map
            if isinstance(target_node, CommentedSeq):
                # But the destination is a list
                self._merge_lists(
                    target_node, CommentedSeq([rhs]), insert_at)
            else:
                self._merge_dicts(target_node, rhs, insert_at)

                # Synchronize YAML Tags
                self.logger.debug(
                    "Merger::merge_with:  Setting LHS tag from {} to {}."
                    .format(target_node.tag.value, rhs.tag.value))
                target_node.yaml_set_tag(rhs.tag.value)
            merge_performed = True
        elif isinstance(rhs, CommentedSeq):
            # The RHS document root is a list
            self._merge_lists(target_node, rhs, insert_at)
            merge_performed = True

            # Synchronize any YAML Tag
            self.logger.debug(
                "Merger::merge_with:  Setting LHS tag from {} to {}."
                .format(target_node.tag.value, rhs.tag.value))
            target_node.yaml_set_tag(rhs.tag.value)
        else:
            # The RHS document root is a Scalar value
            target_node = node_coord.node
            if isinstance(target_node, CommentedSeq):
                Nodes.append_list_element(target_node, rhs)
                merge_performed = True
            elif isinstance(target_node, CommentedMap):
                raise MergeException(
                    "Impossible to add Scalar value, {}, to a Hash without"
                    " a key.  Change the value to a 'key: value' pair, a"
                    " '{{key: value}}' Hash, or change the merge target to"
                    " an Array or other Scalar value.".format(rhs),
                    insert_at)
            else:
                lhs_proc.set_value(insert_at, rhs)
                merge_performed = True

    self.logger.debug(
        "Completed merge operation, resulting in document:",
        prefix="Merger::merge_with:  ", data=self.data)

    if not merge_performed:
        raise MergeException(
            "A merge was not performed.  Ensure your target path matches"
            " at least one node in the left document(s).", insert_at)
def test_list_to_str(self):
    """A literal '[]' string must survive make_new_node unchanged."""
    result = Nodes.make_new_node("", "[]", YAMLValueFormats.DEFAULT)
    assert result == "[]"
def wrap_type(*args):
    """Pass all arguments through to the Nodes.wrap_type static method."""
    relay_target = Nodes.wrap_type
    return relay_target(*args)
def test_anchored_string(self):
    """Verify make_new_node preserves the source node's anchor."""
    source = ry.scalarstring.PlainScalarString("value")
    source.yaml_set_anchor("anchored")
    rebuilt = Nodes.make_new_node(source, "new", YAMLValueFormats.DEFAULT)
    assert rebuilt.anchor.value == source.anchor.value
def merge_with(self, rhs: Any) -> None:
    """
    Merge this document with another.

    Dispatches to a type-specific _insert_* helper for every LHS node
    matched by the configured insertion point.

    Parameters:
    1. rhs (Any) The document to merge into this one.

    Returns:  N/A

    Raises:
    - `MergeException` when a clean merge is impossible.
    """
    # Do nothing when RHS is None (empty document)
    if rhs is None:
        return

    # Remove all comments (no sensible way to merge them)
    Parsers.delete_all_comments(rhs)

    # When LHS is None (empty document), just dump all of RHS into it,
    # honoring any --mergeat|-m location as best as possible.
    insert_at = self.config.get_insertion_point()
    if self.data is None:
        self.logger.debug(
            "Replacing None data with:", prefix="Merger::merge_with:  ",
            data=rhs, data_header="     *****")
        self.data = Nodes.build_next_node(insert_at, 0, rhs)
        self.logger.debug(
            "Merged document is now:", prefix="Merger::merge_with:  ",
            data=self.data, footer="     ***** ***** *****")
        if isinstance(rhs, (dict, list, CommentedSet, set)):
            # Only Scalar values need further processing
            return

    # Resolve any anchor conflicts
    self._resolve_anchor_conflicts(rhs)

    # Prepare the merge rules
    self.config.prepare(rhs)

    # Merge into each insertion point
    merge_performed = False
    lhs_proc = Processor(self.logger, self.data)
    for node_coord in self._get_merge_target_nodes(
        insert_at, lhs_proc, rhs
    ):
        target_node = node_coord.node

        # Adopt the target's flow/block style for the incoming data.
        Parsers.set_flow_style(
            rhs, (target_node.fa.flow_style()
                    if hasattr(target_node, "fa")
                    else None))

        if isinstance(rhs, CommentedMap):
            # The RHS document root is a dict
            merge_performed = self._insert_dict(
                insert_at, target_node, rhs)
        elif isinstance(rhs, CommentedSeq):
            # The RHS document root is a list
            merge_performed = self._insert_list(
                insert_at, target_node, rhs)
        elif isinstance(rhs, CommentedSet):
            # The RHS document is a set
            merge_performed = self._insert_set(
                insert_at, target_node, rhs)
        else:
            # The RHS document root is a Scalar value
            merge_performed = self._insert_scalar(
                insert_at, target_node, lhs_proc, rhs)

    self.logger.debug(
        "Completed merge operation, resulting in document:",
        prefix="Merger::merge_with:  ", data=self.data)

    if not merge_performed:
        raise MergeException(
            "A merge was not performed.  Ensure your target path matches"
            " at least one node in the left document(s).", insert_at)
def build_next_node(*args):
    """Pass all arguments through to Nodes.build_next_node."""
    relay_target = Nodes.build_next_node
    return relay_target(*args)
def make_new_node(*args):
    """Pass all arguments through to Nodes.make_new_node."""
    relay_target = Nodes.make_new_node
    return relay_target(*args)