def test_aoh_merge_key_default(self, quiet_logger, tmp_path_factory):
        """Verify the default Array-of-Hashes merge identity key is 'name'."""
        yaml_source = create_temp_yaml_file(
            tmp_path_factory, """---
        hash:
          lhs_exclusive: lhs value 1
          merge_targets:
            subkey: lhs value 2
            subarray:
              - one
              - two
        array_of_hashes:
          - name: LHS Record 1
            id: 1
            prop: LHS value AoH 1
          - name: LHS Record 2
            id: 2
            prop: LHS value AoH 2
        """)
        editor = get_yaml_editor()
        (document, _loaded) = get_yaml_data(editor, quiet_logger, yaml_source)

        config = MergerConfig(quiet_logger, SimpleNamespace())
        config.prepare(document)

        aoh_key = "array_of_hashes"
        aoh_node = document[aoh_key]
        first_record = aoh_node[0]
        coords = NodeCoords(aoh_node, document, aoh_key)

        assert config.aoh_merge_key(coords, first_record) == "name"
# Exemplo n.º 2
    def _merge_lists(
        self, lhs: CommentedSeq, rhs: CommentedSeq, path: YAMLPath,
        **kwargs: Any
    ) -> CommentedSeq:
        """
        Merge two lists; understands lists-of-dicts.

        Parameters:
        1. lhs (CommentedSeq) The list to merge into.
        2. rhs (CommentedSeq) The list to merge from.
        3. path (YAMLPath) Location of the `rhs` source list within its DOM.

        Keyword Arguments:
        * parent (Any) Parent node of `rhs`
        * parentref (Any) Child Key or Index of `rhs` within `parent`.

        Returns:  (CommentedSeq) The merged result.
        """
        rhs_parent: Any = kwargs.pop("parent", None)
        rhs_parentref: Any = kwargs.pop("parentref", None)
        rhs_coord = NodeCoords(rhs, rhs_parent, rhs_parentref)

        # An empty RHS list contributes nothing; the LHS wins untouched.
        if not rhs:
            return lhs

        # Dispatch on the shape of the first RHS element.
        if isinstance(rhs[0], CommentedMap):
            # Array-of-Hashes merge
            return self._merge_arrays_of_hashes(lhs, rhs, path, rhs_coord)

        # Array-of-Arrays or a simple list of Scalars
        return self._merge_simple_lists(lhs, rhs, path, rhs_coord)
# Exemplo n.º 3
    def test_aoh_diff_key_default(self, quiet_logger, tmp_path_factory):
        """
        Verify the default AoH diff identity key is 'name' and that it is
        not reported as user-defined when no key is configured.
        """
        lhs_yaml_file = create_temp_yaml_file(
            tmp_path_factory, """---
        hash:
          lhs_exclusive: lhs value 1
          diff_targets:
            subkey: lhs value 2
            subarray:
              - one
              - two
        array_of_hashes:
          - name: LHS Record 1
            id: 1
            prop: LHS value AoH 1
          - name: LHS Record 2
            id: 2
            prop: LHS value AoH 2
        """)
        lhs_yaml = get_yaml_editor()
        (lhs_data, _lhs_loaded) = get_yaml_data(lhs_yaml, quiet_logger,
                                                lhs_yaml_file)

        mc = DifferConfig(quiet_logger, SimpleNamespace())
        mc.prepare(lhs_data)

        parent = lhs_data["array_of_hashes"]
        parentref = 0
        node = parent[parentref]
        nc = NodeCoords(node, parent, parentref)
        (key_attr, is_user_defined) = mc.aoh_diff_key(nc)

        # PEP 8: compare singletons with `is`, not `==`; the asserts are
        # also split so a failure pinpoints which condition broke.
        assert key_attr == "name"
        assert is_user_defined is False
    def test_set_merge_mode_ini_rule_overrides_cli(self, quiet_logger,
                                                   tmp_path_factory, cli,
                                                   ini_default, ini_rule,
                                                   mode):
        """An INI [rules] entry for a set node trumps both the INI default and the CLI."""
        ini_file = create_temp_yaml_file(
            tmp_path_factory, """
        [defaults]
        sets = {}
        [rules]
        /hash/merge_targets/subset = {}
        """.format(ini_default, ini_rule))
        source_file = create_temp_yaml_file(
            tmp_path_factory, """---
        hash:
          lhs_exclusive: lhs value 1
          merge_targets:
            subkey: lhs value 2
            subset:
              ? one
              ? two
        """)
        editor = get_yaml_editor()
        (document, _loaded) = get_yaml_data(editor, quiet_logger, source_file)

        config = MergerConfig(quiet_logger,
                              SimpleNamespace(config=ini_file, sets=cli))
        config.prepare(document)

        container = document["hash"]["merge_targets"]
        coords = NodeCoords(container["subset"], container, "subset")

        assert config.set_merge_mode(coords) == mode
# Exemplo n.º 5
    def _diff_lists(self, path: YAMLPath, lhs: CommentedSeq, rhs: CommentedSeq,
                    **kwargs) -> None:
        """
        Diff two lists.

        Parameters:
        1. path (YAMLPath) YAML Path to the document element under evaluation
        2. lhs (Any) The left-hand-side (original) document
        3. rhs (Any) The right-hand-side (altered) document

        Keyword Arguments:
        * rhs_parent (Any) Parent data node of rhs
        * parentref (Any) Reference indicating rhs within rhs_parent

        Returns:  N/A
        """
        self.logger.debug("Comparing LHS:",
                          prefix="Differ::_diff_lists:  ",
                          data=lhs)
        self.logger.debug("Against RHS:",
                          prefix="Differ::_diff_lists:  ",
                          data=rhs)

        rhs_parent: Any = kwargs.pop("rhs_parent", None)
        rhs_parentref: Any = kwargs.pop("parentref", None)
        rhs_coord = NodeCoords(rhs, rhs_parent, rhs_parentref)

        # An empty RHS offers nothing to compare
        if not rhs:
            return

        if isinstance(rhs[0], CommentedMap):
            # This list is an Array-of-Hashes
            self._diff_arrays_of_hashes(path, lhs, rhs, rhs_coord)
        else:
            # This list is an Array-of-Arrays or a simple list of Scalars
            self._diff_arrays_of_scalars(path, lhs, rhs, rhs_coord)
# Exemplo n.º 6
    def _insert_dict(
        self, insert_at: YAMLPath,
        lhs: Union[CommentedMap, CommentedSeq, CommentedSet],
        rhs: CommentedMap
    ) -> bool:
        """
        Insert an RHS dict merge result into the LHS document.

        Returns True when a merge was performed; raises MergeException
        when the LHS target cannot accept a Hash.
        """
        if isinstance(lhs, CommentedSet):
            # A Hash merged into a Set would lose every value
            raise MergeException(
                "Merging a Hash into a Set is destructive to the"
                " source Hash because only the keys would be"
                " preserved.  Please adjust your merge to target a"
                " suitable node.", insert_at)

        merged_data: Union[
            CommentedMap, CommentedSeq, CommentedSet
        ] = CommentedMap()
        if isinstance(lhs, CommentedSeq):
            # Merge a dict into a list
            self.logger.debug(
                "Merger::_insert_dict:  Merging a dict into a list.")
            merged_data = self._merge_lists(
                lhs, CommentedSeq([rhs]), insert_at)
        else:
            # Merge a dict into a dict
            self.logger.debug(
                "Merger::_insert_dict:  Merging a dict into a dict.")

            merge_mode = self.config.hash_merge_mode(
                NodeCoords(rhs, None, None))
            if merge_mode is HashMergeOpts.LEFT:
                self.logger.debug(
                    "Configured mode short-circuits the merge; returning LHS:",
                    prefix="Merger::_insert_dict:  ",
                    data=lhs)
                merged_data = lhs
            elif merge_mode is HashMergeOpts.RIGHT:
                self.logger.debug(
                    "Configured mode short-circuits the merge; returning RHS:",
                    prefix="Merger::_insert_dict:  ",
                    data=rhs)
                merged_data = rhs
            else:
                merged_data = self._merge_dicts(lhs, rhs, insert_at)

        # Synchronize YAML Tags
        self.logger.debug(
            "Merger::_insert_dict:  Setting LHS tag from {} to {}."
            .format(lhs.tag.value, rhs.tag.value))
        lhs.yaml_set_tag(rhs.tag.value)

        if insert_at.is_root:
            self.data = merged_data
        return True
 def test_hash_merge_mode_ini(self, quiet_logger, tmp_path_factory, setting,
                              mode):
     """An INI [defaults] hashes setting selects the hash merge mode."""
     ini_file = create_temp_yaml_file(
         tmp_path_factory, """
     [defaults]
     hashes = {}
     """.format(setting))
     merger_config = MergerConfig(
         quiet_logger, SimpleNamespace(config=ini_file, hashes=None))
     assert merger_config.hash_merge_mode(NodeCoords(None, None, None)) == mode
# Exemplo n.º 8
 def test_array_diff_mode_ini(self, quiet_logger, tmp_path_factory, setting,
                              mode):
     """An INI [defaults] arrays setting selects the array diff mode."""
     ini_file = create_temp_yaml_file(
         tmp_path_factory, """
     [defaults]
     arrays = {}
     """.format(setting))
     differ_config = DifferConfig(
         quiet_logger, SimpleNamespace(config=ini_file, arrays=None))
     assert differ_config.array_diff_mode(NodeCoords(None, None, None)) == mode
# Exemplo n.º 9
 def test_array_diff_mode_cli_overrides_ini_defaults(
         self, quiet_logger, tmp_path_factory, cli, ini, mode):
     """A CLI arrays setting overrides the INI [defaults] section."""
     ini_file = create_temp_yaml_file(
         tmp_path_factory, """
     [defaults]
     arrays = {}
     """.format(ini))
     differ_config = DifferConfig(
         quiet_logger, SimpleNamespace(config=ini_file, arrays=cli))
     assert differ_config.array_diff_mode(NodeCoords(None, None, None)) == mode
# Exemplo n.º 10
def main():
    """Main code."""
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)
    query_path = YAMLPath(args.query, pathsep=args.pathsep)

    # Prep the YAML parser
    editor = Parsers.get_yaml_editor()

    # Attempt to open the YAML file; "-" requests STDIN
    source = args.yaml_file if args.yaml_file else "-"
    (document, doc_loaded) = Parsers.get_yaml_data(editor, log, source)
    if not doc_loaded:
        # An error message has already been logged
        sys.exit(1)

    # Seek the queried value(s)
    found_nodes = []
    processor = EYAMLProcessor(
        log, document,
        binary=args.eyaml,
        publickey=args.publickey,
        privatekey=args.privatekey)
    try:
        for result in processor.get_eyaml_values(query_path, mustexist=True):
            log.debug("Got node from {}:".format(query_path),
                      data=result,
                      prefix="yaml_get::main:  ")
            found_nodes.append(NodeCoords.unwrap_node_coords(result))
    except YAMLPathException as ex:
        log.critical(ex, 1)
    except EYAMLCommandException as ex:
        log.critical(ex, 2)

    # Print complex results as JSON, scalars verbatim (NUL marks null)
    try:
        for result in found_nodes:
            if isinstance(result, (dict, list, CommentedSet)):
                print(json.dumps(Parsers.jsonify_yaml_data(result)))
                continue
            printable = "\x00" if result is None else str(result)
            print("{}".format(printable.replace("\n", r"\n")))
    except RecursionError:
        log.critical(
            "The YAML data contains an infinitely recursing YAML Alias!", 1)
# Exemplo n.º 11
    def name(invert: bool, parameters: List[str], yaml_path: YAMLPath,
             **kwargs: Any) -> Generator[NodeCoords, None, None]:
        """
        Match only the key-name of the present node.

        Parameters:
        1. invert (bool) Invert the evaluation
        2. parameters (List[str]) Parsed parameters
        3. yaml_path (YAMLPath) YAML Path begetting this operation

        Keyword Arguments:
        * parent (ruamel.yaml node) The parent node from which this query
          originates
        * parentref (Any) The Index or Key of data within parent
        * relay_segment (PathSegment) YAML Path segment presently under
          evaluation
        * translated_path (YAMLPath) YAML Path indicating precisely which node
          is being evaluated
        * ancestry (List[AncestryEntry]) Stack of ancestors preceding the
          present node under evaluation

        Returns:  (Generator[NodeCoords, None, None]) each result as it is
            generated

        Raises:  YAMLPathException when any parameter is supplied or when
            inversion is requested; neither is meaningful to name().
        """
        parent: Any = kwargs.pop("parent", None)
        parentref: Any = kwargs.pop("parentref", None)
        translated_path: YAMLPath = kwargs.pop("translated_path", YAMLPath(""))
        ancestry: List[AncestryEntry] = kwargs.pop("ancestry", [])
        relay_segment: PathSegment = kwargs.pop("relay_segment", None)

        # There are no parameters.  BUGFIX: reject ANY parameter (this was
        # `> 1`, which silently accepted a single parameter despite the
        # "0 are permitted" wording of the error message below).
        param_count = len(parameters)
        if param_count > 0:
            raise YAMLPathException(
                ("Invalid parameter count to {}(); {} are permitted, "
                 " got {} in YAML Path").format(PathSearchKeywords.NAME, 0,
                                                param_count), str(yaml_path))

        if invert:
            raise YAMLPathException(
                ("Inversion is meaningless to {}()").format(
                    PathSearchKeywords.NAME), str(yaml_path))

        # The node's key-name IS the result, so yield parentref both as the
        # node value and as its reference within parent.
        yield NodeCoords(parentref, parent, parentref, translated_path,
                         ancestry, relay_segment)
# Exemplo n.º 12
    def _insert_set(
        self, insert_at: YAMLPath,
        lhs: Union[CommentedMap, CommentedSeq, CommentedSet],
        rhs: CommentedSet
    ) -> bool:
        """
        Insert an RHS set merge result into the LHS document.

        Returns True; every supported LHS container accepts a set merge.
        """
        merged_data: Union[
            CommentedSeq, CommentedMap, CommentedSet, None] = None
        if isinstance(lhs, CommentedSeq):
            # Set members become list elements
            self.logger.debug(
                "Merger::_insert_set:  Merging a set into a list.")
            merged_data = self._merge_lists(
                lhs, CommentedSeq(list(rhs)), insert_at)
        elif isinstance(lhs, CommentedMap):
            # Set members become value-less keys
            self.logger.debug(
                "Merger::_insert_set:  Merging a set into a dict.")
            keys_only: Dict[str, None] = {member: None for member in rhs}
            merged_data = self._merge_dicts(
                lhs, CommentedMap(keys_only), insert_at)
        else:
            # Set into set
            self.logger.debug(
                "Merger::_insert_set:  Merging a set into a set.")
            merged_data = self._merge_sets(
                lhs, rhs, insert_at, NodeCoords(rhs, None, None))

        # Synchronize any YAML Tag
        self.logger.debug(
            "Merger::_insert_set:  Setting LHS tag from {} to {}."
            .format(lhs.tag.value, rhs.tag.value))
        lhs.yaml_set_tag(rhs.tag.value)

        if insert_at.is_root:
            self.data = merged_data
        return True
# Exemplo n.º 13
    def test_array_diff_mode_ini_rule_overrides_cli(self, quiet_logger,
                                                    tmp_path_factory, cli,
                                                    ini_default, ini_rule,
                                                    mode):
        """An INI [rules] entry for an array node trumps INI default and CLI."""
        ini_file = create_temp_yaml_file(
            tmp_path_factory, """
        [defaults]
        arrays = {}
        [rules]
        /hash/diff_targets/subarray = {}
        """.format(ini_default, ini_rule))
        source_file = create_temp_yaml_file(
            tmp_path_factory, """---
        hash:
          lhs_exclusive: lhs value 1
          diff_targets:
            subkey: lhs value 2
            subarray:
              - one
              - two
        array_of_hashes:
          - name: LHS Record 1
            id: 1
            prop: LHS value AoH 1
          - name: LHS Record 2
            id: 2
            prop: LHS value AoH 2
        """)
        editor = get_yaml_editor()
        (document, _loaded) = get_yaml_data(editor, quiet_logger, source_file)

        config = DifferConfig(quiet_logger,
                              SimpleNamespace(config=ini_file, arrays=cli))
        config.prepare(document)

        container = document["hash"]["diff_targets"]
        coords = NodeCoords(container["subarray"], container, "subarray")

        assert config.array_diff_mode(coords) == mode
# Exemplo n.º 14
    def _insert_list(
        self, insert_at: YAMLPath,
        lhs: Union[CommentedMap, CommentedSeq, CommentedSet],
        rhs: CommentedSeq
    ) -> bool:
        """
        Insert an RHS list merge result into the LHS document.

        Returns True when the merge occurred; raises MergeException when
        the LHS target is a Hash (there is no key for the new elements).
        """
        if not isinstance(lhs, (CommentedSeq, CommentedSet)):
            # Merge list into hash
            raise MergeException(
                "Impossible to merge an Array into a Hash without a"
                " key given to receive the new elements.", insert_at)

        merged_data: Union[CommentedSeq, CommentedSet, None] = None
        if isinstance(lhs, CommentedSeq):
            # Merge list into list
            self.logger.debug(
                "Merger::_insert_list:  Merging a list into a list.")
            merged_data = self._merge_lists(lhs, rhs, insert_at)
        else:
            # Merge list into set
            self.logger.debug(
                "Merger::_insert_list:  Merging a list into a set.")
            rhs_as_set = CommentedSet()
            for element in rhs:
                rhs_as_set.add(element)
            merged_data = self._merge_sets(
                lhs, rhs_as_set, insert_at, NodeCoords(rhs, None, None))

        # Synchronize any YAML Tag
        self.logger.debug(
            "Merger::_insert_list:  Setting LHS tag from {} to {}."
            .format(lhs.tag.value, rhs.tag.value))
        lhs.yaml_set_tag(rhs.tag.value)

        if insert_at.is_root:
            self.data = merged_data
        return True
# Exemplo n.º 15
    def _diff_lists(
        self, path: YAMLPath, lhs: CommentedSeq, rhs: CommentedSeq, **kwargs
    ) -> None:
        """Diff two lists, dispatching on the RHS element type."""
        self.logger.debug(
            "Comparing LHS:",
            prefix="Differ::_diff_lists:  ",
            data=lhs)
        self.logger.debug(
            "Against RHS:",
            prefix="Differ::_diff_lists:  ",
            data=rhs)

        rhs_coord = NodeCoords(
            rhs, kwargs.pop("rhs_parent", None), kwargs.pop("parentref", None))

        # An empty RHS offers nothing to compare
        if not rhs:
            return

        if isinstance(rhs[0], CommentedMap):
            # Array-of-Hashes
            self._diff_arrays_of_hashes(path, lhs, rhs, rhs_coord)
        else:
            # Array-of-Arrays or a flat list of Scalars
            self._diff_arrays_of_scalars(path, lhs, rhs, rhs_coord)
# Exemplo n.º 16
 def _insert_scalar(
     self, insert_at: YAMLPath, lhs: Any, lhs_proc: Processor, rhs: Any
 ) -> bool:
     """
     Insert an RHS scalar into the LHS document.

     Returns True when the insertion occurred; raises MergeException when
     the target is a Hash (a bare scalar has no key to land under).
     """
     if isinstance(lhs, CommentedSeq):
         # Scalars append onto lists
         self.logger.debug(
             "Merger::_insert_scalar:  Merging a scalar into a list.")
         Nodes.append_list_element(lhs, rhs)
         return True

     if isinstance(lhs, CommentedSet):
         # Scalars union into sets
         self.logger.debug(
             "Merger::_insert_scalar:  Merging a scalar into a set.")
         self._merge_sets(
             lhs, CommentedSet([rhs]), insert_at,
             NodeCoords(rhs, None, None))
         return True

     if isinstance(lhs, CommentedMap):
         ex_message = (
             "Impossible to add Scalar value, {}, to a Hash without"
             " a key.  Change the value to a 'key: value' pair, a"
             " '{{key: value}}' Hash, or change the merge target to"
             " an Array or other Scalar value."
             ).format(rhs)
         # An empty scalar plus an open STDIN usually means an accidental
         # pipe; append a hint about --nostdin.
         if len(str(rhs)) < 1 and not sys.stdin.isatty():
             ex_message += (
                 "  You may be seeing this because your workflow"
                 " inadvertently opened a STDIN handle to {}.  If"
                 " this may be the case, try adding --nostdin or -S"
                 " so as to block unintentional STDIN reading."
             ).format(basename(sys.argv[0]))
         raise MergeException(ex_message, insert_at)

     # Any other target takes the scalar as a direct value assignment
     lhs_proc.set_value(insert_at, rhs)
     return True
# Exemplo n.º 17
 def test_generic(self):
     """NodeCoords accepts an empty list with no parent or reference."""
     empty_coord = NodeCoords([], None, None)
# Exemplo n.º 18
    def min(data: Any, invert: bool, parameters: List[str],
            yaml_path: YAMLPath,
            **kwargs: Any) -> Generator[NodeCoords, None, None]:
        """
        Find whichever nodes/elements have a minimum value.

        Parameters:
        1. data (Any) The data to evaluate
        2. invert (bool) Invert the evaluation
        3. parameters (List[str]) Parsed parameters
        4. yaml_path (YAMLPath) YAML Path begetting this operation

        Keyword Arguments:
        * parent (ruamel.yaml node) The parent node from which this query
          originates
        * parentref (Any) The Index or Key of data within parent
        * relay_segment (PathSegment) YAML Path segment presently under
          evaluation
        * translated_path (YAMLPath) YAML Path indicating precisely which node
          is being evaluated
        * ancestry (List[AncestryEntry]) Stack of ancestors preceding the
          present node under evaluation

        Returns:  (Generator[NodeCoords, None, None]) each result as it is
            generated

        Raises:  YAMLPathException when more than one parameter is given;
            when the mandatory key name is missing for AoH or Hash input;
            or when a key name is given for plain Array input.
        """
        parent: Any = kwargs.pop("parent", None)
        parentref: Any = kwargs.pop("parentref", None)
        translated_path: YAMLPath = kwargs.pop("translated_path", YAMLPath(""))
        ancestry: List[AncestryEntry] = kwargs.pop("ancestry", [])
        relay_segment: PathSegment = kwargs.pop("relay_segment", None)

        # There may be 0 or 1 parameters
        param_count = len(parameters)
        if param_count > 1:
            raise YAMLPathException(
                ("Invalid parameter count to {}([NAME]); up to {} permitted, "
                 " got {} in YAML Path").format(PathSearchKeywords.MIN, 1,
                                                param_count), str(yaml_path))

        # match_value tracks the smallest value seen so far; match_nodes holds
        # every node tied at that value; discard_nodes collects all the rest
        # (yielded instead when invert is True).
        scan_node = parameters[0] if param_count > 0 else None
        match_value: Any = None
        match_nodes: List[NodeCoords] = []
        discard_nodes: List[NodeCoords] = []
        unwrapped_data: Any = NodeCoords.unwrap_node_coords(data)
        if Nodes.node_is_aoh(unwrapped_data, accept_nulls=True):
            # A named child node is mandatory
            if scan_node is None:
                raise YAMLPathException((
                    "The {}([NAME]) Search Keyword requires a key name to scan"
                    " when evaluating an Array-of-Hashes in YAML Path").format(
                        PathSearchKeywords.MIN), str(yaml_path))

            for idx, wrapped_ele in enumerate(data):
                ele = NodeCoords.unwrap_node_coords(wrapped_ele)
                next_path = translated_path + "[{}]".format(idx)
                next_ancestry = ancestry + [(data, idx)]
                # Records lacking the scanned key fall through to discard
                if ele is not None and scan_node in ele:
                    eval_val = ele[scan_node]
                    # First value seen, or a new minimum: reset the tie list
                    if (match_value is None or Searches.search_matches(
                            PathSearchMethods.LESS_THAN, match_value,
                            eval_val)):
                        match_value = eval_val
                        discard_nodes.extend(match_nodes)
                        match_nodes = [
                            NodeCoords(ele, data, idx, next_path,
                                       next_ancestry, relay_segment)
                        ]
                        continue

                    # Ties the current minimum; the None guard is defensive
                    # (a None match_value was consumed by the branch above)
                    if (match_value is None or Searches.search_matches(
                            PathSearchMethods.EQUALS, match_value, eval_val)):
                        match_nodes.append(
                            NodeCoords(ele, data, idx, next_path,
                                       next_ancestry, relay_segment))
                        continue

                discard_nodes.append(
                    NodeCoords(ele, data, idx, next_path, next_ancestry,
                               relay_segment))

        elif isinstance(data, dict):
            # A named child node is mandatory
            if scan_node is None:
                raise YAMLPathException((
                    "The {}([NAME]) Search Keyword requires a key name to scan"
                    " when comparing Hash/map/dict children in YAML Path"
                ).format(PathSearchKeywords.MIN), str(yaml_path))

            for key, val in data.items():
                next_ancestry = ancestry + [(data, key)]
                next_path = (translated_path + YAMLPath.escape_path_section(
                    key, translated_path.seperator))
                # Only dict-typed children can carry the scanned key
                if isinstance(val, dict):
                    if val is not None and scan_node in val:
                        eval_val = val[scan_node]
                        # First value seen, or a new minimum: reset ties
                        if (match_value is None or Searches.search_matches(
                                PathSearchMethods.LESS_THAN, match_value,
                                eval_val)):
                            match_value = eval_val
                            discard_nodes.extend(match_nodes)
                            match_nodes = [
                                NodeCoords(val, data, key, next_path,
                                           next_ancestry, relay_segment)
                            ]
                            continue

                        # Ties the current minimum
                        if (match_value is None or Searches.search_matches(
                                PathSearchMethods.EQUALS, match_value,
                                eval_val)):
                            match_nodes.append(
                                NodeCoords(val, data, key, next_path,
                                           next_ancestry, relay_segment))
                            continue

                elif scan_node in data:
                    # The user probably meant to operate against the parent
                    raise YAMLPathException(
                        ("The {}([NAME]) Search Keyword operates against"
                         " collections of data which share a common attribute"
                         " yet there is only a single node to consider.  Did"
                         " you mean to evaluate the parent of the selected"
                         " node?  Please review your YAML Path").format(
                             PathSearchKeywords.MIN), str(yaml_path))

                discard_nodes.append(
                    NodeCoords(val, data, key, next_path, next_ancestry,
                               relay_segment))

        elif isinstance(data, list):
            # A named child node is useless
            if scan_node is not None:
                raise YAMLPathException(
                    ("The {}([NAME]) Search Keyword cannot utilize a key name"
                     " when comparing Array/sequence/list elements to one"
                     " another in YAML Path").format(PathSearchKeywords.MIN),
                    str(yaml_path))

            for idx, ele in enumerate(data):
                next_path = translated_path + "[{}]".format(idx)
                next_ancestry = ancestry + [(data, idx)]
                # First non-None value, or a new minimum: reset the tie list
                if (ele is not None
                        and (match_value is None or Searches.search_matches(
                            PathSearchMethods.LESS_THAN, match_value, ele))):
                    match_value = ele
                    discard_nodes.extend(match_nodes)
                    match_nodes = [
                        NodeCoords(ele, data, idx, next_path, next_ancestry,
                                   relay_segment)
                    ]
                    continue

                # Ties the current minimum
                if (ele is not None and Searches.search_matches(
                        PathSearchMethods.EQUALS, match_value, ele)):
                    match_nodes.append(
                        NodeCoords(ele, data, idx, next_path, next_ancestry,
                                   relay_segment))
                    continue

                discard_nodes.append(
                    NodeCoords(ele, data, idx, next_path, next_ancestry,
                               relay_segment))

        else:
            # Non-complex data is always its own minimum and does not invert
            match_value = data
            match_nodes = [
                NodeCoords(data, parent, parentref, translated_path, ancestry,
                           relay_segment)
            ]

        yield_nodes = discard_nodes if invert else match_nodes
        for node_coord in yield_nodes:
            yield node_coord
# Exemplo n.º 19
    def parent(data: Any, invert: bool, parameters: List[str],
               yaml_path: YAMLPath,
               **kwargs: Any) -> Generator[NodeCoords, None, None]:
        """
        Climb back up N parent levels in the data hierarchy.

        Parameters:
        1. data (Any) The data to evaluate
        2. invert (bool) Invert the evaluation; not possible for parent()
        3. parameters (List[str]) Parsed parameters
        4. yaml_path (YAMLPath) YAML Path begetting this operation

        Keyword Arguments:
        * parent (ruamel.yaml node) The parent node from which this query
          originates
        * parentref (Any) The Index or Key of data within parent
        * relay_segment (PathSegment) YAML Path segment presently under
          evaluation
        * translated_path (YAMLPath) YAML Path indicating precisely which node
          is being evaluated
        * ancestry (List[AncestryEntry]) Stack of ancestors preceding the
          present node under evaluation

        Returns:  (Generator[NodeCoords, None, None]) each result as it is
            generated

        Raises:  YAMLPathException when more than one parameter is given,
            when the STEPS parameter is not an integer, when inversion is
            requested, or when more STEPS are requested than ancestors
            exist above the present node.
        """
        parent: Any = kwargs.pop("parent", None)
        parentref: Any = kwargs.pop("parentref", None)
        translated_path: YAMLPath = kwargs.pop("translated_path", YAMLPath(""))
        ancestry: List[AncestryEntry] = kwargs.pop("ancestry", [])
        relay_segment: PathSegment = kwargs.pop("relay_segment", None)

        # There may be 0 or 1 parameters
        param_count = len(parameters)
        if param_count > 1:
            raise YAMLPathException(
                ("Invalid parameter count to {}([STEPS]); up to {} permitted, "
                 " got {} in YAML Path").format(PathSearchKeywords.PARENT, 1,
                                                param_count), str(yaml_path))

        if invert:
            raise YAMLPathException(
                ("Inversion is meaningless to {}([STEPS])").format(
                    PathSearchKeywords.PARENT), str(yaml_path))

        # Default to a single step; the ancestry depth caps how far up the
        # climb may go.
        parent_levels: int = 1
        ancestry_len: int = len(ancestry)
        steps_max = ancestry_len
        if param_count > 0:
            try:
                parent_levels = int(parameters[0])
            except ValueError as ex:
                raise YAMLPathException(
                    ("Invalid parameter passed to {}([STEPS]), {}; must be"
                     " unset or an integer number indicating how may parent"
                     " STEPS to climb in YAML Path").format(
                         PathSearchKeywords.PARENT, parameters[0]),
                    str(yaml_path)) from ex

        if parent_levels > steps_max:
            raise YAMLPathException(
                ("Cannot {}([STEPS]) higher than the document root.  {} steps"
                 " requested when {} available in YAML Path").format(
                     PathSearchKeywords.PARENT, parent_levels, steps_max),
                str(yaml_path))

        if parent_levels < 1:
            # parent(0) is the present node
            yield NodeCoords(data, parent, parentref, translated_path,
                             ancestry, relay_segment)
        else:
            # NOTE(review): these pops mutate the caller-supplied
            # translated_path and ancestry objects in place -- confirm
            # callers pass disposable copies.
            for _ in range(parent_levels):
                translated_path.pop()
                (data, _) = ancestry.pop()
                ancestry_len -= 1

            # The climbed-to node's own parent is the next ancestry entry,
            # or None when the climb reached the document root.
            parentref = ancestry[-1][1] if ancestry_len > 0 else None
            parent = ancestry[-1][0] if ancestry_len > 0 else None
            yield NodeCoords(data, parent, parentref, translated_path,
                             ancestry, relay_segment)
Exemplo n.º 20
0
 def test_array_diff_mode_cli(self, quiet_logger, setting, mode):
     """Each parametrized CLI `arrays` setting maps to its expected diff mode."""
     config = DifferConfig(quiet_logger, SimpleNamespace(arrays=setting))
     node = NodeCoords(None, None, None)
     assert config.array_diff_mode(node) == mode
Exemplo n.º 21
0
    def _merge_dicts(
        self, lhs: CommentedMap, rhs: CommentedMap, path: YAMLPath
    ) -> CommentedMap:
        """
        Merge two YAML maps (CommentedMap-wrapped dicts).

        Parameters:
        1. lhs (CommentedMap) The merge target.
        2. rhs (CommentedMap) The merge source.
        3. path (YAMLPath) Location within the DOM where this merge is taking
           place.

        Returns:  (CommentedMap) The merged result.

        Raises:
        - `MergeException` when a clean merge is impossible.
        """
        # A Hash can merge only into another Hash.
        if not isinstance(lhs, CommentedMap):
            raise MergeException(
                "Impossible to add Hash data to non-Hash destination.", path)

        self.logger.debug(
            "Merging INTO dict with keys: {}:".format(", ".join([
                    str(k.value) if isinstance(k, TaggedScalar)
                    else str(k)
                    for k in lhs.keys()])),
            data=lhs, prefix="Merger::_merge_dicts:  ",
            header="--------------------")
        self.logger.debug(
            "Merging FROM dict with keys: {}:".format(", ".join([
                    str(k.value) if isinstance(k, TaggedScalar)
                    else str(k)
                    for k in rhs.keys()])),
            data=rhs, prefix="Merger::_merge_dicts:  ",
            footer="====================")

        # Delete all internal YAML merge reference keys lest any later
        # .insert() operation on LHS inexplicably convert them from reference
        # to concrete keys.  This seems like a bug in ruamel.yaml...
        self._delete_mergeref_keys(lhs)

        # Assume deep merge until a node's merge rule indicates otherwise
        # `buffer` accumulates RHS key-value pairs not yet present in LHS so
        # they can be inserted *before* the next key both maps share; this
        # keeps RHS anchor definitions positioned ahead of their aliases.
        # `buffer_pos` is the LHS insertion cursor for those buffered pairs.
        buffer: List[Tuple[Any, Any]] = []
        buffer_pos = 0
        for key, val in rhs.non_merged_items():
            path_next = (path +
                YAMLPath.escape_path_section(key, path.seperator))
            if key in lhs:
                # Write the buffer if populated
                for b_key, b_val in buffer:
                    self.logger.debug(
                        "Merger::_merge_dicts:  Inserting key, {}, from"
                        " buffer to position, {}, at path, {}."
                        .format(b_key, buffer_pos, path_next),
                        header="INSERT " * 15)
                    self.logger.debug(
                        "Before INSERT, the LHS document was:",
                        data=lhs, prefix="Merger::_merge_dicts:  ")
                    self.logger.debug(
                        "... and before INSERT, the incoming value will be:",
                        data=b_val, prefix="Merger::_merge_dicts:  ")
                    lhs.insert(buffer_pos, b_key, b_val)
                    self.logger.debug(
                        "After INSERT, the LHS document became:",
                        data=lhs, prefix="Merger::_merge_dicts:  ")
                    buffer_pos += 1
                buffer = []

                # Short-circuit the deep merge if a different merge rule
                # applies to this node.
                node_coord = NodeCoords(val, rhs, key)
                # Merge-mode lookup is keyed to the RHS value's container
                # type: Hash, Set, or (by default) Array-of-Hashes.
                merge_mode = (
                    self.config.hash_merge_mode(node_coord)
                    if isinstance(val, CommentedMap)
                    else self.config.set_merge_mode(node_coord)
                    if isinstance(val, CommentedSet)
                    else self.config.aoh_merge_mode(node_coord)
                )
                self.logger.debug("Merger::_merge_dicts:  Got merge mode, {}."
                                  .format(merge_mode))
                # LEFT: keep the LHS value untouched for this key.
                if merge_mode in (
                    HashMergeOpts.LEFT, AoHMergeOpts.LEFT, SetMergeOpts.LEFT
                ):
                    continue
                # RIGHT: the RHS value wholesale replaces the LHS value.
                if merge_mode in (
                    HashMergeOpts.RIGHT, AoHMergeOpts.RIGHT, SetMergeOpts.RIGHT
                ):
                    self.logger.debug(
                        "Merger::_merge_dicts:  Overwriting key, {}, at path,"
                        " {}.".format(key, path_next),
                        header="OVERWRITE " * 15)
                    lhs[key] = val
                    continue

                # Deep merge: recurse per the RHS value's container type.
                if isinstance(val, CommentedMap):
                    lhs[key] = self._merge_dicts(lhs[key], val, path_next)

                    # Synchronize any YAML Tag
                    self.logger.debug(
                        "Merger::_merge_dicts:  Setting LHS tag from {} to {}."
                        .format(lhs[key].tag.value, val.tag.value))
                    lhs[key].yaml_set_tag(val.tag.value)

                    self.logger.debug(
                        "Document BEFORE calling combine_merge_anchors:",
                        data=lhs, prefix="Merger::_merge_dicts:  ",
                        header="+------------------+")
                    Anchors.combine_merge_anchors(lhs[key], val)
                    self.logger.debug(
                        "Document AFTER calling combine_merge_anchors:",
                        data=lhs, prefix="Merger::_merge_dicts:  ",
                        footer="+==================+")
                elif isinstance(val, CommentedSeq):
                    lhs[key] = self._merge_lists(
                        lhs[key], val, path_next, parent=rhs, parentref=key)

                    # Synchronize any YAML Tag
                    self.logger.debug(
                        "Merger::_merge_dicts:  Setting LHS tag from {} to {}."
                        .format(lhs[key].tag.value, val.tag.value))
                    lhs[key].yaml_set_tag(val.tag.value)
                elif isinstance(val, CommentedSet):
                    lhs[key] = self._merge_sets(
                        lhs[key], val, path_next, node_coord)

                    # Synchronize any YAML Tag
                    self.logger.debug(
                        "Merger::_merge_dicts:  Setting LHS tag from {} to {}."
                        .format(lhs[key].tag.value, val.tag.value))
                    lhs[key].yaml_set_tag(val.tag.value)
                else:
                    # Scalar RHS value: simple assignment over the LHS value.
                    self.logger.debug(
                        "Merger::_merge_dicts:  Updating key, {}, at path,"
                        " {}.".format(key, path_next), header="UPDATE " * 15)
                    self.logger.debug(
                        "Before UPDATE, the LHS document was:",
                        data=lhs, prefix="Merger::_merge_dicts:  ")
                    self.logger.debug(
                        "... and before UPDATE, the incoming value will be:",
                        data=val, prefix="Merger::_merge_dicts:  ")
                    lhs[key] = val
                    self.logger.debug(
                        "After UPDATE, the LHS document became:",
                        data=lhs, prefix="Merger::_merge_dicts:  ")
            else:
                # LHS lacks the RHS key.  Buffer this key-value pair in order
                # to insert it ahead of whatever key(s) follow this one in RHS
                # to keep anchor definitions before their aliases.
                buffer.append((key, val))

            # Advance the LHS insertion cursor past the RHS key just handled.
            buffer_pos += 1

        # Write any remaining buffered content to the end of LHS
        for b_key, b_val in buffer:
            self.logger.debug(
                "Merger::_merge_dicts:  Appending key, {}, from buffer at"
                " path, {}.".format(b_key, path), header="APPEND " * 15)
            lhs[b_key] = b_val

        self.logger.debug(
            "Completed merge result for path, {}:".format(path),
            data=lhs, prefix="Merger::_merge_dicts:  ")

        return lhs
Exemplo n.º 22
0
 def test_array_diff_mode_default(self, quiet_logger):
     """Without a CLI `arrays` setting, array diffing defaults to POSITION."""
     config = DifferConfig(quiet_logger, SimpleNamespace(arrays=None))
     node = NodeCoords(None, None, None)
     assert config.array_diff_mode(node) == ArrayDiffOpts.POSITION
Exemplo n.º 23
0
    def _merge_arrays_of_hashes(
        self, lhs: CommentedSeq, rhs: CommentedSeq, path: YAMLPath,
        node_coord: NodeCoords
    ) -> CommentedSeq:
        """
        Merge two Arrays-of-Hashes.

        This is a deep merge operation.  Each dict is treated as a record with
        an identity key.  RHS records are merged with LHS records for which the
        identity key matches.  As such, an identity key is required in both LHS
        and RHS records.  This key is configurable.  When there is no LHS match
        for an RHS key, the RHS record is appended to the LHS list.

        Parameters:
        1. lhs (CommentedSeq) The merge target.
        2. rhs (CommentedSeq) The merge source.
        3. path (YAMLPath) Location within the DOM where this merge is taking
           place.
        4. node_coord (NodeCoords) The RHS root node, its parent, and reference
           within its parent; used for config lookups.

        Returns:  (CommentedSeq) The merged result.

        Raises:
        - `MergeException` when a clean merge is impossible.
        """
        if not isinstance(lhs, CommentedSeq):
            raise MergeException(
                "Impossible to add Array-of-Hash data to non-Array"
                " destination."
                , path)

        self.logger.debug(
            "Merging {} Hash(es) at {}.".format(len(rhs), path),
            prefix="Merger::_merge_arrays_of_hashes:  ", data=rhs)

        # Resolve the identity key once, from the first RHS record; every
        # record in both AoHs is expected to carry this same key.
        id_key: str = ""
        if len(rhs) > 0 and isinstance(rhs[0], CommentedMap):
            id_key = self.config.aoh_merge_key(
                NodeCoords(rhs[0], rhs, 0), rhs[0])
            self.logger.debug(
                "Merger::_merge_arrays_of_hashes:  RHS AoH yielded id_key:"
                "  {}.".format(id_key))

        merge_mode = self.config.aoh_merge_mode(node_coord)
        for idx, ele in enumerate(rhs):
            path_next = path + "[{}]".format(idx)
            self.logger.debug(
                "Processing element #{} at {}.".format(idx, path_next),
                prefix="Merger::_merge_arrays_of_hashes:  ", data=ele)

            # Hoisted once per element (was duplicated in three branches):
            # preserve any YAML anchor when the RHS record must be appended.
            ele_anchor = ele.anchor.value if hasattr(ele, "anchor") else None

            if merge_mode is AoHMergeOpts.DEEP:
                if id_key in ele:
                    id_val = Nodes.tagless_value(ele[id_key])
                else:
                    raise MergeException(
                        "Mandatory identity key, {}, not present in Hash with"
                        " keys:  {}."
                        .format(id_key, ", ".join(ele.keys()))
                        , path_next
                    )

                # Deep-merge into the first LHS record whose identity value
                # matches; append when no LHS record matches.
                merged_hash = False
                for lhs_hash in (
                    lhs_hash for lhs_hash in lhs
                    if isinstance(lhs_hash, CommentedMap)
                    and id_key in lhs_hash
                    and Nodes.tagless_value(lhs_hash[id_key]) == id_val
                ):
                    self._merge_dicts(lhs_hash, ele, path_next)
                    merged_hash = True

                    # Synchronize YAML Tags
                    lhs_hash.yaml_set_tag(ele.tag.value)
                    break
                if not merged_hash:
                    Nodes.append_list_element(lhs, ele, ele_anchor)
            elif merge_mode is AoHMergeOpts.UNIQUE:
                # Append only records not already present in LHS.
                if ele not in lhs:
                    Nodes.append_list_element(lhs, ele, ele_anchor)
            else:
                # AoHMergeOpts.ALL: append every RHS record unconditionally.
                Nodes.append_list_element(lhs, ele, ele_anchor)
        return lhs
Exemplo n.º 24
0
 def test_set_merge_mode_cli(self, quiet_logger, setting, mode):
     """Each parametrized CLI `sets` setting maps to its expected merge mode."""
     config = MergerConfig(quiet_logger, SimpleNamespace(sets=setting))
     node = NodeCoords(None, None, None)
     assert config.set_merge_mode(node) == mode
Exemplo n.º 25
0
 def test_set_merge_mode_default(self, quiet_logger):
     """Without a CLI `sets` setting, Set merging defaults to UNIQUE."""
     config = MergerConfig(quiet_logger, SimpleNamespace(sets=None))
     node = NodeCoords(None, None, None)
     assert config.set_merge_mode(node) == SetMergeOpts.UNIQUE
Exemplo n.º 26
0
 def test_aoh_merge_mode_default(self, quiet_logger):
     """Without a CLI `aoh` setting, Array-of-Hash merging defaults to ALL."""
     config = MergerConfig(quiet_logger, SimpleNamespace(aoh=None))
     node = NodeCoords(None, None, None)
     assert config.aoh_merge_mode(node) == AoHMergeOpts.ALL
Exemplo n.º 27
0
 def test_repr(self):
     """repr() of a NodeCoords quotes its node, parent, and parentref."""
     coords = NodeCoords([], None, None)
     expected = "NodeCoords('[]', 'None', 'None')"
     assert repr(coords) == expected
Exemplo n.º 28
0
 def test_hash_merge_mode_default(self, quiet_logger):
     """Without a CLI `hashes` setting, Hash merging defaults to DEEP."""
     config = MergerConfig(quiet_logger, SimpleNamespace(hashes=None))
     node = NodeCoords(None, None, None)
     assert config.hash_merge_mode(node) == HashMergeOpts.DEEP
    def test_debug_noisy(self, capsys):
        """Verify ConsolePrinter.debug output formatting when debug=True."""
        args = SimpleNamespace(verbose=False, quiet=False, debug=True)
        logger = ConsolePrinter(args)
        anchoredkey = PlainScalarString("TestKey", anchor="KeyAnchor")
        anchoredval = PlainScalarString("TestVal", anchor="Anchor")
        foldedstr = "123456789 123456789 123456789"
        foldedstrfolds = [10, 20]
        foldedval = FoldedScalarString(foldedstr)
        foldedval.fold_pos = foldedstrfolds

        # Anchored scalar: anchor name is rendered in (&...) prefix form.
        logger.debug(anchoredval)
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  (&Anchor)TestVal",
        ]) + "\n" == console.out

        # List: each element prefixed with its [index] and suffixed with type.
        logger.debug(["test", anchoredval])
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  [0]test<class 'str'>",
            "DEBUG:  [1](&Anchor)TestVal<class 'ruamel.yaml.scalarstring.PlainScalarString'>",
        ]) + "\n" == console.out

        # Dict: anchored keys/values render as (&key-anchor,&value-anchor).
        logger.debug({"ichi": 1, anchoredkey: anchoredval})
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  [ichi]1<class 'int'>",
            "DEBUG:  [TestKey](&KeyAnchor,&Anchor)TestVal<class 'ruamel.yaml.scalarstring.PlainScalarString'>",
        ]) + "\n" == console.out

        # An unanchored value beside an anchored key renders as "_".
        logger.debug({"ichi": 1, anchoredkey: "non-anchored value"})
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  [ichi]1<class 'int'>",
            "DEBUG:  [TestKey](&KeyAnchor,_)non-anchored value<class 'str'>",
        ]) + "\n" == console.out

        # An unanchored key beside an anchored value renders as "_".
        logger.debug({"ichi": 1, "non-anchored-key": anchoredval})
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  [ichi]1<class 'int'>",
            "DEBUG:  [non-anchored-key](_,&Anchor)TestVal<class 'ruamel.yaml.scalarstring.PlainScalarString'>",
        ]) + "\n" == console.out

        # Exercise every container/tag permutation in one nested document,
        # plus the prefix/header/footer/data_header/data_footer decorations.
        tagged_value = "value"
        tagged_value_node = TaggedScalar(tagged_value, tag="!tag")
        tagged_sequence = CommentedSeq(["a", "b"])
        tagged_sequence.yaml_set_tag("!raz")
        selfref_value = "self_referring"
        selfref_value_node = TaggedScalar(selfref_value, tag="!self_referring")
        logger.debug(
            "test_wrappers_consoleprinter:",
            prefix="test_debug_noisy:  ",
            header="--- HEADER ---",
            footer="=== FOOTER ===",
            data_header="+++ DATA HEADER +++",
            data_footer="::: DATA FOOTER :::",
            data=CommentedMap({
                "key": "value",
                "tagged": tagged_value_node,
                tagged_value_node: "untagged value",
                selfref_value_node: selfref_value_node,
                "array": ["ichi", "ni", "san"],
                "tagged_array": tagged_sequence,
                "aoh": [{"id": 1},{"id": 2},{"id": 3}],
                "aoa": [[True, True], [True, False], [False, True]],
                "dod": {"a": {"b": {"c": "d"}}},
                "set": CommentedSet(["one", "two"]),
            })
        )
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  test_debug_noisy:  --- HEADER ---",
            "DEBUG:  test_debug_noisy:  test_wrappers_consoleprinter:",
            "DEBUG:  test_debug_noisy:  +++ DATA HEADER +++",
            "DEBUG:  test_debug_noisy:  [key]value<class 'str'>",
            "DEBUG:  test_debug_noisy:  [tagged]<_,!tag>value<class 'ruamel.yaml.comments.TaggedScalar'>(<class 'str'>)",
            "DEBUG:  test_debug_noisy:  [value]<!tag,_>untagged value<class 'str'>",
            "DEBUG:  test_debug_noisy:  [self_referring]<!self_referring,!self_referring>self_referring<class 'ruamel.yaml.comments.TaggedScalar'>(<class 'str'>)",
            "DEBUG:  test_debug_noisy:  [array][0]ichi<class 'str'>",
            "DEBUG:  test_debug_noisy:  [array][1]ni<class 'str'>",
            "DEBUG:  test_debug_noisy:  [array][2]san<class 'str'>",
            "DEBUG:  test_debug_noisy:  [tagged_array]<_,!raz>[0]a<class 'str'>",
            "DEBUG:  test_debug_noisy:  [tagged_array]<_,!raz>[1]b<class 'str'>",
            "DEBUG:  test_debug_noisy:  [aoh][0][id]1<class 'int'>",
            "DEBUG:  test_debug_noisy:  [aoh][1][id]2<class 'int'>",
            "DEBUG:  test_debug_noisy:  [aoh][2][id]3<class 'int'>",
            "DEBUG:  test_debug_noisy:  [aoa][0][0]True<class 'bool'>",
            "DEBUG:  test_debug_noisy:  [aoa][0][1]True<class 'bool'>",
            "DEBUG:  test_debug_noisy:  [aoa][1][0]True<class 'bool'>",
            "DEBUG:  test_debug_noisy:  [aoa][1][1]False<class 'bool'>",
            "DEBUG:  test_debug_noisy:  [aoa][2][0]False<class 'bool'>",
            "DEBUG:  test_debug_noisy:  [aoa][2][1]True<class 'bool'>",
            "DEBUG:  test_debug_noisy:  [dod][a][b][c]d<class 'str'>",
            "DEBUG:  test_debug_noisy:  [set]{one}<class 'str'>",
            "DEBUG:  test_debug_noisy:  [set]{two}<class 'str'>",
            "DEBUG:  test_debug_noisy:  ::: DATA FOOTER :::",
            "DEBUG:  test_debug_noisy:  === FOOTER ===",
        ]) + "\n" == console.out

        # NOTE(review): the next assert is missing "+ '\n' == console.out" and
        # therefore always passes (a non-empty string is truthy).  Confirm the
        # expected output before adding the comparison.
        logger.debug(tagged_value_node)
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  <!tag>value<class 'ruamel.yaml.comments.TaggedScalar'>(<class 'str'>)",
        ])

        # NOTE(review): vacuous assert again — and the "[tagged_array]" key
        # prefix looks dubious for a bare CommentedSeq; verify actual output
        # before making this a real comparison.
        logger.debug(tagged_sequence)
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  [tagged_array]<!raz>[0]a<class 'str'>",
            "DEBUG:  [tagged_array]<!raz>[1]b<class 'str'>",
        ])

        # NodeCoords debug dump: path, segment, node, parent, parentref, and
        # the full ancestry stack are each rendered on their own lines.
        nc = NodeCoords(
            "value",
            dict(key="value"),
            "key",
            YAMLPath("doc_root.key"),
            [   (dict(doc_root=dict(key="value")), "doc_root"),
                (dict(key="value"), "key")],
            (PathSegmentTypes.KEY, "key")
        )
        logger.debug(
            "A node coordinate:", prefix="test_debug_noisy:  ", data=nc)
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  test_debug_noisy:  A node coordinate:",
            "DEBUG:  test_debug_noisy:  (path)doc_root.key",
            "DEBUG:  test_debug_noisy:  (segment)[0]PathSegmentTypes.KEY<enum 'PathSegmentTypes'>",
            "DEBUG:  test_debug_noisy:  (segment)[1]key<class 'str'>",
            "DEBUG:  test_debug_noisy:  (node)value",
            "DEBUG:  test_debug_noisy:  (parent)[key]value<class 'str'>",
            "DEBUG:  test_debug_noisy:  (parentref)key",
            "DEBUG:  test_debug_noisy:  (ancestry)[0][0][doc_root][key]value<class 'str'>",
            "DEBUG:  test_debug_noisy:  (ancestry)[0][1]doc_root<class 'str'>",
            "DEBUG:  test_debug_noisy:  (ancestry)[1][0][key]value<class 'str'>",
            "DEBUG:  test_debug_noisy:  (ancestry)[1][1]key<class 'str'>",
        ]) + "\n" == console.out

        # NOTE(review): vacuous assert (no "== console.out") — confirm the
        # FoldedScalarString fold_pos rendering, then add the comparison.
        logger.debug(foldedval)
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  {}<class 'ruamel.yaml.scalarstring.FoldedScalarString'>,folded@{}".format(foldedstr, foldedstrfolds)
        ])
Exemplo n.º 30
0
 def test_str(self):
     """str() of a NodeCoords renders only the wrapped node itself."""
     coords = NodeCoords([], None, None)
     assert str(coords) == "[]"