Example 1
 def test_get_impossible_nodes_error(self, quiet_logger, yamlpath,
                                     mustexist):
     yamldata = """---
     ints:
       - 1
       - 2
       - 3
       - 4
       - 5
     floats:
       - 1.1
       - 2.2
       - 3.3
     """
     yaml = YAML()
     processor = Processor(quiet_logger, yaml.load(yamldata))
     with pytest.raises(YAMLPathException) as ex:
         nodes = list(processor.get_nodes(yamlpath, mustexist=mustexist))
     assert -1 < str(ex.value).find("does not match any nodes")
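The yamlpath and mustexist arguments are supplied by a pytest.mark.parametrize decorator that this excerpt omits. A minimal sketch of such a decorator, with illustrative cases rather than the project's actual ones:

import pytest
from yamlpath import YAMLPath

@pytest.mark.parametrize("yamlpath,mustexist", [
    (YAMLPath("ints[12]"), True),       # index beyond the end of the array
    (YAMLPath("floats[.=9.9]"), True),  # search that matches nothing
])
def test_get_impossible_nodes_error(self, quiet_logger, yamlpath, mustexist):
    ...  # body as shown above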
Example 2
    def test_delete_nodes(self, quiet_logger, delete_yamlpath, pathseparator, old_deleted_nodes, new_flat_data):
        yamldata = """---
aliases:
  - &alias_number 1
  - &alias_bool true
number: 1
bool: true
alias_number: *alias_number
alias_bool: *alias_bool
hash:
  number: 1
  bool: true
  alias_number: *alias_number
  alias_bool: *alias_bool
complex:
  hash:
    number: 1
    bool: true
    alias_number: *alias_number
    alias_bool: *alias_bool
records:
  - id: ABC
    data: 123
  - id: BCD
    data: 987
  - id: CDE
    data: 8B8
"""
        yaml = YAML()
        data = yaml.load(yamldata)
        processor = Processor(quiet_logger, data)

        # The generator must be consumed, or no nodes will actually be deleted
        deleted_nodes = []
        for nc in processor.delete_nodes(delete_yamlpath, pathsep=pathseparator):
            deleted_nodes.append(nc)

        for (test_value, verify_node_coord) in zip(old_deleted_nodes, deleted_nodes):
            assert test_value == unwrap_node_coords(verify_node_coord)

        for (test_value, verify_node_coord) in zip(new_flat_data, processor.get_nodes("**")):
            assert test_value == unwrap_node_coords(verify_node_coord)
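Note that delete_nodes is a generator: nodes are removed only as it is consumed, which is why the loop above collects its results before asserting. A minimal standalone sketch of the same pattern (the document content is illustrative):

from types import SimpleNamespace

from ruamel.yaml import YAML
from yamlpath import Processor, YAMLPath
from yamlpath.wrappers import ConsolePrinter

log = ConsolePrinter(SimpleNamespace(quiet=True, verbose=False, debug=False))
data = YAML().load("""---
records:
  - id: ABC
  - id: BCD
""")
processor = Processor(log, data)

# No-op: the generator is never consumed, so nothing is deleted
processor.delete_nodes(YAMLPath("records[id=ABC]"))

# Effective: consuming the generator performs the deletion
deleted = list(processor.delete_nodes(YAMLPath("records[id=ABC]")))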
Example 3
    def test_cannot_add_novel_alias_keys(self, quiet_logger):
        yamldata = """---
        anchorKeys:
          &keyOne aliasOne: 1 1 Alpha 1
          &keyTwo aliasTwo: 2 2 Beta 2

        hash:
          *keyOne :
            subval: 1.1
          *keyTwo :
            subval: 2.2
        """
        yaml = YAML()
        data = yaml.load(yamldata)
        processor = Processor(quiet_logger, data)

        yamlpath = YAMLPath("hash[&keyThree].subval")
        newvalue = "Abort"
        with pytest.raises(YAMLPathException) as ex:
            nodes = list(processor.get_nodes(yamlpath))
        assert -1 < str(ex.value).find("Cannot add")
Example 4
    def test_non_int_array_index_error(self, quiet_logger):
        from collections import deque
        from yamlpath.enums import PathSegmentTypes
        yamldata = """---
        - 1
        """
        yaml = YAML()
        data = yaml.load(yamldata)
        path = YAMLPath("[0]")
        processor = Processor(quiet_logger, data)
        strp = str(path)    # Force the path to parse before tampering with it

        path._escaped = deque([
            (PathSegmentTypes.INDEX, "0F"),
        ])
        path._unescaped = deque([
            (PathSegmentTypes.INDEX, "0F"),
        ])

        with pytest.raises(YAMLPathException) as ex:
            nodes = list(processor._get_nodes_by_index(data, path, 0))
        assert -1 < str(ex.value).find("is not an integer array index")
Example 5
    def test_get_nodes_by_unknown_path_segment_error(self, quiet_logger):
        from collections import deque
        from enum import Enum
        from yamlpath.enums import PathSegmentTypes
        names = [m.name for m in PathSegmentTypes] + ['DNF']
        PathSegmentTypes = Enum('PathSegmentTypes', names)

        yamldata = """---
        key: value
        """
        yaml = YAML()
        data = yaml.load(yamldata)
        processor = Processor(quiet_logger, data)
        path = YAMLPath("abc")
        stringified = str(path)     # Force Path to parse
        path._escaped = deque([
            (PathSegmentTypes.DNF, "abc"),
        ])

        with pytest.raises(NotImplementedError):
            nodes = list(processor._get_nodes_by_path_segment(data, path, 0))
Example 6
    def test_get_none_data_nodes(self, quiet_logger):
        processor = Processor(quiet_logger, None)
        yamlpath = YAMLPath("abc")
        optional_matches = 0
        must_exist_matches = 0
        req_node_matches = 0
        traversal_matches = 0

        for node in processor.get_nodes(yamlpath, mustexist=False):
            optional_matches += 1
        for node in processor.get_nodes(yamlpath, mustexist=True):
            must_exist_matches += 1
        for node in processor._get_required_nodes(None, yamlpath):
            req_node_matches += 1
        for node in processor._get_nodes_by_traversal(None, yamlpath, 0):
            traversal_matches += 1

        assert optional_matches == 0
        assert must_exist_matches == 0
        assert req_node_matches == 0
        assert traversal_matches == 1   # A None node traverses into null
Example 7
    def prepare(self, data: Any) -> None:
        """
        Load references to all nodes which match config rules.

        Parameters:
        1. data (Any) The DOM for which to load configuration.

        Returns:  N/A
        """
        if self.config is None:
            return

        # Eliminate previous rules and keys to limit scanning to only those
        # nodes which exist within this new document.
        self.rules = {}
        self.keys = {}

        # Load new rules and keys
        proc = Processor(self.log, data)
        self._prepare_user_rules(proc, "rules", self.rules)
        self._prepare_user_rules(proc, "keys", self.keys)
Example 8
 def test_get_nodes(self, quiet_logger, yamlpath, results, mustexist,
                    default):
     yamldata = """---
     aliases:
       - &aliasAnchorOne Anchored Scalar Value
       - &aliasAnchorTwo Hey, Number Two!
     array_of_hashes: &arrayOfHashes
       - step: 1
         name: one
       - step: 2
         name: two
     rollback_hashes:
       on_condition:
         failure:
           - step: 3
             name: three
           - step: 4
             name: four
     disabled_steps:
       - 2
       - 3
     squads:
       alpha: 1.1
       bravo: 2.2
       charlie: 3.3
       delta: 4.4
     number_keys:
       1: one
       2: two
       3: three
     """
     yaml = YAML()
     processor = Processor(quiet_logger, yaml.load(yamldata))
     matchidx = 0
     for node in processor.get_nodes(yamlpath,
                                     mustexist=mustexist,
                                     default_value=default):
         assert node == results[matchidx]
         matchidx += 1
     assert len(results) == matchidx
Example 9
 def test_get_singular_collectors(self, quiet_logger, yamlpath, results):
     yamldata = """---
     temps:
       - 32
       - 0
       - 110
       - 100
       - 72
       - 68
       - 114
       - 34
       - 36
     """
     yaml = YAML()
     processor = Processor(quiet_logger, yaml.load(yamldata))
     matchidx = 0
     # Note that Collectors deal with virtual DOMs, so mustexist must always
     # be set True.  Otherwise, ephemeral virtual nodes would be created and
     # discarded.  Is this desirable?  Maybe, but not today.  For now, using
     # Collectors without setting mustexist=True will be undefined behavior.
     for node in processor.get_nodes(yamlpath, mustexist=True):
         assert unwrap_node_coords(node) == results[matchidx]
         matchidx += 1
     assert len(results) == matchidx
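For context, the parametrized paths feeding this test are Collector expressions: wrapping a search in parentheses gathers every match into one virtual list, and collectors can be combined with + and - set math. Two illustrative queries against the document above (not the project's actual cases):

# Every temperature above 100, gathered into one virtual list
hot = list(processor.get_nodes("(temps[.>100])", mustexist=True))

# Collector set math: values above freezing, minus the hottest ones
mild = list(processor.get_nodes("(temps[.>32])-(temps[.>100])", mustexist=True))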
Example 10
    def test_key_anchor_children(self, quiet_logger):
        yamldata = """---
        anchorKeys:
          &keyOne aliasOne: 1 1 Alpha 1
          &keyTwo aliasTwo: 2 2 Beta 2

        hash:
          *keyOne :
            subval: 1.1
          *keyTwo :
            subval: 2.2
        """
        yaml = YAML()
        data = yaml.load(yamldata)
        processor = Processor(quiet_logger, data)

        yamlpath = YAMLPath("hash[&keyTwo].subval")
        newvalue = "Mute audibles"
        processor.set_value(yamlpath, newvalue, mustexist=True)
        matchtally = 0
        for node in processor.get_nodes(yamlpath):
            assert unwrap_node_coords(node) == newvalue
            matchtally += 1
        assert matchtally == 1
Example 11
    def test_get_nodes(self, quiet_logger, yamlpath, results, mustexist, default):
        yamldata = """---
aliases:
  - &aliasAnchorOne Anchored Scalar Value
  - &aliasAnchorTwo Hey, Number Two!
array_of_hashes: &arrayOfHashes
  - step: 1
    name: one
  - step: 2
    name: two
rollback_hashes:
  on_condition:
    failure:
      - step: 3
        name: three
      - step: 4
        name: four
disabled_steps:
  - 2
  - 3
squads:
  alpha: 1.1
  bravo: 2.2
  charlie: 3.3
  delta: 4.4
number_keys:
  1: one
  2: two
  3: three

# For traversal tests:
name: Name 0-0
lots_of_names:
  name: Name 1-1
  tier1:
    name: Name 2-1
    tier2:
      name: Name 3-1
      list_of_named_objects:
        - name: Name 4-1
          tag: Tag 4-1
          other: Other 4-1
          dude: Dude 4-1
        - tag: Tag 4-2
          name: Name 4-2
          dude: Dude 4-2
          other: Other 4-2
        - other: Other 4-3
          dude: Dude 4-3
          tag: Tag 4-3
          name: Name 4-3
        - dude: Dude 4-4
          tag: Tag 4-4
          name: Name 4-4
          other: Other 4-4

###############################################################################
# For descendent searching:
products_hash:
  doodad:
    availability:
      start:
        date: 2020-10-10
        time: 08:00
      stop:
        date: 2020-10-29
        time: 17:00
    dimensions:
      width: 5
      height: 5
      depth: 5
      weight: 10
  doohickey:
    availability:
      start:
        date: 2020-08-01
        time: 10:00
      stop:
        date: 2020-09-25
        time: 10:00
    dimensions:
      width: 1
      height: 2
      depth: 3
      weight: 4
  widget:
    availability:
      start:
        date: 2020-01-01
        time: 12:00
      stop:
        date: 2020-01-01
        time: 16:00
    dimensions:
      width: 9
      height: 10
      depth: 1
      weight: 4
products_array:
  - product: doodad
    availability:
      start:
        date: 2020-10-10
        time: 08:00
      stop:
        date: 2020-10-29
        time: 17:00
    dimensions:
      width: 5
      height: 5
      depth: 5
      weight: 10
  - product: doohickey
    availability:
      start:
        date: 2020-08-01
        time: 10:00
      stop:
        date: 2020-09-25
        time: 10:00
    dimensions:
      width: 1
      height: 2
      depth: 3
      weight: 4
  - product: widget
    availability:
      start:
        date: 2020-01-01
        time: 12:00
      stop:
        date: 2020-01-01
        time: 16:00
    dimensions:
      width: 9
      height: 10
      depth: 1
      weight: 4
###############################################################################
"""
        yaml = YAML()
        processor = Processor(quiet_logger, yaml.load(yamldata))
        matchidx = 0
        for node in processor.get_nodes(
                yamlpath, mustexist=mustexist, default_value=default
        ):
            assert unwrap_node_coords(node) == results[matchidx]
            matchidx += 1
        assert len(results) == matchidx
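The parametrized queries for this fixture exercise deep traversal (**) and descendent-search segments; a few illustrative examples of those segment types (not the project's actual cases):

# Deep traversal: every node keyed 'name', at any depth
names = list(processor.get_nodes("**.name", mustexist=True))

# Descendent search: products whose nested weight exceeds 4
heavy = list(processor.get_nodes(
    "products_hash.*[dimensions.weight>4]", mustexist=True))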
Example 12
    def test_good_multi_replacements(self, script_runner, tmp_path_factory, old_eyaml_keys, new_eyaml_keys, quiet_logger):
        from yamlpath.func import unwrap_node_coords
        from yamlpath.common import Parsers
        from yamlpath import Processor
        from yamlpath.eyaml import EYAMLProcessor

        simple_content = """---
        encrypted_string: ENC[PKCS7,MIIBiQYJKoZIhvcNAQcDoIIBejCCAXYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAHA4rPcTzvgzPLtnGz3yoyX/kVlQ5TnPXcScXK2bwjguGZLkuzv/JVPAsOm4t6GlnROpy4zb/lUMHRJDChJhPLrSj919B8//huoMgw0EU5XTcaN6jeDDjL+vhjswjvLFOux66UwvMo8sRci/e2tlFiam8VgxzV0hpF2qRrL/l84V04gL45kq4PCYDWrJNynOwYVbSIF+qc5HaF25H8kHq1lD3RB6Ob/J942Q7k5Qt7W9mNm9cKZmxwgtUgIZWXW6mcPJ2dXDB/RuPJJSrLsb1VU/DkhdgxaNzvLCA+MViyoFUkCfHFNZbaHKNkoYXBy7dLmoh/E5tKv99FeG/7CzL3DBMBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBCVU5Mjt8+4dLkoqB9YArfkgCDkdIhXR9T1M4YYa1qTE6by61VPU3g1aMExRmo4tNZ8FQ==]
        encrypted_block: >
          ENC[PKCS7,MIIBeQYJKoZIhvcNAQcDoIIBajCCAWYCAQAxggEhMIIBHQIBADAFMAACAQEw
          DQYJKoZIhvcNAQEBBQAEggEAnxQVqyIgRTb/+VP4Q+DLJcnlS8YPouXEW8+z
          it9uwUA02CEPxCEU944GcHpgTY3EEtkm+2Z/jgXI119VMML+OOQ1NkwUiAw/
          wq0vwz2D16X31XzhedQN5FZbfZ1C+2tWSQfCjE0bu7IeHfyR+k2ssD11kNZh
          JDEr2bM2dwOdT0y7VGcQ06vI9gw6UXcwYAgS6FoLm7WmFftjcYiNB+0EJSW0
          VcTn2gveaw9iOQcum/Grby+9Ybs28fWd8BoU+ZWDpoIMEceujNa9okIXNPJO
          jcvv1sgauwJ3RX6WFQIy/beS2RT5EOLhWIZCAQCcgJWgovu3maB7dEUZ0NLG
          OYUR7zA8BgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBAbO16EzQ5/cdcvgB0g
          tpKIgBAEgTLT5n9Jtc9venK0CKso]
        """
        anchored_content = """---
        aliases:
          - &blockStyle >
            ENC[PKCS7,MIIBiQYJKoZIhvcNAQcDoIIBejCCAXYCAQAxggEhMIIBHQIBADAFMAACAQEw
            DQYJKoZIhvcNAQEBBQAEggEArvk6OYa1gACTdrWq2SpCrtGRlc61la5AGU7L
            aLTyKfqD9vqx71RDjobfOF96No07kLsEpoAJ+LKKHNjdG6kjvpGPmttj9Dkm
            XVoU6A+YCmm4iYFKD/NkoSOEyAkoDOXSqdjrgt0f37GefEsXt6cqAavDpUJm
            pmc0KI4TCG5zpfCxqttMs+stOY3Y+0WokkulQujZ7K3SdWUSHIysgMrWiect
            Wdg5unxN1A/aeyvhgvYSNPjU9KBco7SDnigSs9InW/QghJFrZRrDhTp1oTUc
            qK5lKvaseHkVGi91vPWeLQxZt1loJB5zL6j5BxMbvRfJK+wc3ax2u4x8WTAB
            EurCwzBMBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBAwcy7jvcOGcMfLEtug
            LEXbgCBkocdckuDe14mVGmUmM++xN34OEVRCeGVWWUnWq1DJ4Q==]
          - &stringStyle ENC[PKCS7,MIIBiQYJKoZIhvcNAQcDoIIBejCCAXYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAIu44u62q5sVfzC7kytLi2Z/EzH2DKr4vDsoqDBeSZ71aRku/uSrjyiO4lyoq9Kva+eBAyjBay5fnqPVBaU3Rud2pdEoZEoyofi02jn4hxUKpAO1W0AUgsQolGe53qOdM4U8RbwnTR0gr3gp2mCd18pH3SRMP9ryrsBAxGzJ6mR3RgdZnlTlqVGXCeWUeVpbH+lcHw3uvd+o/xkvJ/3ypxz+rWILiAZ3QlCirzn/qb2fHuKf3VBh8RVFuQDaM5voajZlgjD6KzNCsbATOqOA6eJI4j0ngPdDlIjGHAnahuyluQ5f5SIaIjLC+ZeCOfIYni0MQ+BHO0JNbccjq2Unb7TBMBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBCYmAI0Ao3Ok1cSmVw0SgQGgCBK62z1r5RfRjf1xKfqDxTsGUHfsUmM3EjGJfnWzCRvuQ==]
        block: *blockStyle
        string: *stringStyle
        yet_another:
          'more.complex.child': *blockStyle
        """
        simple_file = create_temp_yaml_file(tmp_path_factory, simple_content)
        anchored_file = create_temp_yaml_file(tmp_path_factory, anchored_content)

        result = script_runner.run(
            self.command,
            "--newprivatekey={}".format(new_eyaml_keys[0]),
            "--newpublickey={}".format(new_eyaml_keys[1]),
            "--oldprivatekey={}".format(old_eyaml_keys[0]),
            "--oldpublickey={}".format(old_eyaml_keys[1]),
            simple_file,
            anchored_file
        )
        assert result.success, result.stderr

        with open(simple_file, 'r') as fhnd:
            simple_data = fhnd.read()

        with open(anchored_file, 'r') as fhnd:
            anchored_data = fhnd.read()

        assert simple_data != simple_content
        assert anchored_data != anchored_content

        # Verify that block and string formatting is correct
        yaml = Parsers.get_yaml_editor()

        (yaml_rotated_data, doc_loaded) = Parsers.get_yaml_data(
            yaml, quiet_logger,
            anchored_data, literal=True)
        if not doc_loaded:
            # An error message has already been logged
            assert False, "Rotated anchored data failed to load"

        source_processor = Processor(quiet_logger, yaml_rotated_data)
        for node in source_processor.get_nodes('/block', mustexist=True):
            assert '  ' not in unwrap_node_coords(node)

        # Test that the pre- and post-rotated values are identical
        (yaml_anchored_data, doc_loaded) = Parsers.get_yaml_data(
            yaml, quiet_logger,
            anchored_content, literal=True)
        if not doc_loaded:
            # An error message has already been logged
            assert False, "Original anchored data failed to load"

        (yaml_rotated_data, doc_loaded) = Parsers.get_yaml_data(
            yaml, quiet_logger,
            anchored_data, literal=True)
        if not doc_loaded:
            # An error message has already been logged
            assert False, "Rotated anchored data failed to load"

        source_processor = EYAMLProcessor(
            quiet_logger, yaml_anchored_data,
            privatekey=old_eyaml_keys[0],
            publickey=old_eyaml_keys[1])
        for node in source_processor.get_eyaml_values(
            '/block', True
        ):
            assert unwrap_node_coords(node) == 'This is a test value.'

        rotated_processor = EYAMLProcessor(
            quiet_logger, yaml_rotated_data,
            privatekey=new_eyaml_keys[0],
            publickey=new_eyaml_keys[1])
        for node in rotated_processor.get_eyaml_values(
            '/block', True
        ):
            assert unwrap_node_coords(node) == 'This is a test value.'
Example 13
from types import SimpleNamespace

from yamlpath import Processor, YAMLPath
from yamlpath.common import Parsers
from yamlpath.exceptions import YAMLPathException
from yamlpath.wrappers import ConsolePrinter


def check_playbook_file_removed_and_added(playbook_path):
    playbook_ok = True

    yaml_parser = Parsers.get_yaml_editor()

    logging_args = SimpleNamespace(quiet=False, verbose=False, debug=False)
    log = ConsolePrinter(logging_args)

    # Find every path removed by a file Task (also matches tasks within blocks)
    files_absent_string = "tasks.**.file[state=absent][parent()].path"
    files_absent_yamlpath = YAMLPath(files_absent_string)
    path_editing_tasks_yamlpath = ""

    log.info("Info: Evaluating playbook '{}'".format(playbook_path))
    (yaml_data, doc_loaded) = Parsers.get_yaml_data(yaml_parser, log,
                                                    playbook_path)
    if not doc_loaded:
        # There was an issue loading the file; an error message has already been
        # printed via ConsolePrinter.
        return False

    processor = Processor(log, yaml_data)
    # Modules that edit files; also referenced in the except handler below
    ansible_modules = ["lineinfile", "blockinfile", "copy"]
    try:
        for node in processor.get_nodes(files_absent_yamlpath,
                                        mustexist=False):
            path = str(node)
            # 'node' is a NodeCoords.
            if path == 'None':
                continue
            elif "{{" in path:
                # Identified path is a Jinja expression, unfortunately there is no easy way to get
                # the actual path without making this test very complicated
                continue

            # Check whether this path is used by any of the file-editing
            # Ansible modules listed above
            path_editing_tasks_string = "tasks.**.[.=~/{modules}/][*='{path}'][parent()].name"
            path_editing_tasks_yamlpath = YAMLPath(
                path_editing_tasks_string.format(
                    modules="|".join(ansible_modules), path=path))
            for task in processor.get_nodes(path_editing_tasks_yamlpath,
                                            mustexist=False):
                log.info(
                    "Error: Task '{}' manipulates a file that is removed by another task"
                    .format(task))
                playbook_ok = False
    except YAMLPathException as ex:
        no_file_msg = (
            "Cannot add PathSegmentTypes.TRAVERSE subreference to lists at 'None' "
            "in '{}'.")
        if str(ex) == no_file_msg.format(files_absent_string):
            log.info(
                "Info: Playbook {} has no 'file' tasks.".format(playbook_path))
        elif path_editing_tasks_yamlpath and str(ex) == no_file_msg.format(
                path_editing_tasks_yamlpath):
            log.info("Info: Playbook {} has no '{}' tasks.".format(
                playbook_path, " ".join(ansible_modules)))
        else:
            log.info("Error: {}.".format(ex))

    return playbook_ok
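A minimal driver for this checker, assuming it is invoked as a script against a single playbook (the file name is illustrative):

import sys

if __name__ == "__main__":
    sys.exit(0 if check_playbook_file_removed_and_added("site.yml") else 1)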
Example 14
    def merge_with(self, rhs: Any) -> None:
        """
        Merge this document with another.

        Parameters:
        1. rhs (Any) The document to merge into this one.

        Returns:  N/A

        Raises:
        - `MergeException` when a clean merge is impossible.
        """
        # Do nothing when RHS is None (empty document)
        if rhs is None:
            return

        # Remove all comments (no sensible way to merge them)
        Parsers.delete_all_comments(rhs)

        # When LHS is None (empty document), just dump all of RHS into it,
        # honoring any --mergeat|-m location as best as possible.
        insert_at = self.config.get_insertion_point()
        if self.data is None:
            self.logger.debug(
                "Replacing None data with:", prefix="Merger::merge_with:  ",
                data=rhs, data_header="     *****")
            self.data = Nodes.build_next_node(insert_at, 0, rhs)
            self.logger.debug(
                "Merged document is now:", prefix="Merger::merge_with:  ",
                data=self.data, footer="     ***** ***** *****")
            if isinstance(rhs, (dict, list, CommentedSet, set)):
                # Only Scalar values need further processing
                return

        # Resolve any anchor conflicts
        self._resolve_anchor_conflicts(rhs)

        # Prepare the merge rules
        self.config.prepare(rhs)

        # Merge into each insertion point
        merge_performed = False
        lhs_proc = Processor(self.logger, self.data)
        for node_coord in self._get_merge_target_nodes(
            insert_at, lhs_proc, rhs
        ):
            target_node = node_coord.node
            Parsers.set_flow_style(
                rhs, (target_node.fa.flow_style()
                      if hasattr(target_node, "fa")
                      else None))

            if isinstance(rhs, CommentedMap):
                # The RHS document root is a dict
                merge_performed = self._insert_dict(
                    insert_at, target_node, rhs)
            elif isinstance(rhs, CommentedSeq):
                # The RHS document root is a list
                merge_performed = self._insert_list(
                    insert_at, target_node, rhs)
            elif isinstance(rhs, CommentedSet):
                # The RHS document is a set
                merge_performed = self._insert_set(
                    insert_at, target_node, rhs)
            else:
                # The RHS document root is a Scalar value
                merge_performed = self._insert_scalar(
                    insert_at, target_node, lhs_proc, rhs)

        self.logger.debug(
            "Completed merge operation, resulting in document:",
            prefix="Merger::merge_with:  ", data=self.data)

        if not merge_performed:
            raise MergeException(
                "A merge was not performed.  Ensure your target path matches"
                " at least one node in the left document(s).", insert_at)
Example 15
    def transform(cls, configuration: Configuration):
        logging_args = SimpleNamespace(quiet=False, verbose=False, debug=False)
        log = ConsolePrinter(logging_args)

        processor = Processor(log, configuration.config)

        # [.!<100] effectively means that the value is non-numerical
        paths_to_hashes = [
            # branches, old syntax
            "**.push_access_level[.!<100]",
            "**.merge_access_level[.!<100]",
            "**.unprotect_access_level[.!<100]",
            # members & group members
            "**.access_level[.!<100]",
            "**.group_access[.!<100]",
            # old syntax
            "**.group_access_level[.!<100]",
            # tags
            "**.create_access_level[.!<100]",
        ]

        for path in paths_to_hashes:
            try:
                for node_coordinate in processor.get_nodes(path):
                    try:
                        access_level_string = str(node_coordinate.node)
                        node_coordinate.parent[
                            node_coordinate.parentref
                        ] = AccessLevel.get_value(access_level_string)
                    except KeyError:
                        fatal(
                            f"Configuration string '{access_level_string}' is not one of the valid access levels:"
                            f" {', '.join(AccessLevel.get_canonical_names())}",
                            exit_code=EXIT_INVALID_INPUT,
                        )
            except YAMLPathException:
                pass

        # These differ from the above: they are elements of arrays, so we
        # need a different search query and an extra condition for the
        # transformation
        paths_to_arrays = [
            # branches, new GitLab Premium syntax
            "**.allowed_to_push.*.[access_level!<100]",
            "**.allowed_to_merge.*.[access_level!<100]",
            "**.allowed_to_unprotect.*.[access_level!<100]",
        ]

        for path in paths_to_arrays:
            try:
                for node_coordinate in processor.get_nodes(path):
                    if node_coordinate.parentref == "access_level":
                        try:
                            access_level_string = str(node_coordinate.node)
                            node_coordinate.parent[
                                node_coordinate.parentref
                            ] = AccessLevel.get_value(access_level_string)
                        except KeyError:
                            fatal(
                                f"Configuration string '{access_level_string}' is not one of the valid access levels:"
                                f" {', '.join(AccessLevel.get_canonical_names())}",
                                exit_code=EXIT_INVALID_INPUT,
                            )
            except YAMLPathException:
                pass
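The node_coordinate.parent[node_coordinate.parentref] assignment used above is the general NodeCoords pattern for rewriting a matched value in place; a minimal sketch of that pattern on its own (document and replacement value are illustrative):

from types import SimpleNamespace

from ruamel.yaml import YAML
from yamlpath import Processor
from yamlpath.wrappers import ConsolePrinter

log = ConsolePrinter(SimpleNamespace(quiet=True, verbose=False, debug=False))
data = YAML().load("access_level: maintainer")
processor = Processor(log, data)

for node_coordinate in processor.get_nodes("access_level"):
    # parent is the containing dict/list; parentref is the key or index
    node_coordinate.parent[node_coordinate.parentref] = 40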
Example 16
    def merge_with(self, rhs: Any) -> None:
        """
        Merge this document with another.

        Parameters:
        1. rhs (Any) The document to merge into this one.

        Returns:  N/A

        Raises:
        - `MergeException` when a clean merge is impossible.
        """
        # Do nothing when RHS is None (empty document)
        if rhs is None:
            return

        # Remove all comments (no sensible way to merge them)
        Parsers.delete_all_comments(rhs)

        # When LHS is None (empty document), just dump all of RHS into it,
        # honoring any --mergeat|-m location as best as possible.
        insert_at = self.config.get_insertion_point()
        if self.data is None:
            self.logger.debug("Replacing None data with:",
                              prefix="Merger::merge_with:  ",
                              data=rhs,
                              data_header="     *****")
            self.data = Nodes.build_next_node(insert_at, 0, rhs)
            self.logger.debug("Merged document is now:",
                              prefix="Merger::merge_with:  ",
                              data=self.data,
                              footer="     ***** ***** *****")
            if isinstance(rhs, (dict, list)):
                # Only Scalar values need further processing
                return

        # Resolve any anchor conflicts
        self._resolve_anchor_conflicts(rhs)

        # Prepare the merge rules
        self.config.prepare(rhs)

        # Identify a reasonable default should a DOM need to be built up to
        # receive the RHS data.
        default_val = rhs
        if isinstance(rhs, CommentedMap):
            default_val = {}
        elif isinstance(rhs, CommentedSeq):
            default_val = []

        # Loop through all insertion points and the elements in RHS
        merge_performed = False
        nodes: List[NodeCoords] = []
        lhs_proc = Processor(self.logger, self.data)
        for node_coord in lhs_proc.get_nodes(insert_at,
                                             default_value=default_val):
            nodes.append(node_coord)

        for node_coord in nodes:
            target_node = (node_coord.node if isinstance(
                node_coord.node,
                (CommentedMap, CommentedSeq)) else node_coord.parent)

            Parsers.set_flow_style(rhs,
                                   (target_node.fa.flow_style() if hasattr(
                                       target_node, "fa") else None))

            if isinstance(rhs, CommentedMap):
                # The RHS document root is a map
                if isinstance(target_node, CommentedSeq):
                    # But the destination is a list
                    self._merge_lists(target_node, CommentedSeq([rhs]),
                                      insert_at)
                else:
                    self._merge_dicts(target_node, rhs, insert_at)

                    # Synchronize YAML Tags
                    self.logger.debug(
                        "Merger::merge_with:  Setting LHS tag from {} to {}.".
                        format(target_node.tag.value, rhs.tag.value))
                    target_node.yaml_set_tag(rhs.tag.value)
                merge_performed = True
            elif isinstance(rhs, CommentedSeq):
                # The RHS document root is a list
                self._merge_lists(target_node, rhs, insert_at)
                merge_performed = True

                # Synchronize any YAML Tag
                self.logger.debug(
                    "Merger::merge_with:  Setting LHS tag from {} to {}.".
                    format(target_node.tag.value, rhs.tag.value))
                target_node.yaml_set_tag(rhs.tag.value)
            else:
                # The RHS document root is a Scalar value
                target_node = node_coord.node
                if isinstance(target_node, CommentedSeq):
                    Nodes.append_list_element(target_node, rhs)
                    merge_performed = True
                elif isinstance(target_node, CommentedMap):
                    raise MergeException(
                        "Impossible to add Scalar value, {}, to a Hash without"
                        " a key.  Change the value to a 'key: value' pair, a"
                        " '{{key: value}}' Hash, or change the merge target to"
                        " an Array or other Scalar value.".format(rhs),
                        insert_at)
                else:
                    lhs_proc.set_value(insert_at, rhs)
                    merge_performed = True

        self.logger.debug("Completed merge operation, resulting in document:",
                          prefix="Merger::merge_with:  ",
                          data=self.data)

        if not merge_performed:
            raise MergeException(
                "A merge was not performed.  Ensure your target path matches"
                " at least one node in the left document(s).", insert_at)
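Driving merge_with end to end requires a Merger wired to a MergerConfig. A rough sketch, assuming MergerConfig accepts an argparse-like namespace whose attribute names mirror the yaml-merge CLI defaults (these names are an assumption and may vary by version):

from types import SimpleNamespace

from ruamel.yaml import YAML
from yamlpath.merger import Merger, MergerConfig
from yamlpath.wrappers import ConsolePrinter

log = ConsolePrinter(SimpleNamespace(quiet=True, verbose=False, debug=False))
yaml = YAML()
lhs = yaml.load("hash: {lhs_key: 1}")
rhs = yaml.load("hash: {rhs_key: 2}")

# Assumption: these attributes mirror yaml-merge's CLI defaults
args = SimpleNamespace(config=None, anchors="stop", mergeat="/",
                       arrays="all", hashes="deep", aoh="all")
merger = Merger(log, lhs, MergerConfig(log, args))
merger.merge_with(rhs)  # lhs should now hold both keys under 'hash'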