def test_get_none_data_nodes(self, quiet_logger):
    """Queries against a None (empty) document must match nothing."""
    processor = Processor(quiet_logger, None)
    yamlpath = YAMLPath("abc")

    # Every query form below must yield an empty result set.
    matches = sum(1 for _ in processor.get_nodes(yamlpath, mustexist=False))
    matches += sum(1 for _ in processor.get_nodes(yamlpath, mustexist=True))
    matches += sum(1 for _ in processor._get_required_nodes(None, yamlpath))
    assert matches == 0
def test_no_attrs_to_scalars_errors(self, quiet_logger):
    """Index or attribute access against a Scalar must raise an error."""
    yamldata = """---
scalar: value
"""
    document = YAML().load(yamldata)
    processor = Processor(quiet_logger, document)

    with pytest.raises(YAMLPathException) as ex:
        list(processor.get_nodes("scalar[6]"))
    assert "Cannot add" in str(ex.value)

    with pytest.raises(YAMLPathException) as ex:
        list(processor.get_nodes("scalar.key"))
    assert "Cannot add" in str(ex.value)
def test_set_value(self, quiet_logger, yamlpath, value, tally, mustexist, vformat, pathsep):
    """Set a value at a YAML Path, then verify it landed at every match."""
    yamldata = """---
aliases:
  - &testAnchor Initial Value
top_array:
  # Comment 1
  - 1
  # Comment 2
  - 2
# Comment N
top_scalar: Top-level plain scalar string
"""
    document = YAML().load(yamldata)
    processor = Processor(quiet_logger, document)
    processor.set_value(
        yamlpath, value, mustexist=mustexist, value_format=vformat,
        pathsep=pathsep)

    verified = 0
    for coord in processor.get_nodes(yamlpath, mustexist=mustexist):
        assert coord == value
        verified += 1
    assert verified == tally
def test_get_every_data_type(self, quiet_logger):
    """Verify each YAML scalar form unwraps to the expected Python value."""
    # Contributed by https://github.com/AndydeCleyre
    yamldata = """---
intthing: 6
floatthing: 6.8
yesthing: yes
nothing: no
truething: true
falsething: false
nullthing: null
nothingthing:
emptystring: ""
nullstring: "null"
"""

    # Note: YAML 1.2 does not treat bare yes/no as booleans, hence strings.
    results = [6, 6.8, "yes", "no", True, False, None, None, "", "null"]

    yaml = YAML()
    data = yaml.load(yamldata)
    processor = Processor(quiet_logger, data)
    yamlpath = YAMLPath("*")

    match_index = 0
    for node in processor.get_nodes(yamlpath):
        assert unwrap_node_coords(node) == results[match_index]
        match_index += 1
    # BUGFIX: there was previously no final tally, so a query matching zero
    # nodes passed vacuously; require that every expected result was seen.
    assert match_index == len(results)
def test_set_value(self, quiet_logger, yamlpath, value, tally, mustexist, vformat, pathsep):
    """Set a value at a YAML Path and verify every matching node updated."""
    yamldata = """---
aliases:
  - &testAnchor Initial Value
top_array:
  # Comment 1
  - 1
  # Comment 2
  - 2
# Comment N
top_scalar: Top-level plain scalar string
top_hash:
  positive_float: 3.14159265358
  negative_float: -11.034
  null_value:
"""
    document = YAML().load(yamldata)
    processor = Processor(quiet_logger, document)
    processor.set_value(
        yamlpath, value, mustexist=mustexist, value_format=vformat,
        pathsep=pathsep)

    verified = 0
    for coord in processor.get_nodes(yamlpath, mustexist=mustexist):
        assert unwrap_node_coords(coord) == value
        verified += 1
    assert verified == tally
def test_key_anchor_changes(self, quiet_logger, yamlpath, value, tally, mustexist, vformat, pathsep):
    """Write through anchored/aliased hash keys and verify the new values."""
    yamldata = """---
anchorKeys:
  &keyOne aliasOne: 11A1
  &keyTwo aliasTwo: 22B2
  &recursiveAnchorKey subjectKey: *recursiveAnchorKey
hash:
  *keyOne :
    subval: 1.1
  *keyTwo :
    subval: 2.2
  *recursiveAnchorKey :
    subval: 3.3
"""
    document = YAML().load(yamldata)
    processor = Processor(quiet_logger, document)
    target_path = YAMLPath(yamlpath)
    processor.set_value(
        target_path, value, mustexist=mustexist, value_format=vformat,
        pathsep=pathsep)

    verified = 0
    for coord in processor.get_nodes(target_path):
        assert unwrap_node_coords(coord) == value
        verified += 1
    assert verified == tally
def test_set_nonunique_values(self, quiet_logger, yamlpath, value, verifications):
    """Set one value, then confirm aliased references across the document."""
    yamldata = """---
aliases:
  - &alias_number 1
  - &alias_bool true
number: 1
bool: true
alias_number: *alias_number
alias_bool: *alias_bool
hash:
  number: 1
  bool: true
  alias_number: *alias_number
  alias_bool: *alias_bool
complex:
  hash:
    number: 1
    bool: true
    alias_number: *alias_number
    alias_bool: *alias_bool
"""
    document = YAML().load(yamldata)
    processor = Processor(quiet_logger, document)
    processor.set_value(yamlpath, value)

    # Each verification pairs a query path with its expected value.
    for verify_path, expected in verifications:
        for coord in processor.get_nodes(verify_path):
            assert unwrap_node_coords(coord) == expected
def test_scalar_collectors(self, quiet_logger, yamlpath, results):
    """Collector expressions over scalars must yield the expected values."""
    yamldata = """---
list1:
  - 1
  - 2
  - 3
list2:
  - 4
  - 5
  - 6
exclude:
  - 3
  - 4
"""
    processor = Processor(quiet_logger, YAML().load(yamldata))

    # Note that Collectors deal with virtual DOMs, so mustexist must always
    # be set True. Otherwise, ephemeral virtual nodes would be created and
    # discarded. Is this desirable? Maybe, but not today. For now, using
    # Collectors without setting mustexist=True will be undefined behavior.
    matched = 0
    for coord in processor.get_nodes(yamlpath, mustexist=True):
        assert unwrap_node_coords(coord) == results[matched]
        matched += 1
    assert matched == len(results)
def _prepare_user_rules(
    self, proc: Processor, section: str, collector: dict
) -> None:
    """
    Identify DOM nodes matching user-defined diff rules.

    Parameters:
    1. proc (Processor) Reference to the DOM Processor.
    2. section (str) User-configuration file section defining the
       diff rules to apply.
    3. collector (dict) Storage collector for matching nodes.

    Returns: N/A
    """
    if self.config is None or section not in self.config:
        self.log.warning(
            "User-specified configuration file has no {} section.".format(
                section))
        return

    for rule_key in self.config[section]:
        rule_value = self.config[section][rule_key]

        if "=" in rule_value:
            # There were at least two = signs on the configuration line;
            # reconstitute the line and split on the right-most = sign.
            conf_line = rule_key + "=" + rule_value
            delim_pos = conf_line.rfind("=")
            rule_key = conf_line[0:delim_pos].strip()
            rule_value = conf_line[delim_pos + 1:].strip()
            self.log.debug(
                "DifferConfig::_prepare_user_rules: Reconstituted"
                " configuration line '{}' to extract adjusted key '{}'"
                " with value '{}'".format(conf_line, rule_key, rule_value))

        # FIX: the key was previously wrapped in YAMLPath twice
        # (YAMLPath(YAMLPath(rule_key))); one construction suffices.
        yaml_path = YAMLPath(rule_key)
        self.log.debug(
            "DifferConfig::_prepare_user_rules: Matching '{}' nodes to"
            " YAML Path '{}' from key, {}.".format(
                section, yaml_path, rule_key))
        try:
            for node_coord in proc.get_nodes(yaml_path, mustexist=True):
                self.log.debug(
                    "Node will have comparisons rule, {}:".format(
                        rule_value),
                    prefix="DifferConfig::_prepare_user_rules: ",
                    data=node_coord.node)
                collector[node_coord] = rule_value
        except YAMLPathException:
            # A rule which matches no nodes is noted but not fatal.
            self.log.warning("{} YAML Path matches no nodes: {}".format(
                section, yaml_path))

    self.log.debug("Matched rules to nodes:",
                   prefix="DifferConfig::_prepare_user_rules: ")
    for node_coord, diff_rule in collector.items():
        self.log.debug("... RULE: {}".format(diff_rule),
                       prefix="DifferConfig::_prepare_user_rules: ")
        self.log.debug("... NODE:", data=node_coord,
                       prefix="DifferConfig::_prepare_user_rules: ")
def test_illegal_traversal_recursion(self, quiet_logger):
    """Back-to-back traversal segments (**.**) must be rejected."""
    yamldata = """---
any: data
"""
    processor = Processor(quiet_logger, YAML().load(yamldata))
    with pytest.raises(YAMLPathException) as ex:
        list(processor.get_nodes("**.**"))
    assert "Repeating traversals are not allowed" in str(ex.value)
def test_enforce_pathsep(self, quiet_logger):
    """An explicit pathsep must be honored when evaluating the query."""
    yamldata = """---
aliases:
  - &aliasAnchorOne Anchored Scalar Value
"""
    processor = Processor(quiet_logger, YAML().load(yamldata))
    anchor_path = YAMLPath("aliases[&aliasAnchorOne]")
    for coord in processor.get_nodes(
            anchor_path, pathsep=PathSeperators.FSLASH):
        assert unwrap_node_coords(coord) == "Anchored Scalar Value"
def test_adjoined_collectors_error(self, quiet_logger):
    """Two adjoining Collectors with no operator between them must fail."""
    yamldata = """---
key: value
"""
    processor = Processor(quiet_logger, YAML().load(yamldata))
    with pytest.raises(YAMLPathException) as ex:
        list(processor.get_nodes("(&arrayOfHashes.step)(disabled_steps)"))
    assert "has no meaning" in str(ex.value)
def test_get_none_data_nodes(self, quiet_logger):
    """A None document yields nothing, except traversal into null."""
    processor = Processor(quiet_logger, None)
    yamlpath = YAMLPath("abc")

    optional_matches = sum(
        1 for _ in processor.get_nodes(yamlpath, mustexist=False))
    must_exist_matches = sum(
        1 for _ in processor.get_nodes(yamlpath, mustexist=True))
    req_node_matches = sum(
        1 for _ in processor._get_required_nodes(None, yamlpath))
    traversal_matches = sum(
        1 for _ in processor._get_nodes_by_traversal(None, yamlpath, 0))

    assert optional_matches == 0
    assert must_exist_matches == 0
    assert req_node_matches == 0
    assert traversal_matches == 1  # A None node traverses into null
def test_no_attrs_to_arrays_error(self, quiet_logger):
    """Attribute access against an Array must raise YAMLPathException."""
    yamldata = """---
array:
  - one
  - two
"""
    processor = Processor(quiet_logger, YAML().load(yamldata))
    with pytest.raises(YAMLPathException) as ex:
        list(processor.get_nodes("array.attr"))
    assert "Cannot add" in str(ex.value)
def test_get_nodes_array_impossible_type_error(self, quiet_logger):
    """A search requiring an impossible default node type must error."""
    yamldata = """---
array:
  - 1
  - 2
"""
    processor = Processor(quiet_logger, YAML().load(yamldata))
    with pytest.raises(YAMLPathException) as ex:
        list(processor.get_nodes(
            r"/array/(.=~/^.{3,4}$/)", default_value="New value"))
    assert "Cannot add" in str(ex.value)
def extract_values(data: dict, path: str) -> list:
    """
    Extract every node value matching a YAML Path from a parsed document.

    Parameters:
    1. data (dict) Parsed YAML/JSON document data.
    2. path (str) The YAML Path to match against the document.

    Returns: (list) The matched node values; an empty list when the path
    matches nothing or raises a YAMLPathException.
    """
    # Minimal stand-in for the argparse namespace ConsolePrinter expects.
    class Args:
        debug = True
        verbose = False
        quiet = True

    log = ConsolePrinter(Args())
    try:
        processor = Processor(log, data)
        return [n.node for n in processor.get_nodes(path)]
    except YAMLPathException as ex:
        print(ex)
        # BUGFIX: previously fell off the end here, implicitly returning
        # None; an empty list honors the declared list return contract.
        return []
def _get_merge_target_nodes(
    self, insert_at: YAMLPath, lhs_proc: Processor, rhs: Any
) -> List[NodeCoords]:
    """Get a list of LHS insertion points for merge results."""
    # Gather every LHS destination node at once; missing interim nodes are
    # built from the RHS value via default_value.
    nodes: List[NodeCoords] = list(
        lhs_proc.get_nodes(insert_at, default_value=rhs))

    self.logger.debug(
        "Targetting these destinations for merge results:",
        prefix="Merger::_get_merge_target_nodes: ",
        data=nodes)
    return nodes
def test_no_index_to_hashes_error(self, quiet_logger):
    """[#] element-number syntax must be rejected in DICTIONARY context."""
    # Using [#] syntax is a disambiguated INDEX ELEMENT NUMBER. In
    # DICTIONARY context, this would create an ambiguous request to access
    # either the #th value or a value whose key is the literal #. As such,
    # an error is deliberately generated when [#] syntax is used against
    # dictionaries. When you actually want a DICTIONARY KEY that happens
    # to be an integer, omit the square braces, [].
    yamldata = """---
hash:
  key: value
"""
    processor = Processor(quiet_logger, YAML().load(yamldata))
    with pytest.raises(YAMLPathException) as ex:
        list(processor.get_nodes("hash[6]"))
    assert "Cannot add" in str(ex.value)
def test_get_impossible_nodes_error(self, quiet_logger, yamlpath, mustexist):
    """Paths which cannot match any node must raise YAMLPathException."""
    yamldata = """---
ints:
  - 1
  - 2
  - 3
  - 4
  - 5
floats:
  - 1.1
  - 2.2
  - 3.3
"""
    processor = Processor(quiet_logger, YAML().load(yamldata))
    with pytest.raises(YAMLPathException) as ex:
        list(processor.get_nodes(yamlpath, mustexist=mustexist))
    assert "does not match any nodes" in str(ex.value)
def test_delete_nodes(self, quiet_logger, delete_yamlpath, pathseperator, old_deleted_nodes, new_flat_data):
    """Delete nodes at a YAML Path; verify deletions and the remainder."""
    yamldata = """---
aliases:
  - &alias_number 1
  - &alias_bool true
number: 1
bool: true
alias_number: *alias_number
alias_bool: *alias_bool
hash:
  number: 1
  bool: true
  alias_number: *alias_number
  alias_bool: *alias_bool
complex:
  hash:
    number: 1
    bool: true
    alias_number: *alias_number
    alias_bool: *alias_bool
records:
  - id: ABC
    data: 123
  - id: BCD
    data: 987
  - id: CDE
    data: 8B8
"""
    yaml = YAML()
    data = yaml.load(yamldata)
    processor = Processor(quiet_logger, data)

    # The return set must be received lest no nodes will be deleted
    deleted_nodes = []
    for nc in processor.delete_nodes(delete_yamlpath, pathsep=pathseperator):
        deleted_nodes.append(nc)

    # BUGFIX: these checks were written as `assert a, b`, which treats the
    # second expression as the assertion MESSAGE and never compares the
    # values (the loops were no-ops); use explicit equality instead.
    for (test_value, verify_node_coord) in zip(
            old_deleted_nodes, deleted_nodes):
        assert test_value == unwrap_node_coords(verify_node_coord)

    for (test_value, verify_node_coord) in zip(
            new_flat_data, processor.get_nodes("**")):
        assert test_value == unwrap_node_coords(verify_node_coord)
def test_cannot_add_novel_alias_keys(self, quiet_logger):
    """Referencing an undefined anchor in a path must raise an error."""
    yamldata = """---
anchorKeys:
  &keyOne aliasOne: 1 1 Alpha 1
  &keyTwo aliasTwo: 2 2 Beta 2
hash:
  *keyOne :
    subval: 1.1
  *keyTwo :
    subval: 2.2
"""
    processor = Processor(quiet_logger, YAML().load(yamldata))
    # &keyThree is deliberately not defined anywhere in the document.
    bad_path = YAMLPath("hash[&keyThree].subval")
    with pytest.raises(YAMLPathException) as ex:
        list(processor.get_nodes(bad_path))
    assert "Cannot add" in str(ex.value)
def test_get_nodes(self, quiet_logger, yamlpath, results, mustexist, default):
    """Retrieve nodes at a YAML Path and compare to the expected results."""
    yamldata = """---
aliases:
  - &aliasAnchorOne Anchored Scalar Value
  - &aliasAnchorTwo Hey, Number Two!
array_of_hashes: &arrayOfHashes
  - step: 1
    name: one
  - step: 2
    name: two
rollback_hashes:
  on_condition:
    failure:
      - step: 3
        name: three
      - step: 4
        name: four
disabled_steps:
  - 2
  - 3
squads:
  alpha: 1.1
  bravo: 2.2
  charlie: 3.3
  delta: 4.4
number_keys:
  1: one
  2: two
  3: three
"""
    processor = Processor(quiet_logger, YAML().load(yamldata))

    matched = 0
    for coord in processor.get_nodes(
            yamlpath, mustexist=mustexist, default_value=default):
        assert coord == results[matched]
        matched += 1
    assert matched == len(results)
def test_key_anchor_children(self, quiet_logger):
    """Set and re-read a value stored under an anchored hash key."""
    yamldata = """---
anchorKeys:
  &keyOne aliasOne: 1 1 Alpha 1
  &keyTwo aliasTwo: 2 2 Beta 2
hash:
  *keyOne :
    subval: 1.1
  *keyTwo :
    subval: 2.2
"""
    processor = Processor(quiet_logger, YAML().load(yamldata))
    target = YAMLPath("hash[&keyTwo].subval")
    newvalue = "Mute audibles"
    processor.set_value(target, newvalue, mustexist=True)

    verified = 0
    for coord in processor.get_nodes(target):
        assert unwrap_node_coords(coord) == newvalue
        verified += 1
    assert verified == 1
def check_playbook_file_removed_and_added(playbook_path):
    """
    Check whether a playbook removes a file that another task still edits.

    Parameters:
    1. playbook_path (str) Filesystem path to the playbook to evaluate.

    Returns: (bool) True when no conflicting tasks were found (or the file
    could not even be scanned for them); False when the playbook failed to
    load or a conflict was detected.
    """
    playbook_ok = True
    yaml_parser = Parsers.get_yaml_editor()

    logging_args = SimpleNamespace(quiet=False, verbose=False, debug=False)
    log = ConsolePrinter(logging_args)

    # Find every path removed by a file Task (also matches tasks within blocks)
    files_absent_string = "tasks.**.file[state=absent][parent()].path"
    files_absent_yamlpath = YAMLPath(files_absent_string)

    # BUGFIX: ansible_modules was previously defined inside the try-loop but
    # referenced from the except handler; if the first get_nodes() raised
    # before any iteration, the handler hit an UnboundLocalError. Define it
    # before the try block.
    ansible_modules = ["lineinfile", "blockinfile", "copy"]

    path_editing_tasks_yamlpath = ""

    log.info("Info: Evaluating playbook '{}'".format(playbook_path))
    (yaml_data, doc_loaded) = Parsers.get_yaml_data(
        yaml_parser, log, playbook_path)
    if not doc_loaded:
        # There was an issue loading the file; an error message has already
        # been printed via ConsolePrinter.
        return False

    processor = Processor(log, yaml_data)
    try:
        for node in processor.get_nodes(
                files_absent_yamlpath, mustexist=False):
            path = str(node)  # 'node' is a NodeCoords.
            if path == 'None':
                continue
            elif "{{" in path:
                # Identified path is a Jinja expression, unfortunately there
                # is no easy way to get the actual path without making this
                # test very complicated
                continue

            # Check if this paths is used in any of the following ansible
            # modules
            path_editing_tasks_string = \
                "tasks.**.[.=~/{modules}/][*='{path}'][parent()].name"
            path_editing_tasks_yamlpath = YAMLPath(
                path_editing_tasks_string.format(
                    modules="|".join(ansible_modules), path=node))
            for task in processor.get_nodes(
                    path_editing_tasks_yamlpath, mustexist=False):
                log.info(
                    "Error: Task '{}' manipulates a file that is removed by"
                    " another task".format(task))
                playbook_ok = False
    except YAMLPathException as ex:
        no_file_msg = (
            "Cannot add PathSegmentTypes.TRAVERSE subreference to lists at"
            " 'None' in '{}'.")
        if str(ex) == no_file_msg.format(files_absent_string):
            log.info(
                "Info: Playbook {} has no 'file' tasks.".format(
                    playbook_path))
        elif path_editing_tasks_yamlpath and str(ex) == no_file_msg.format(
                path_editing_tasks_yamlpath):
            log.info("Info: Playbook {} has no '{}' tasks.".format(
                playbook_path, " ".join(ansible_modules)))
        else:
            log.info("Error: {}.".format(ex))

    return playbook_ok
def test_good_multi_replacements(self, script_runner, tmp_path_factory, old_eyaml_keys, new_eyaml_keys, quiet_logger):
    """
    End-to-end: rotate EYAML keys over two files and verify the results.

    Confirms both files are rewritten, block/string formatting survives
    rotation, and pre-/post-rotation plaintext values are identical.
    """
    # Function-local imports keep these dependencies scoped to this test.
    from yamlpath.func import unwrap_node_coords
    from yamlpath.common import Parsers
    from yamlpath import Processor
    from yamlpath.eyaml import EYAMLProcessor

    simple_content = """---
encrypted_string: ENC[PKCS7,MIIBiQYJKoZIhvcNAQcDoIIBejCCAXYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAHA4rPcTzvgzPLtnGz3yoyX/kVlQ5TnPXcScXK2bwjguGZLkuzv/JVPAsOm4t6GlnROpy4zb/lUMHRJDChJhPLrSj919B8//huoMgw0EU5XTcaN6jeDDjL+vhjswjvLFOux66UwvMo8sRci/e2tlFiam8VgxzV0hpF2qRrL/l84V04gL45kq4PCYDWrJNynOwYVbSIF+qc5HaF25H8kHq1lD3RB6Ob/J942Q7k5Qt7W9mNm9cKZmxwgtUgIZWXW6mcPJ2dXDB/RuPJJSrLsb1VU/DkhdgxaNzvLCA+MViyoFUkCfHFNZbaHKNkoYXBy7dLmoh/E5tKv99FeG/7CzL3DBMBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBCVU5Mjt8+4dLkoqB9YArfkgCDkdIhXR9T1M4YYa1qTE6by61VPU3g1aMExRmo4tNZ8FQ==]
encrypted_block: >
  ENC[PKCS7,MIIBeQYJKoZIhvcNAQcDoIIBajCCAWYCAQAxggEhMIIBHQIBADAFMAACAQEw
  DQYJKoZIhvcNAQEBBQAEggEAnxQVqyIgRTb/+VP4Q+DLJcnlS8YPouXEW8+z
  it9uwUA02CEPxCEU944GcHpgTY3EEtkm+2Z/jgXI119VMML+OOQ1NkwUiAw/
  wq0vwz2D16X31XzhedQN5FZbfZ1C+2tWSQfCjE0bu7IeHfyR+k2ssD11kNZh
  JDEr2bM2dwOdT0y7VGcQ06vI9gw6UXcwYAgS6FoLm7WmFftjcYiNB+0EJSW0
  VcTn2gveaw9iOQcum/Grby+9Ybs28fWd8BoU+ZWDpoIMEceujNa9okIXNPJO
  jcvv1sgauwJ3RX6WFQIy/beS2RT5EOLhWIZCAQCcgJWgovu3maB7dEUZ0NLG
  OYUR7zA8BgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBAbO16EzQ5/cdcvgB0g
  tpKIgBAEgTLT5n9Jtc9venK0CKso]
"""
    anchored_content = """---
aliases:
  - &blockStyle >
    ENC[PKCS7,MIIBiQYJKoZIhvcNAQcDoIIBejCCAXYCAQAxggEhMIIBHQIBADAFMAACAQEw
    DQYJKoZIhvcNAQEBBQAEggEArvk6OYa1gACTdrWq2SpCrtGRlc61la5AGU7L
    aLTyKfqD9vqx71RDjobfOF96No07kLsEpoAJ+LKKHNjdG6kjvpGPmttj9Dkm
    XVoU6A+YCmm4iYFKD/NkoSOEyAkoDOXSqdjrgt0f37GefEsXt6cqAavDpUJm
    pmc0KI4TCG5zpfCxqttMs+stOY3Y+0WokkulQujZ7K3SdWUSHIysgMrWiect
    Wdg5unxN1A/aeyvhgvYSNPjU9KBco7SDnigSs9InW/QghJFrZRrDhTp1oTUc
    qK5lKvaseHkVGi91vPWeLQxZt1loJB5zL6j5BxMbvRfJK+wc3ax2u4x8WTAB
    EurCwzBMBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBAwcy7jvcOGcMfLEtug
    LEXbgCBkocdckuDe14mVGmUmM++xN34OEVRCeGVWWUnWq1DJ4Q==]
  - &stringStyle ENC[PKCS7,MIIBiQYJKoZIhvcNAQcDoIIBejCCAXYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAIu44u62q5sVfzC7kytLi2Z/EzH2DKr4vDsoqDBeSZ71aRku/uSrjyiO4lyoq9Kva+eBAyjBay5fnqPVBaU3Rud2pdEoZEoyofi02jn4hxUKpAO1W0AUgsQolGe53qOdM4U8RbwnTR0gr3gp2mCd18pH3SRMP9ryrsBAxGzJ6mR3RgdZnlTlqVGXCeWUeVpbH+lcHw3uvd+o/xkvJ/3ypxz+rWILiAZ3QlCirzn/qb2fHuKf3VBh8RVFuQDaM5voajZlgjD6KzNCsbATOqOA6eJI4j0ngPdDlIjGHAnahuyluQ5f5SIaIjLC+ZeCOfIYni0MQ+BHO0JNbccjq2Unb7TBMBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBCYmAI0Ao3Ok1cSmVw0SgQGgCBK62z1r5RfRjf1xKfqDxTsGUHfsUmM3EjGJfnWzCRvuQ==]
block: *blockStyle
string: *stringStyle
yet_another:
  'more.complex.child': *blockStyle
"""
    simple_file = create_temp_yaml_file(tmp_path_factory, simple_content)
    anchored_file = create_temp_yaml_file(tmp_path_factory, anchored_content)

    result = script_runner.run(
        self.command,
        "--newprivatekey={}".format(new_eyaml_keys[0]),
        "--newpublickey={}".format(new_eyaml_keys[1]),
        "--oldprivatekey={}".format(old_eyaml_keys[0]),
        "--oldpublickey={}".format(old_eyaml_keys[1]),
        simple_file,
        anchored_file
    )
    assert result.success, result.stderr

    with open(simple_file, 'r') as fhnd:
        simple_data = fhnd.read()
    with open(anchored_file, 'r') as fhnd:
        anchored_data = fhnd.read()

    # Rotation must actually change the on-disk ciphertext.
    assert not simple_data == simple_content
    assert not anchored_data == anchored_content

    # Verify that block and string formatting is correct
    yaml = Parsers.get_yaml_editor()

    (yaml_rotated_data, doc_loaded) = Parsers.get_yaml_data(
        yaml, quiet_logger,
        anchored_data, literal=True)
    if not doc_loaded:
        # An error message has already been logged
        assert False, "Rotated anchored data failed to load"
    source_processor = Processor(quiet_logger, yaml_rotated_data)
    # A block-style value must contain no spaces (each chunk on its own line).
    for node in source_processor.get_nodes('/block', mustexist=True):
        assert not ' ' in unwrap_node_coords(node)

    # Test that the pre- and post-rotated values are identical
    (yaml_anchored_data, doc_loaded) = Parsers.get_yaml_data(
        yaml, quiet_logger,
        anchored_content, literal=True)
    if not doc_loaded:
        # An error message has already been logged
        assert False, "Original anchored data failed to load"
    (yaml_rotated_data, doc_loaded) = Parsers.get_yaml_data(
        yaml, quiet_logger,
        anchored_data, literal=True)
    if not doc_loaded:
        # An error message has already been logged
        assert False, "Rotated anchored data failed to load"

    source_processor = EYAMLProcessor(
        quiet_logger, yaml_anchored_data,
        privatekey=old_eyaml_keys[0],
        publickey=old_eyaml_keys[1])
    for node in source_processor.get_eyaml_values(
        '/block', True
    ):
        assert unwrap_node_coords(node) == 'This is a test value.'

    rotated_processor = EYAMLProcessor(
        quiet_logger, yaml_rotated_data,
        privatekey=new_eyaml_keys[0],
        publickey=new_eyaml_keys[1])
    for node in rotated_processor.get_eyaml_values(
        '/block', True
    ):
        assert unwrap_node_coords(node) == 'This is a test value.'
def transform(cls, configuration: Configuration):
    """
    Replace textual access-level strings with their numeric values in-place.

    Parameters:
    1. configuration (Configuration) The configuration whose access-level
       entries will be rewritten to numeric values.

    Returns: N/A; mutates configuration.config. Calls fatal() (process
    exit) when a string is not a recognized access level.
    """
    logging_args = SimpleNamespace(quiet=False, verbose=False, debug=False)
    log = ConsolePrinter(logging_args)
    processor = Processor(log, configuration.config)

    def set_numeric_access_level(node_coordinate):
        # Convert one node's textual access level to its numeric value,
        # aborting via fatal() when the string is not a valid level.
        # (Extracted helper: this logic was previously duplicated verbatim
        # in both loops below.)
        access_level_string = str(node_coordinate.node)
        try:
            node_coordinate.parent[
                node_coordinate.parentref
            ] = AccessLevel.get_value(access_level_string)
        except KeyError:
            fatal(
                f"Configuration string '{access_level_string}' is not one of the valid access levels:"
                f" {', '.join(AccessLevel.get_canonical_names())}",
                exit_code=EXIT_INVALID_INPUT,
            )

    # [.!<100] effectively means that the value is non-numerical
    paths_to_hashes = [
        # # branches, old syntax
        "**.push_access_level[.!<100]",
        "**.merge_access_level[.!<100]",
        "**.unprotect_access_level[.!<100]",
        # members & group members
        "**.access_level[.!<100]",
        "**.group_access[.!<100]",
        # old syntax
        "**.group_access_level[.!<100]",
        # tags
        "**.create_access_level[.!<100]",
    ]

    for path in paths_to_hashes:
        try:
            for node_coordinate in processor.get_nodes(path):
                set_numeric_access_level(node_coordinate)
        except YAMLPathException:
            # No matches for this path in this configuration; not an error.
            pass

    # These are different than the above, as they are elements of arrays,
    # so we need a different search query and an extra condition for
    # transformation.
    paths_to_arrays = [
        # # branches, new GitLab Premium syntax
        "**.allowed_to_push.*.[access_level!<100]",
        "**.allowed_to_merge.*.[access_level!<100]",
        "**.allowed_to_unprotect.*.[access_level!<100]",
    ]

    for path in paths_to_arrays:
        try:
            for node_coordinate in processor.get_nodes(path):
                # Only rewrite the access_level member of each array element.
                if node_coordinate.parentref == "access_level":
                    set_numeric_access_level(node_coordinate)
        except YAMLPathException:
            # No matches for this path in this configuration; not an error.
            pass
def test_get_nodes(self, quiet_logger, yamlpath, results, mustexist, default):
    """Retrieve nodes for a wide variety of YAML Path query forms."""
    yamldata = """---
aliases:
  - &aliasAnchorOne Anchored Scalar Value
  - &aliasAnchorTwo Hey, Number Two!
array_of_hashes: &arrayOfHashes
  - step: 1
    name: one
  - step: 2
    name: two
rollback_hashes:
  on_condition:
    failure:
      - step: 3
        name: three
      - step: 4
        name: four
disabled_steps:
  - 2
  - 3
squads:
  alpha: 1.1
  bravo: 2.2
  charlie: 3.3
  delta: 4.4
number_keys:
  1: one
  2: two
  3: three

# For traversal tests:
name: Name 0-0
lots_of_names:
  name: Name 1-1
  tier1:
    name: Name 2-1
    tier2:
      name: Name 3-1
      list_of_named_objects:
        - name: Name 4-1
          tag: Tag 4-1
          other: Other 4-1
          dude: Dude 4-1
        - tag: Tag 4-2
          name: Name 4-2
          dude: Dude 4-2
          other: Other 4-2
        - other: Other 4-3
          dude: Dude 4-3
          tag: Tag 4-3
          name: Name 4-3
        - dude: Dude 4-4
          tag: Tag 4-4
          name: Name 4-4
          other: Other 4-4

###############################################################################
# For descendent searching:
products_hash:
  doodad:
    availability:
      start:
        date: 2020-10-10
        time: 08:00
      stop:
        date: 2020-10-29
        time: 17:00
    dimensions:
      width: 5
      height: 5
      depth: 5
      weight: 10
  doohickey:
    availability:
      start:
        date: 2020-08-01
        time: 10:00
      stop:
        date: 2020-09-25
        time: 10:00
    dimensions:
      width: 1
      height: 2
      depth: 3
      weight: 4
  widget:
    availability:
      start:
        date: 2020-01-01
        time: 12:00
      stop:
        date: 2020-01-01
        time: 16:00
    dimensions:
      width: 9
      height: 10
      depth: 1
      weight: 4
products_array:
  - product: doodad
    availability:
      start:
        date: 2020-10-10
        time: 08:00
      stop:
        date: 2020-10-29
        time: 17:00
    dimensions:
      width: 5
      height: 5
      depth: 5
      weight: 10
  - product: doohickey
    availability:
      start:
        date: 2020-08-01
        time: 10:00
      stop:
        date: 2020-09-25
        time: 10:00
    dimensions:
      width: 1
      height: 2
      depth: 3
      weight: 4
  - product: widget
    availability:
      start:
        date: 2020-01-01
        time: 12:00
      stop:
        date: 2020-01-01
        time: 16:00
    dimensions:
      width: 9
      height: 10
      depth: 1
      weight: 4
###############################################################################
"""
    yaml = YAML()
    processor = Processor(quiet_logger, yaml.load(yamldata))
    matchidx = 0
    # Every matched node must align, in order, with the expected results.
    for node in processor.get_nodes(
        yamlpath, mustexist=mustexist, default_value=default
    ):
        assert unwrap_node_coords(node) == results[matchidx]
        matchidx += 1
    # All expected results must have been consumed (no missing matches).
    assert len(results) == matchidx
def merge_with(self, rhs: Any) -> None:
    """
    Merge this document with another.

    Parameters:
    1. rhs (Any) The document to merge into this one.

    Returns: N/A

    Raises:
    - `MergeException` when a clean merge is impossible.
    """
    # Do nothing when RHS is None (empty document)
    if rhs is None:
        return

    # Remove all comments (no sensible way to merge them)
    Parsers.delete_all_comments(rhs)

    # When LHS is None (empty document), just dump all of RHS into it,
    # honoring any --mergeat|-m location as best as possible.
    insert_at = self.config.get_insertion_point()
    if self.data is None:
        self.logger.debug(
            "Replacing None data with:", prefix="Merger::merge_with: ",
            data=rhs, data_header=" *****")
        self.data = Nodes.build_next_node(insert_at, 0, rhs)
        self.logger.debug(
            "Merged document is now:", prefix="Merger::merge_with: ",
            data=self.data, footer=" ***** ***** *****")
        if isinstance(rhs, (dict, list)):
            # Only Scalar values need further processing
            return

    # Resolve any anchor conflicts
    self._resolve_anchor_conflicts(rhs)

    # Prepare the merge rules
    self.config.prepare(rhs)

    # Identify a reasonable default should a DOM need to be built up to
    # receive the RHS data.
    default_val = rhs
    if isinstance(rhs, CommentedMap):
        default_val = {}
    elif isinstance(rhs, CommentedSeq):
        default_val = []

    # Loop through all insertion points and the elements in RHS
    merge_performed = False
    nodes: List[NodeCoords] = []
    lhs_proc = Processor(self.logger, self.data)
    # Materialize the target list first; merging while the generator is
    # still walking the DOM would mutate the structure mid-iteration.
    for node_coord in lhs_proc.get_nodes(
            insert_at, default_value=default_val):
        nodes.append(node_coord)

    for node_coord in nodes:
        # Merge into the container itself when the match is a container;
        # otherwise merge into the matched node's parent container.
        target_node = (
            node_coord.node
            if isinstance(node_coord.node, (CommentedMap, CommentedSeq))
            else node_coord.parent)

        # Carry the destination's flow/block style over to the RHS data.
        Parsers.set_flow_style(
            rhs,
            (target_node.fa.flow_style()
             if hasattr(target_node, "fa")
             else None))

        if isinstance(rhs, CommentedMap):
            # The RHS document root is a map
            if isinstance(target_node, CommentedSeq):
                # But the destination is a list
                self._merge_lists(
                    target_node, CommentedSeq([rhs]), insert_at)
            else:
                self._merge_dicts(target_node, rhs, insert_at)

                # Synchronize YAML Tags
                self.logger.debug(
                    "Merger::merge_with: Setting LHS tag from {} to {}."
                    .format(target_node.tag.value, rhs.tag.value))
                target_node.yaml_set_tag(rhs.tag.value)
            merge_performed = True
        elif isinstance(rhs, CommentedSeq):
            # The RHS document root is a list
            self._merge_lists(target_node, rhs, insert_at)
            merge_performed = True

            # Synchronize any YAML Tag
            self.logger.debug(
                "Merger::merge_with: Setting LHS tag from {} to {}."
                .format(target_node.tag.value, rhs.tag.value))
            target_node.yaml_set_tag(rhs.tag.value)
        else:
            # The RHS document root is a Scalar value
            target_node = node_coord.node
            if isinstance(target_node, CommentedSeq):
                Nodes.append_list_element(target_node, rhs)
                merge_performed = True
            elif isinstance(target_node, CommentedMap):
                raise MergeException(
                    "Impossible to add Scalar value, {}, to a Hash without"
                    " a key. Change the value to a 'key: value' pair, a"
                    " '{{key: value}}' Hash, or change the merge target to"
                    " an Array or other Scalar value.".format(rhs),
                    insert_at)
            else:
                lhs_proc.set_value(insert_at, rhs)
                merge_performed = True

    self.logger.debug(
        "Completed merge operation, resulting in document:",
        prefix="Merger::merge_with: ", data=self.data)

    if not merge_performed:
        raise MergeException(
            "A merge was not performed. Ensure your target path matches"
            " at least one node in the left document(s).", insert_at)