def _parse_yaml(source: str, config_string: bool):
    """Parse YAML from a string or a file path and return the parsed data.

    source -- either literal YAML text or a filesystem path, depending on
              the config_string flag.
    config_string -- when truthy, treat source as literal YAML text;
                     otherwise treat it as a path to a YAML file.

    Exits the process with EXIT_INVALID_INPUT when parsing fails (the
    error has already been printed via ConsolePrinter by that point).
    """
    # A non-quiet, non-verbose, non-debug ConsolePrinter is required by the
    # yamlpath Parsers helpers below.
    logging_args = SimpleNamespace(quiet=False, verbose=False, debug=False)
    log = ConsolePrinter(logging_args)
    yaml = Parsers.get_yaml_editor()
    # for better backward compatibility with PyYAML (that supports only YAML 1.1) used in the previous
    # GitLabForm versions, let's force ruamel.yaml to use YAML version 1.1 by default too
    yaml.version = (1, 1)
    if config_string:
        # NOTE(review): the boolean parameter is rebound here to hold the
        # dedented YAML text before being passed to the parser.
        config_string = textwrap.dedent(source)
        verbose("Reading config from the provided string.")
        (yaml_data, doc_loaded) = Parsers.get_yaml_data(yaml, log, config_string, literal=True)
    else:
        config_path = source
        verbose(f"Reading config from file: {config_path}")
        (yaml_data, doc_loaded) = Parsers.get_yaml_data(yaml, log, config_path)
    if doc_loaded:
        debug("Config parsed successfully as YAML.")
    else:
        # an error message has already been printed via ConsolePrinter
        exit(EXIT_INVALID_INPUT)
    return yaml_data
def test_error_quiet_exit(self, capsys):
    """Quiet mode still emits errors on STDERR and exits when a code is given."""
    cli_opts = SimpleNamespace(verbose=False, quiet=True, debug=False)
    printer = ConsolePrinter(cli_opts)
    with pytest.raises(SystemExit):
        printer.error("Test", 27)
    captured = capsys.readouterr()
    assert captured.err == "ERROR: Test\n"
def main():
    """Main code.

    Merge every input YAML file (and optionally STDIN) into one or more
    merger documents, then write the merged result out.  Exit status is 0
    on success, 4 when the initial documents fail to load, or whatever
    merge_docs returns on a failed merge.
    """
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)

    # For the remainder of processing, overwrite overwrites output
    if args.overwrite:
        args.output = args.overwrite

    # Merge all input files
    yaml_editor = Parsers.get_yaml_editor()
    merge_config = MergerConfig(log, args)
    exit_state = 0
    consumed_stdin = False
    mergers: List[Merger] = []
    merge_count = 0
    for yaml_file in args.yaml_files:
        if yaml_file.strip() == '-':
            consumed_stdin = True

        log.debug(
            "yaml_merge::main: Processing file, {}".format(
                "STDIN" if yaml_file.strip() == "-" else yaml_file))

        # The first successfully loaded file primes the merger list; every
        # subsequent file is merged into it (RHS into LHS).
        if len(mergers) < 1:
            (mergers, mergers_loaded) = get_doc_mergers(
                log, yaml_editor, merge_config, yaml_file)
            if not mergers_loaded:
                exit_state = 4
                break
        else:
            # Merge RHS into LHS
            exit_state = merge_docs(
                log, yaml_editor, merge_config, mergers, yaml_file)
            if not exit_state == 0:
                break

        merge_count += 1

    # Check for a waiting STDIN document
    if (exit_state == 0
            and not consumed_stdin
            and not args.nostdin
            and not sys.stdin.isatty()
    ):
        exit_state = merge_docs(log, yaml_editor, merge_config, mergers, "-")
        merge_count += 1

    # When no merges have occurred, check for a single-doc merge request
    if (exit_state == 0
            and merge_count == 0
            and merge_config.get_multidoc_mode() is MultiDocModes.CONDENSE_ALL
    ):
        exit_state = merge_condense_all(log, mergers, [])

    # Output the final document
    if exit_state == 0:
        write_output_document(args, log, yaml_editor, mergers)

    sys.exit(exit_state)
def main(): """Main code.""" # Process any command-line arguments args = processcli() log = ConsolePrinter(args) validateargs(args, log) exit_state = 0 consumed_stdin = False yaml = Parsers.get_yaml_editor() for yaml_file in args.yaml_files: if yaml_file.strip() == '-': consumed_stdin = True log.debug("yaml_merge::main: Processing file, {}".format( "STDIN" if yaml_file.strip() == "-" else yaml_file)) proc_state = process_file(log, yaml, yaml_file) if proc_state != 0: exit_state = proc_state # Check for a waiting STDIN document if (exit_state == 0 and not consumed_stdin and not args.nostdin and not sys.stdin.isatty()): exit_state = process_file(log, yaml, "-") sys.exit(exit_state)
def test_debug_noisy(self, capsys):
    """debug() prints scalars, lists, and dicts, appending anchors as '; &name'."""
    args = SimpleNamespace(verbose=False, quiet=False, debug=True)
    logger = ConsolePrinter(args)
    anchoredkey = PlainScalarString("TestKey", anchor="KeyAnchor")
    anchoredval = PlainScalarString("TestVal", anchor="Anchor")

    # A bare anchored scalar renders its value followed by its anchor.
    logger.debug(anchoredval)
    console = capsys.readouterr()
    assert "\n".join([
        "DEBUG: TestVal; &Anchor",
    ]) + "\n" == console.out

    # List elements are rendered one per line with their index.
    logger.debug(["test", anchoredval])
    console = capsys.readouterr()
    assert "\n".join([
        "DEBUG: [0]=test",
        "DEBUG: [1]=TestVal; &Anchor",
    ]) + "\n" == console.out

    # Dict entries render as [key]=>value, including anchors on both sides.
    logger.debug({"ichi": 1, anchoredkey: anchoredval})
    console = capsys.readouterr()
    assert "\n".join([
        "DEBUG: [ichi]=>1",
        "DEBUG: [TestKey; &KeyAnchor]=>TestVal; &Anchor",
    ]) + "\n" == console.out
def test_critical_quiet(self, capsys):
    """critical() writes to STDERR and terminates even in quiet mode."""
    cli_opts = SimpleNamespace(verbose=False, quiet=True, debug=False)
    printer = ConsolePrinter(cli_opts)
    with pytest.raises(SystemExit):
        printer.critical("Test")
    captured = capsys.readouterr()
    assert captured.err == "CRITICAL: Test\n"
def test_null_docs_have_nothing_to_delete(self, capsys):
    """delete_nodes against a null document logs a refusal and yields nothing."""
    cli_opts = SimpleNamespace(verbose=False, quiet=False, debug=True)
    printer = ConsolePrinter(cli_opts)
    node_processor = Processor(printer, None)
    removed = [coordinate for coordinate in node_processor.delete_nodes("**")]
    captured = capsys.readouterr()
    assert "Refusing to delete nodes from a null document" in captured.out
def main():
    """Main code.

    Compare one document from each of two (possibly multi-document) YAML
    sources and report the differences.  Exits 1 when loading fails or
    when differences are found; 0 when the documents match.
    """
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)
    exit_state = 0
    lhs_file = args.yaml_files[0]
    rhs_file = args.yaml_files[1]
    lhs_yaml = Parsers.get_yaml_editor()
    rhs_yaml = Parsers.get_yaml_editor()
    (lhs_docs, lhs_loaded) = get_docs(log, lhs_yaml, lhs_file)
    (rhs_docs, rhs_loaded) = get_docs(log, rhs_yaml, rhs_file)
    lhs_doc_count = len(lhs_docs) if lhs_loaded else 0
    rhs_doc_count = len(rhs_docs) if rhs_loaded else 0
    # The *_document_index attributes may be absent; hasattr guards that.
    lhs_idx_set = (hasattr(args, "left_document_index")
                   and args.left_document_index is not None)
    rhs_idx_set = (hasattr(args, "right_document_index")
                   and args.right_document_index is not None)

    if not (lhs_loaded and rhs_loaded):
        # An error message has already been logged
        sys.exit(1)

    # Multi-document sources require an explicit document index per side.
    if lhs_doc_count > 1 and not lhs_idx_set:
        log.critical(
            ("--left-document-index|-L must be set; the source contains {}"
             " documents.").format(lhs_doc_count), 1)
    lhs_index = args.left_document_index if lhs_idx_set else 0
    lhs_document = get_doc(log, lhs_docs, lhs_index)

    if rhs_doc_count > 1 and not rhs_idx_set:
        log.critical(
            ("--right-document-index|-R must be set; the source contains {}"
             " documents.").format(rhs_doc_count), 1)
    rhs_index = args.right_document_index if rhs_idx_set else 0
    rhs_document = get_doc(log, rhs_docs, rhs_index)

    diff = Differ(
        DifferConfig(log, args), log, lhs_document,
        ignore_eyaml_values=args.ignore_eyaml_values, binary=args.eyaml,
        publickey=args.publickey, privatekey=args.privatekey)

    try:
        diff.compare_to(rhs_document)
    except EYAMLCommandException as ex:
        log.critical(ex, 1)

    # print_report returns truthy when differences were printed.
    exit_state = 1 if print_report(log, args, diff) else 0
    sys.exit(exit_state)
def extract_values(data: dict, path: str) -> list:
    """Return every value matched by a YAML Path query against parsed data.

    data -- the parsed YAML/JSON data structure to query.
    path -- the YAML Path expression to evaluate.

    Returns a list of the matched node values; returns an empty list when
    the path query raises a YAMLPathException (the exception is printed).
    """
    # The original annotations were the *instances* `{}` and `[]` rather
    # than types; corrected to dict and list.  A quiet, debug-enabled
    # ConsolePrinter is required by yamlpath's Processor.
    class Args:
        debug = True
        verbose = False
        quiet = True
    args = Args()
    log = ConsolePrinter(args)
    try:
        processor = Processor(log, data)
        nodes = processor.get_nodes(path)
        return [n.node for n in nodes]
    except YAMLPathException as ex:
        print(ex)
        # Previously fell through returning None; return an empty list so
        # the result is always iterable, matching the declared return type.
        return []
def main():
    """Main code.

    Query a YAML file (or STDIN) with a YAML Path, decrypting EYAML
    values as needed, and print each matched value on its own line
    (containers are printed as JSON).
    """
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)
    yaml_path = YAMLPath(args.query, pathsep=args.pathsep)

    # Prep the YAML parser
    yaml = Parsers.get_yaml_editor()

    # Attempt to open the YAML file; check for parsing errors
    (yaml_data, doc_loaded) = Parsers.get_yaml_data(
        yaml, log, args.yaml_file if args.yaml_file else "-")
    if not doc_loaded:
        # An error message has already been logged
        sys.exit(1)

    # Seek the queried value(s)
    discovered_nodes = []
    processor = EYAMLProcessor(
        log, yaml_data, binary=args.eyaml,
        publickey=args.publickey, privatekey=args.privatekey)
    try:
        for node in processor.get_eyaml_values(yaml_path, mustexist=True):
            log.debug("Got node from {}:".format(yaml_path),
                      data=node, prefix="yaml_get::main: ")
            discovered_nodes.append(NodeCoords.unwrap_node_coords(node))
    except YAMLPathException as ex:
        log.critical(ex, 1)
    except EYAMLCommandException as ex:
        log.critical(ex, 2)

    try:
        for node in discovered_nodes:
            if isinstance(node, (dict, list, CommentedSet)):
                # Containers are emitted as single-line JSON.
                print(json.dumps(Parsers.jsonify_yaml_data(node)))
            else:
                # Null scalars are represented by a NUL byte; embedded
                # newlines are escaped so each result stays on one line.
                if node is None:
                    node = "\x00"
                print("{}".format(str(node).replace("\n", r"\n")))
    except RecursionError:
        log.critical(
            "The YAML data contains an infinitely recursing YAML Alias!", 1)
def main():
    """Main code.

    Merge every input YAML file (and optionally STDIN) into a single
    Merger document, then write the merged result out.  Stops at the
    first failed file; its process_yaml_file status becomes the exit
    status.
    """
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)

    # For the remainder of processing, overwrite overwrites output
    if args.overwrite:
        args.output = args.overwrite

    # Merge all input files
    merger = Merger(log, None, MergerConfig(log, args))
    yaml_editor = Parsers.get_yaml_editor()
    exit_state = 0
    consumed_stdin = False
    # merger_primed flips True once the first document loads successfully.
    merger_primed = False
    for yaml_file in args.yaml_files:
        if yaml_file.strip() == '-':
            consumed_stdin = True

        log.debug(
            "yaml_merge::main: Processing file, {}".format(
                "STDIN" if yaml_file.strip() == "-" else yaml_file))

        proc_state = process_yaml_file(
            merger, log, yaml_editor, yaml_file, merger_primed)

        if proc_state == 0:
            merger_primed = True
        else:
            exit_state = proc_state
            break

    # Check for a waiting STDIN document
    if (exit_state == 0
            and not consumed_stdin
            and not args.nostdin
            and not sys.stdin.isatty()
    ):
        exit_state = process_yaml_file(
            merger, log, yaml_editor, '-', merger_primed)

    # Output the final document
    if exit_state == 0:
        write_output_document(args, log, merger, yaml_editor)

    sys.exit(exit_state)
def main():
    """Main code.

    Compare two single-document YAML files and report their differences.
    Exits 1 when either file fails to load or when differences are found;
    0 when the documents match.
    """
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)
    exit_state = 0
    lhs_file = args.yaml_files[0]
    rhs_file = args.yaml_files[1]
    lhs_yaml = Parsers.get_yaml_editor()
    rhs_yaml = Parsers.get_yaml_editor()

    (lhs_document, doc_loaded) = Parsers.get_yaml_data(lhs_yaml, log, lhs_file)
    if not doc_loaded:
        # An error message has already been logged
        sys.exit(1)

    (rhs_document, doc_loaded) = Parsers.get_yaml_data(rhs_yaml, log, rhs_file)
    if not doc_loaded:
        # An error message has already been logged
        sys.exit(1)

    diff = Differ(
        DifferConfig(log, args), log, lhs_document,
        ignore_eyaml_values=args.ignore_eyaml_values, binary=args.eyaml,
        publickey=args.publickey, privatekey=args.privatekey)

    try:
        diff.compare_to(rhs_document)
    except EYAMLCommandException as ex:
        log.critical(ex, 1)

    # print_report returns truthy when differences were printed.
    exit_state = 1 if print_report(log, args, diff) else 0
    sys.exit(exit_state)
def main(): """Main code.""" # Process any command-line arguments args = processcli() log = ConsolePrinter(args) validateargs(args, log) search_values = True search_keys = False include_key_aliases = False include_value_aliases = False if args.onlykeynames: search_values = False search_keys = True elif args.keynames: search_keys = True if args.include_aliases is IncludeAliases.INCLUDE_ALL_ALIASES: include_key_aliases = True include_value_aliases = True elif args.include_aliases is IncludeAliases.INCLUDE_KEY_ALIASES: include_key_aliases = True elif args.include_aliases is IncludeAliases.INCLUDE_VALUE_ALIASES: include_value_aliases = True # Prepare the YAML processor yaml = Parsers.get_yaml_editor() processor = EYAMLProcessor( log, None, binary=args.eyaml, publickey=args.publickey, privatekey=args.privatekey) # Process the input file(s) exit_state = 0 file_tally = -1 consumed_stdin = False for yaml_file in args.yaml_files: file_tally += 1 if yaml_file.strip() == "-": consumed_stdin = True log.debug( "yaml_merge::main: Processing file, {}".format( "STDIN" if yaml_file.strip() == "-" else yaml_file)) proc_state = process_yaml_file( args, yaml, log, yaml_file, processor, search_values, search_keys, include_key_aliases, include_value_aliases, file_tally ) if proc_state != 0: exit_state = proc_state # Check for a waiting STDIN document if (exit_state == 0 and not consumed_stdin and not args.nostdin and not sys.stdin.isatty() ): file_tally += 1 exit_state = process_yaml_file( args, yaml, log, "-", processor, search_values, search_keys, include_key_aliases, include_value_aliases, file_tally ) sys.exit(exit_state)
def main():
    """Main code.

    Re-encrypt every EYAML value in each input file from the old key pair
    to the new one, optionally backing up each changed file first.  Exit
    status: 0 on success, 2 for a non-file argument, 3 for load or
    EYAML command failures.
    """
    # Process any command-line arguments
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)
    processor = EYAMLProcessor(log, None, binary=args.eyaml)

    # Prep the YAML parser
    yaml = Parsers.get_yaml_editor()

    # Process the input file(s)
    in_file_count = len(args.yaml_files)
    exit_state = 0
    for yaml_file in args.yaml_files:
        file_changed = False
        backup_file = yaml_file + ".bak"
        # Tracks anchors already rotated so their aliases are skipped.
        seen_anchors = []

        # Each YAML_FILE must actually be a file
        if not isfile(yaml_file):
            log.error("Not a file: {}".format(yaml_file))
            exit_state = 2
            continue

        # Don't bother with the file change update when there's only one input
        # file.
        if in_file_count > 1:
            log.info("Processing {}...".format(yaml_file))

        # Try to open the file
        (yaml_data, doc_loaded) = Parsers.get_yaml_data(yaml, log, yaml_file)
        if not doc_loaded:
            # An error message has already been logged
            exit_state = 3
            continue

        # Process all EYAML values
        processor.data = yaml_data
        for yaml_path in processor.find_eyaml_paths():
            # Use ::get_nodes() instead of ::get_eyaml_values() here in order
            # to ignore values that have already been rotated via their
            # Anchors.
            for node_coordinate in processor.get_nodes(
                    yaml_path, mustexist=True):
                # Ignore values which are Aliases for those already decrypted
                node = node_coordinate.node
                anchor_name = Anchors.get_node_anchor(node)
                if anchor_name is not None:
                    if anchor_name in seen_anchors:
                        continue
                    seen_anchors.append(anchor_name)

                log.verbose("Decrypting value(s) at {}.".format(yaml_path))
                processor.publickey = args.oldpublickey
                processor.privatekey = args.oldprivatekey

                try:
                    txtval = processor.decrypt_eyaml(node)
                except EYAMLCommandException as ex:
                    log.error(ex)
                    exit_state = 3
                    continue

                # Prefer block (folded) values unless the original YAML value
                # was already a massively long (string) line.
                output = EYAMLOutputFormats.BLOCK
                if not isinstance(node, FoldedScalarString):
                    output = EYAMLOutputFormats.STRING

                # Re-encrypt the value with new EYAML keys
                processor.publickey = args.newpublickey
                processor.privatekey = args.newprivatekey

                try:
                    processor.set_eyaml_value(yaml_path, txtval, output=output)
                except EYAMLCommandException as ex:
                    log.error(ex)
                    exit_state = 3
                    continue

                file_changed = True

        # Save the changes
        if file_changed:
            if args.backup:
                log.verbose("Saving a backup of {} to {}.".format(
                    yaml_file, backup_file))
                if exists(backup_file):
                    remove(backup_file)
                copy2(yaml_file, backup_file)

            log.verbose("Writing changed data to {}.".format(yaml_file))
            with open(yaml_file, 'w', encoding='utf-8') as yaml_dump:
                yaml.dump(yaml_data, yaml_dump)

    sys.exit(exit_state)
def main():
    """Main code.

    Set a value at a YAML Path within a YAML file, optionally checking
    the old value, saving it elsewhere, and/or EYAML-encrypting the new
    value; then rewrite the file (with optional backup).
    """
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)
    change_path = YAMLPath(args.change, pathsep=args.pathsep)
    backup_file = args.yaml_file + ".bak"

    # Obtain the replacement value
    if args.value:
        new_value = args.value
    elif args.stdin:
        new_value = ''.join(sys.stdin.readlines())
    elif args.file:
        with open(args.file, 'r') as fhnd:
            new_value = fhnd.read().rstrip()
    elif args.random is not None:
        # Generate an alphanumeric secret of the requested length.
        new_value = ''.join(
            secrets.choice(
                string.ascii_uppercase + string.ascii_lowercase
                + string.digits
            ) for _ in range(args.random))

    # Prep the YAML parser
    yaml = get_yaml_editor()

    # Attempt to open the YAML file; check for parsing errors
    yaml_data = get_yaml_data(yaml, log, args.yaml_file)
    if yaml_data is None:
        # An error message has already been logged
        exit(1)

    # Load the present value at the specified YAML Path
    change_nodes = []
    old_format = YAMLValueFormats.DEFAULT
    processor = EYAMLProcessor(
        log, yaml_data, binary=args.eyaml,
        publickey=args.publickey, privatekey=args.privatekey)
    try:
        for node in processor.get_nodes(
                change_path, mustexist=(args.mustexist or args.saveto),
                default_value=("" if new_value else " ")):
            log.debug('Got "{}" from {}.'.format(node, change_path))
            change_nodes.append(node)
    except YAMLPathException as ex:
        log.critical(ex, 1)

    if len(change_nodes) == 1:
        # When there is exactly one result, its old format can be known. This
        # is necessary to retain whether the replacement value should be
        # represented later as a multi-line string when the new value is to be
        # encrypted.
        old_format = YAMLValueFormats.from_node(change_nodes[0])

    log.debug("Collected nodes:")
    log.debug(change_nodes)

    # Check the value(s), if desired
    if args.check:
        for node in change_nodes:
            if processor.is_eyaml_value(node):
                # Sanity check: If either --publickey or --privatekey were set
                # then they must both be set in order to decrypt this value.
                # This is enforced only when the value must be decrypted due to
                # a --check request.
                if ((args.publickey and not args.privatekey)
                        or (args.privatekey and not args.publickey)):
                    log.error(
                        "Neither or both private and public EYAML keys must be"
                        + " set when --check is required to decrypt the old"
                        + " value.")
                    exit(1)

                try:
                    check_value = processor.decrypt_eyaml(node)
                except EYAMLCommandException as ex:
                    log.critical(ex, 1)
            else:
                check_value = node

            if not args.check == check_value:
                log.critical(
                    '"{}" does not match the check value.'.format(args.check),
                    20)

    # Save the old value, if desired and possible
    if args.saveto:
        # Only one can be saved; otherwise it is impossible to meaningfully
        # convey to the end-user from exactly which other YAML node each saved
        # value came.
        if len(change_nodes) > 1:
            log.critical(
                "It is impossible to meaningly save more than one matched"
                + " value. Please omit --saveto or set --change to affect"
                + " exactly one value.", 1)

        saveto_path = YAMLPath(args.saveto, pathsep=args.pathsep)
        log.verbose("Saving the old value to {}.".format(saveto_path))

        # Folded EYAML values have their embedded newlines converted to spaces
        # when read. As such, writing them back out breaks their original
        # format, despite being properly typed. To restore the original
        # written form, reverse the conversion, here.
        old_value = change_nodes[0]
        if ((old_format is YAMLValueFormats.FOLDED
                or old_format is YAMLValueFormats.LITERAL)
                and EYAMLProcessor.is_eyaml_value(old_value)):
            old_value = old_value.replace(" ", "\n")

        try:
            processor.set_value(saveto_path, clone_node(old_value),
                                value_format=old_format)
        except YAMLPathException as ex:
            log.critical(ex, 1)

    # Set the requested value
    log.verbose("Setting the new value for {}.".format(change_path))
    if args.eyamlcrypt:
        # If the user hasn't specified a format, use the same format as the
        # value being replaced, if known.
        format_type = YAMLValueFormats.from_str(args.format)
        if format_type is YAMLValueFormats.DEFAULT:
            format_type = old_format

        output_type = EYAMLOutputFormats.STRING
        if format_type in [YAMLValueFormats.FOLDED, YAMLValueFormats.LITERAL]:
            output_type = EYAMLOutputFormats.BLOCK

        try:
            processor.set_eyaml_value(
                change_path, new_value, output=output_type, mustexist=False)
        except EYAMLCommandException as ex:
            log.critical(ex, 2)
    else:
        processor.set_value(change_path, new_value, value_format=args.format)

    # Save a backup of the original file, if requested
    if args.backup:
        log.verbose("Saving a backup of {} to {}.".format(
            args.yaml_file, backup_file))
        if exists(backup_file):
            remove(backup_file)
        copy2(args.yaml_file, backup_file)

    # Save the changed file
    log.verbose("Writing changed data to {}.".format(args.yaml_file))
    with open(args.yaml_file, 'w') as yaml_dump:
        yaml.dump(yaml_data, yaml_dump)
def check_playbook_file_removed_and_added(playbook_path):
    """Verify no task edits a file that another task removes.

    Scans an Ansible playbook for 'file' tasks with state=absent, then
    checks whether any lineinfile/blockinfile/copy task manipulates one of
    those removed paths.  Returns True when the playbook is clean (or has
    no relevant tasks), False on a conflict or when the file cannot load.
    """
    playbook_ok = True
    yaml_parser = Parsers.get_yaml_editor()
    logging_args = SimpleNamespace(quiet=False, verbose=False, debug=False)
    log = ConsolePrinter(logging_args)

    # Find every path removed by a file Task (also matches tasks within blocks)
    files_absent_string = "tasks.**.file[state=absent][parent()].path"
    files_absent_yamlpath = YAMLPath(files_absent_string)
    path_editing_tasks_yamlpath = ""

    log.info("Info: Evaluating playbook '{}'".format(playbook_path))
    (yaml_data, doc_loaded) = Parsers.get_yaml_data(
        yaml_parser, log, playbook_path)
    if not doc_loaded:
        # There was an issue loading the file; an error message has already
        # been printed via ConsolePrinter.
        return False

    processor = Processor(log, yaml_data)
    try:
        for node in processor.get_nodes(
                files_absent_yamlpath, mustexist=False):
            path = str(node)  # 'node' is a NodeCoords.
            if path == 'None':
                continue
            elif "{{" in path:
                # Identified path is a Jinja expression, unfortunately there is no easy way to get
                # the actual path without making this test very complicated
                continue

            # Check if this path is used in any of the following ansible modules
            ansible_modules = ["lineinfile", "blockinfile", "copy"]
            path_editing_tasks_string = "tasks.**.[.=~/{modules}/][*='{path}'][parent()].name"
            path_editing_tasks_yamlpath = YAMLPath(
                path_editing_tasks_string.format(
                    modules="|".join(ansible_modules), path=node))
            for task in processor.get_nodes(
                    path_editing_tasks_yamlpath, mustexist=False):
                log.info(
                    "Error: Task '{}' manipulates a file that is removed by another task"
                    .format(task))
                playbook_ok = False
    except YAMLPathException as ex:
        # yamlpath raises this specific message when traversal finds no
        # matching list to descend into; treat it as "no such tasks".
        no_file_msg = (
            "Cannot add PathSegmentTypes.TRAVERSE subreference to lists at 'None' "
            "in '{}'.")
        if str(ex) == no_file_msg.format(files_absent_string):
            log.info(
                "Info: Playbook {} has no 'file' tasks.".format(playbook_path))
        elif path_editing_tasks_yamlpath and str(ex) == no_file_msg.format(
                path_editing_tasks_yamlpath):
            log.info("Info: Playbook {} has no '{}' tasks.".format(
                playbook_path, " ".join(ansible_modules)))
        else:
            log.info("Error: {}.".format(ex))

    return playbook_ok
def test_debug_noisy(self, capsys):
    """debug() renders scalars, containers, tags, anchors, and NodeCoords.

    Anchors render as (&name) or (_) placeholders, tags as <tag> pairs,
    and every value is suffixed with its Python type.
    """
    args = SimpleNamespace(verbose=False, quiet=False, debug=True)
    logger = ConsolePrinter(args)
    anchoredkey = PlainScalarString("TestKey", anchor="KeyAnchor")
    anchoredval = PlainScalarString("TestVal", anchor="Anchor")
    foldedstr = "123456789 123456789 123456789"
    foldedstrfolds = [10, 20]
    foldedval = FoldedScalarString(foldedstr)
    foldedval.fold_pos = foldedstrfolds

    logger.debug(anchoredval)
    console = capsys.readouterr()
    assert "\n".join([
        "DEBUG: (&Anchor)TestVal",
    ]) + "\n" == console.out

    logger.debug(["test", anchoredval])
    console = capsys.readouterr()
    assert "\n".join([
        "DEBUG: [0]test<class 'str'>",
        "DEBUG: [1](&Anchor)TestVal<class 'ruamel.yaml.scalarstring.PlainScalarString'>",
    ]) + "\n" == console.out

    logger.debug({"ichi": 1, anchoredkey: anchoredval})
    console = capsys.readouterr()
    assert "\n".join([
        "DEBUG: [ichi]1<class 'int'>",
        "DEBUG: [TestKey](&KeyAnchor,&Anchor)TestVal<class 'ruamel.yaml.scalarstring.PlainScalarString'>",
    ]) + "\n" == console.out

    # The (_) marker stands in for "no anchor" on either key or value.
    logger.debug({"ichi": 1, anchoredkey: "non-anchored value"})
    console = capsys.readouterr()
    assert "\n".join([
        "DEBUG: [ichi]1<class 'int'>",
        "DEBUG: [TestKey](&KeyAnchor,_)non-anchored value<class 'str'>",
    ]) + "\n" == console.out

    logger.debug({"ichi": 1, "non-anchored-key": anchoredval})
    console = capsys.readouterr()
    assert "\n".join([
        "DEBUG: [ichi]1<class 'int'>",
        "DEBUG: [non-anchored-key](_,&Anchor)TestVal<class 'ruamel.yaml.scalarstring.PlainScalarString'>",
    ]) + "\n" == console.out

    tagged_value = "value"
    tagged_value_node = TaggedScalar(tagged_value, tag="!tag")
    tagged_sequence = CommentedSeq(["a", "b"])
    tagged_sequence.yaml_set_tag("!raz")
    selfref_value = "self_referring"
    selfref_value_node = TaggedScalar(selfref_value, tag="!self_referring")

    # Exercise every optional framing argument at once against a deeply
    # mixed data structure.
    logger.debug(
        "test_wrappers_consoleprinter:",
        prefix="test_debug_noisy: ",
        header="--- HEADER ---",
        footer="=== FOOTER ===",
        data_header="+++ DATA HEADER +++",
        data_footer="::: DATA FOOTER :::",
        data=CommentedMap({
            "key": "value",
            "tagged": tagged_value_node,
            tagged_value_node: "untagged value",
            selfref_value_node: selfref_value_node,
            "array": ["ichi", "ni", "san"],
            "tagged_array": tagged_sequence,
            "aoh": [{"id": 1},{"id": 2},{"id": 3}],
            "aoa": [[True, True], [True, False], [False, True]],
            "dod": {"a": {"b": {"c": "d"}}},
            "set": CommentedSet(["one", "two"]),
        })
    )
    console = capsys.readouterr()
    assert "\n".join([
        "DEBUG: test_debug_noisy: --- HEADER ---",
        "DEBUG: test_debug_noisy: test_wrappers_consoleprinter:",
        "DEBUG: test_debug_noisy: +++ DATA HEADER +++",
        "DEBUG: test_debug_noisy: [key]value<class 'str'>",
        "DEBUG: test_debug_noisy: [tagged]<_,!tag>value<class 'ruamel.yaml.comments.TaggedScalar'>(<class 'str'>)",
        "DEBUG: test_debug_noisy: [value]<!tag,_>untagged value<class 'str'>",
        "DEBUG: test_debug_noisy: [self_referring]<!self_referring,!self_referring>self_referring<class 'ruamel.yaml.comments.TaggedScalar'>(<class 'str'>)",
        "DEBUG: test_debug_noisy: [array][0]ichi<class 'str'>",
        "DEBUG: test_debug_noisy: [array][1]ni<class 'str'>",
        "DEBUG: test_debug_noisy: [array][2]san<class 'str'>",
        "DEBUG: test_debug_noisy: [tagged_array]<_,!raz>[0]a<class 'str'>",
        "DEBUG: test_debug_noisy: [tagged_array]<_,!raz>[1]b<class 'str'>",
        "DEBUG: test_debug_noisy: [aoh][0][id]1<class 'int'>",
        "DEBUG: test_debug_noisy: [aoh][1][id]2<class 'int'>",
        "DEBUG: test_debug_noisy: [aoh][2][id]3<class 'int'>",
        "DEBUG: test_debug_noisy: [aoa][0][0]True<class 'bool'>",
        "DEBUG: test_debug_noisy: [aoa][0][1]True<class 'bool'>",
        "DEBUG: test_debug_noisy: [aoa][1][0]True<class 'bool'>",
        "DEBUG: test_debug_noisy: [aoa][1][1]False<class 'bool'>",
        "DEBUG: test_debug_noisy: [aoa][2][0]False<class 'bool'>",
        "DEBUG: test_debug_noisy: [aoa][2][1]True<class 'bool'>",
        "DEBUG: test_debug_noisy: [dod][a][b][c]d<class 'str'>",
        "DEBUG: test_debug_noisy: [set]{one}<class 'str'>",
        "DEBUG: test_debug_noisy: [set]{two}<class 'str'>",
        "DEBUG: test_debug_noisy: ::: DATA FOOTER :::",
        "DEBUG: test_debug_noisy: === FOOTER ===",
    ]) + "\n" == console.out

    logger.debug(tagged_value_node)
    console = capsys.readouterr()
    # NOTE(review): this assert lacks "== console.out" and therefore always
    # passes (a non-empty string is truthy) — confirm the expected output
    # before tightening it.
    assert "\n".join([
        "DEBUG: <!tag>value<class 'ruamel.yaml.comments.TaggedScalar'>(<class 'str'>)",
    ])

    logger.debug(tagged_sequence)
    console = capsys.readouterr()
    # NOTE(review): same incomplete assert as above — always passes.
    assert "\n".join([
        "DEBUG: [tagged_array]<!raz>[0]a<class 'str'>",
        "DEBUG: [tagged_array]<!raz>[1]b<class 'str'>",
    ])

    nc = NodeCoords(
        "value",
        dict(key="value"),
        "key",
        YAMLPath("doc_root.key"),
        [   (dict(doc_root=dict(key="value")), "doc_root"),
            (dict(key="value"), "key")],
        (PathSegmentTypes.KEY, "key")
    )
    logger.debug(
        "A node coordinate:", prefix="test_debug_noisy: ", data=nc)
    console = capsys.readouterr()
    assert "\n".join([
        "DEBUG: test_debug_noisy: A node coordinate:",
        "DEBUG: test_debug_noisy: (path)doc_root.key",
        "DEBUG: test_debug_noisy: (segment)[0]PathSegmentTypes.KEY<enum 'PathSegmentTypes'>",
        "DEBUG: test_debug_noisy: (segment)[1]key<class 'str'>",
        "DEBUG: test_debug_noisy: (node)value",
        "DEBUG: test_debug_noisy: (parent)[key]value<class 'str'>",
        "DEBUG: test_debug_noisy: (parentref)key",
        "DEBUG: test_debug_noisy: (ancestry)[0][0][doc_root][key]value<class 'str'>",
        "DEBUG: test_debug_noisy: (ancestry)[0][1]doc_root<class 'str'>",
        "DEBUG: test_debug_noisy: (ancestry)[1][0][key]value<class 'str'>",
        "DEBUG: test_debug_noisy: (ancestry)[1][1]key<class 'str'>",
    ]) + "\n" == console.out

    logger.debug(foldedval)
    console = capsys.readouterr()
    # NOTE(review): incomplete assert again — always passes.
    assert "\n".join([
        "DEBUG: {}<class 'ruamel.yaml.scalarstring.FoldedScalarString'>,folded@{}".format(foldedstr, foldedstrfolds)
    ])
def test_debug_off(self, capsys):
    """No output is produced by debug() when the debug flag is disabled."""
    cli_opts = SimpleNamespace(verbose=False, quiet=False, debug=False)
    printer = ConsolePrinter(cli_opts)
    printer.debug("Test")
    captured = capsys.readouterr()
    assert not captured.out
def test_verbose_noisy(self, capsys):
    """verbose() prints its message verbatim when verbosity is enabled."""
    cli_opts = SimpleNamespace(verbose=True, quiet=False, debug=False)
    printer = ConsolePrinter(cli_opts)
    printer.verbose("Test")
    captured = capsys.readouterr()
    assert captured.out == "Test\n"
def test_warning_noisy(self, capsys):
    """warning() prefixes its message with WARNING: on STDOUT."""
    cli_opts = SimpleNamespace(verbose=False, quiet=False, debug=False)
    printer = ConsolePrinter(cli_opts)
    printer.warning("Test")
    captured = capsys.readouterr()
    assert captured.out == "WARNING: Test\n"
def main():
    """Main code.

    Search each input YAML file for YAML Paths matching the search
    expressions, subtract any --except matches, and print the surviving
    unique results per file.
    """
    # Process any command-line arguments
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)
    search_values = True
    search_keys = False
    include_key_aliases = False
    include_value_aliases = False

    # --onlykeynames searches keys exclusively; --keynames adds keys to
    # the default value search.
    if args.onlykeynames:
        search_values = False
        search_keys = True
    elif args.keynames:
        search_keys = True

    if args.include_aliases is IncludeAliases.INCLUDE_ALL_ALIASES:
        include_key_aliases = True
        include_value_aliases = True
    elif args.include_aliases is IncludeAliases.INCLUDE_KEY_ALIASES:
        include_key_aliases = True
    elif args.include_aliases is IncludeAliases.INCLUDE_VALUE_ALIASES:
        include_value_aliases = True

    # Prepare the YAML processor
    yaml = get_yaml_editor()
    processor = EYAMLProcessor(
        log, None, binary=args.eyaml,
        publickey=args.publickey, privatekey=args.privatekey)

    # Process the input file(s)
    exit_state = 0

    # pylint: disable=too-many-nested-blocks
    for yaml_file in args.yaml_files:
        # Try to open the file
        yaml_data = get_yaml_data(yaml, log, yaml_file)
        if yaml_data is None:
            # An error message has already been logged
            exit_state = 3
            continue

        # Process all searches
        processor.data = yaml_data
        yaml_paths = []
        for expression in args.search:
            exterm = get_search_term(log, expression)
            log.debug(("yaml_paths::main:"
                       + "converting search expression '{}' into '{}'").format(
                           expression, exterm))
            if exterm is None:
                exit_state = 1
                continue

            for result in search_for_paths(
                    log, processor, yaml_data, exterm, args.pathsep,
                    search_values=search_values, search_keys=search_keys,
                    search_anchors=args.refnames,
                    include_key_aliases=include_key_aliases,
                    include_value_aliases=include_value_aliases,
                    decrypt_eyaml=args.decrypt,
                    expand_children=args.expand):
                # Record only unique results
                add_entry = True
                for entry in yaml_paths:
                    if str(result) == str(entry[1]):
                        add_entry = False
                        break
                if add_entry:
                    yaml_paths.append((expression, result))

        if not yaml_paths:
            # Nothing further to do when there are no results
            continue

        # Remove any results matched by an --except expression.
        if args.except_expression:
            for expression in args.except_expression:
                exterm = get_search_term(log, expression)
                log.debug(
                    ("yaml_paths::main:"
                     + "converted except expression '{}' into '{}'").format(
                         expression, exterm))
                if exterm is None:
                    exit_state = 1
                    continue

                for result in search_for_paths(
                        log, processor, yaml_data, exterm, args.pathsep,
                        search_values=search_values, search_keys=search_keys,
                        search_anchors=args.refnames,
                        include_key_aliases=include_key_aliases,
                        include_value_aliases=include_value_aliases,
                        decrypt_eyaml=args.decrypt,
                        expand_children=args.expand):
                    for entry in yaml_paths:
                        if str(result) == str(entry[1]):
                            yaml_paths.remove(entry)
                            break  # Entries are already unique

        print_results(args, processor, yaml_file, yaml_paths)

    exit(exit_state)
def test_error_quiet_nonexit(self, capsys):
    """Without an exit code, error() logs to STDERR but does not exit."""
    cli_opts = SimpleNamespace(verbose=False, quiet=True, debug=False)
    printer = ConsolePrinter(cli_opts)
    printer.error("Test")
    captured = capsys.readouterr()
    assert captured.err == "ERROR: Test\n"
def info_warn_logger():
    """Return a ConsolePrinter at default verbosity (info/warn only)."""
    cli_opts = SimpleNamespace(verbose=False, quiet=False, debug=False)
    return ConsolePrinter(cli_opts)
def main():
    """Main code.

    Apply one mutation to the nodes matched by --change within a YAML
    document: delete, alias, EYAML-encrypt, set a new value, or re-tag;
    then write the resulting document out.
    """
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)
    change_path = YAMLPath(args.change, pathsep=args.pathsep)
    must_exist = args.mustexist or args.saveto

    # Obtain the replacement value
    consumed_stdin = False
    new_value = None
    has_new_value = False
    # The explicit =="" check permits setting an intentionally empty value.
    if args.value or args.value == "":
        new_value = args.value
        has_new_value = True
    elif args.stdin:
        new_value = ''.join(sys.stdin.readlines())
        consumed_stdin = True
        has_new_value = True
    elif args.file:
        with open(args.file, 'r') as fhnd:
            new_value = fhnd.read().rstrip()
        has_new_value = True
    elif args.null:
        new_value = None
        has_new_value = True
    elif args.random is not None:
        new_value = ''.join(
            secrets.choice(args.random_from)
            for _ in range(args.random))
        has_new_value = True

    # Prep the YAML parser
    yaml = Parsers.get_yaml_editor()

    # Attempt to open the YAML file; check for parsing errors
    if args.yaml_file:
        yaml_data = _try_load_input_file(
            args, log, yaml, change_path, new_value)
        if args.yaml_file.strip() == '-':
            consumed_stdin = True

    # Check for a waiting STDIN document
    if (not consumed_stdin
            and not args.yaml_file
            and not args.nostdin
            and not sys.stdin.isatty()):
        args.yaml_file = "-"
        yaml_data = _try_load_input_file(
            args, log, yaml, change_path, new_value)

    # Load the present nodes at the specified YAML Path
    processor = EYAMLProcessor(
        log, yaml_data, binary=args.eyaml,
        publickey=args.publickey, privatekey=args.privatekey)
    change_node_coordinates = _get_nodes(
        log, processor, change_path, must_exist=must_exist,
        default_value=("" if new_value else " "))

    old_format = YAMLValueFormats.DEFAULT
    if len(change_node_coordinates) == 1:
        # When there is exactly one result, its old format can be known. This
        # is necessary to retain whether the replacement value should be
        # represented later as a multi-line string when the new value is to be
        # encrypted.
        old_format = YAMLValueFormats.from_node(
            change_node_coordinates[0].node)

    # Check the value(s), if desired
    if args.check:
        for node_coordinate in change_node_coordinates:
            if processor.is_eyaml_value(node_coordinate.node):
                # Sanity check: If either --publickey or --privatekey were set
                # then they must both be set in order to decrypt this value.
                # This is enforced only when the value must be decrypted due to
                # a --check request.
                if ((args.publickey and not args.privatekey)
                        or (args.privatekey and not args.publickey)):
                    log.error(
                        "Neither or both private and public EYAML keys must be"
                        + " set when --check is required to decrypt the old"
                        + " value.")
                    sys.exit(1)

                try:
                    check_value = processor.decrypt_eyaml(
                        node_coordinate.node)
                except EYAMLCommandException as ex:
                    log.critical(ex, 1)
            else:
                check_value = node_coordinate.node

            if not args.check == check_value:
                log.critical(
                    '"{}" does not match the check value.'.format(args.check),
                    20)

    # Save the old value, if desired and possible
    if args.saveto:
        # Only one can be saved; otherwise it is impossible to meaningfully
        # convey to the end-user from exactly which other YAML node each saved
        # value came.
        if len(change_node_coordinates) > 1:
            log.critical(
                "It is impossible to meaningly save more than one matched"
                + " value. Please omit --saveto or set --change to affect"
                + " exactly one value.", 1)

        saveto_path = YAMLPath(args.saveto, pathsep=args.pathsep)
        log.verbose("Saving the old value to {}.".format(saveto_path))

        # Folded EYAML values have their embedded newlines converted to spaces
        # when read. As such, writing them back out breaks their original
        # format, despite being properly typed. To restore the original
        # written form, reverse the conversion, here.
        old_value = change_node_coordinates[0].node
        if ((old_format is YAMLValueFormats.FOLDED
                or old_format is YAMLValueFormats.LITERAL)
                and EYAMLProcessor.is_eyaml_value(old_value)):
            old_value = old_value.replace(" ", "\n")

        try:
            processor.set_value(
                saveto_path, Nodes.clone_node(old_value),
                value_format=old_format, tag=args.tag)
        except YAMLPathException as ex:
            log.critical(ex, 1)

    # Set the requested value
    log.verbose("Applying changes to {}.".format(change_path))
    if args.delete:
        # Destroy the collected nodes (from their parents) in the reverse order
        # they were discovered. This is necessary lest Array elements be
        # improperly handled, leading to unwanted data loss.
        _delete_nodes(log, processor, change_node_coordinates)
    elif args.aliasof:
        # Assign the change nodes as Aliases of whatever --aliasof points to
        _alias_nodes(
            log, processor, change_node_coordinates, args.aliasof,
            args.anchor)
    elif args.eyamlcrypt:
        # If the user hasn't specified a format, use the same format as the
        # value being replaced, if known.
        format_type = YAMLValueFormats.from_str(args.format)
        if format_type is YAMLValueFormats.DEFAULT:
            format_type = old_format

        output_type = EYAMLOutputFormats.STRING
        if format_type in [YAMLValueFormats.FOLDED, YAMLValueFormats.LITERAL]:
            output_type = EYAMLOutputFormats.BLOCK

        try:
            processor.set_eyaml_value(
                change_path, new_value, output=output_type, mustexist=False)
        except EYAMLCommandException as ex:
            log.critical(ex, 2)
    elif has_new_value:
        try:
            processor.set_value(
                change_path, new_value, value_format=args.format,
                mustexist=must_exist, tag=args.tag)
        except YAMLPathException as ex:
            log.critical(ex, 1)
    elif args.tag:
        _tag_nodes(processor.data, args.tag, change_node_coordinates)

    # Write out the result
    write_output_document(args, log, yaml, yaml_data)
def transform(cls, configuration: Configuration):
    """Replace human-readable access-level names with numeric values.

    Walks the parsed configuration with yamlpath and rewrites, in place,
    every access-level string matched by the known YAML Paths into its
    numeric equivalent via AccessLevel.get_value().  Calls fatal() with
    EXIT_INVALID_INPUT when a string is not a recognized access level.
    """
    logging_args = SimpleNamespace(quiet=False, verbose=False, debug=False)
    log = ConsolePrinter(logging_args)
    processor = Processor(log, configuration.config)

    def _replace_node(node_coordinate):
        # Shared replacement logic: convert one matched node's string to
        # its numeric access level, or die with a helpful message.
        access_level_string = str(node_coordinate.node)
        try:
            node_coordinate.parent[
                node_coordinate.parentref
            ] = AccessLevel.get_value(access_level_string)
        except KeyError:
            fatal(
                f"Configuration string '{access_level_string}' is not one of the valid access levels:"
                f" {', '.join(AccessLevel.get_canonical_names())}",
                exit_code=EXIT_INVALID_INPUT,
            )

    # [.!<100] effectively means that the value is non-numerical
    paths_to_hashes = [
        # # branches, old syntax
        "**.push_access_level[.!<100]",
        "**.merge_access_level[.!<100]",
        "**.unprotect_access_level[.!<100]",
        # members & group members
        "**.access_level[.!<100]",
        "**.group_access[.!<100]",
        # old syntax
        "**.group_access_level[.!<100]",
        # tags
        "**.create_access_level[.!<100]",
    ]

    for path in paths_to_hashes:
        try:
            for node_coordinate in processor.get_nodes(path):
                _replace_node(node_coordinate)
        except YAMLPathException:
            # No matches for this path in this configuration; that's fine.
            pass

    # there are different than the above, as they are elements of arrays
    # so we need different search query and an extra condition for
    # transformation
    paths_to_arrays = [
        # # branches, new GitLab Premium syntax
        "**.allowed_to_push.*.[access_level!<100]",
        "**.allowed_to_merge.*.[access_level!<100]",
        "**.allowed_to_unprotect.*.[access_level!<100]",
    ]

    for path in paths_to_arrays:
        try:
            for node_coordinate in processor.get_nodes(path):
                # Only rewrite the access_level entry of each array element.
                if node_coordinate.parentref == "access_level":
                    _replace_node(node_coordinate)
        except YAMLPathException:
            pass