def main():
    """Main code."""
    # Parse and vet the command-line arguments
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)

    # For the remainder of processing, overwrite overwrites output
    if args.overwrite:
        args.output = args.overwrite

    # Merge all input files
    yaml_editor = Parsers.get_yaml_editor()
    merge_config = MergerConfig(log, args)
    exit_state = 0
    consumed_stdin = False
    mergers: List[Merger] = []
    merge_count = 0
    for yaml_file in args.yaml_files:
        if yaml_file.strip() == '-':
            # Remember STDIN was read so it is not read a second time below
            consumed_stdin = True

        log.debug(
            "yaml_merge::main: Processing file, {}".format(
                "STDIN" if yaml_file.strip() == "-" else yaml_file))

        if len(mergers) < 1:
            # The first successfully-loaded file primes the LHS mergers
            (mergers, mergers_loaded) = get_doc_mergers(
                log, yaml_editor, merge_config, yaml_file)
            if not mergers_loaded:
                exit_state = 4
                break
        else:
            # Merge RHS into LHS
            exit_state = merge_docs(
                log, yaml_editor, merge_config, mergers, yaml_file)
            if not exit_state == 0:
                break
        merge_count += 1

    # Check for a waiting STDIN document
    if (exit_state == 0
        and not consumed_stdin
        and not args.nostdin
        and not sys.stdin.isatty()
    ):
        exit_state = merge_docs(log, yaml_editor, merge_config, mergers, "-")
        merge_count += 1

    # When no merges have occurred, check for a single-doc merge request
    if (exit_state == 0
        and merge_count == 0
        and merge_config.get_multidoc_mode() is MultiDocModes.CONDENSE_ALL
    ):
        exit_state = merge_condense_all(log, mergers, [])

    # Output the final document
    if exit_state == 0:
        write_output_document(args, log, yaml_editor, mergers)

    sys.exit(exit_state)
def main():
    """Main code."""
    # Parse and vet the command-line arguments
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)

    yaml_parser = Parsers.get_yaml_editor()
    exit_state = 0
    consumed_stdin = False

    # Process every named input file, remembering the last failure
    for yaml_file in args.yaml_files:
        is_stdin = yaml_file.strip() == '-'
        if is_stdin:
            consumed_stdin = True
        log.debug("yaml_merge::main: Processing file, {}".format(
            "STDIN" if is_stdin else yaml_file))
        file_state = process_file(log, yaml_parser, yaml_file)
        if file_state != 0:
            exit_state = file_state

    # A document may still be waiting on STDIN
    stdin_pending = (
        exit_state == 0
        and not consumed_stdin
        and not args.nostdin
        and not sys.stdin.isatty())
    if stdin_pending:
        exit_state = process_file(log, yaml_parser, "-")

    sys.exit(exit_state)
def main():
    """Main code."""
    # Parse and vet the command-line arguments
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)

    # For the remainder of processing, overwrite overwrites output
    if args.overwrite:
        args.output = args.overwrite

    # Merge all input files
    merger = Merger(log, None, MergerConfig(log, args))
    yaml_editor = Parsers.get_yaml_editor()
    exit_state = 0
    consumed_stdin = False
    merger_primed = False
    for yaml_file in args.yaml_files:
        if yaml_file.strip() == '-':
            # Remember STDIN was read so it is not read a second time below
            consumed_stdin = True

        log.debug(
            "yaml_merge::main: Processing file, {}".format(
                "STDIN" if yaml_file.strip() == "-" else yaml_file))

        proc_state = process_yaml_file(
            merger, log, yaml_editor, yaml_file, merger_primed)

        if proc_state == 0:
            # The first successfully-processed file becomes the merge LHS
            merger_primed = True
        else:
            # Stop at the first failure
            exit_state = proc_state
            break

    # Check for a waiting STDIN document
    if (exit_state == 0
        and not consumed_stdin
        and not args.nostdin
        and not sys.stdin.isatty()
    ):
        exit_state = process_yaml_file(
            merger, log, yaml_editor, '-', merger_primed)

    # Output the final document
    if exit_state == 0:
        write_output_document(args, log, merger, yaml_editor)

    sys.exit(exit_state)
def main():
    """Main code."""
    # Parse and vet the command-line arguments
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)
    yaml_path = YAMLPath(args.query, pathsep=args.pathsep)

    # Prep the YAML parser
    yaml = Parsers.get_yaml_editor()

    # Attempt to open the YAML file; check for parsing errors
    (yaml_data, doc_loaded) = Parsers.get_yaml_data(
        yaml, log, args.yaml_file if args.yaml_file else "-")
    if not doc_loaded:
        # An error message has already been logged
        sys.exit(1)

    # Seek the queried value(s)
    discovered_nodes = []
    processor = EYAMLProcessor(
        log, yaml_data, binary=args.eyaml,
        publickey=args.publickey, privatekey=args.privatekey)
    try:
        for node in processor.get_eyaml_values(yaml_path, mustexist=True):
            log.debug(
                "Got node from {}:".format(yaml_path),
                data=node, prefix="yaml_get::main: ")
            # Unwrap project NodeCoords wrappers down to plain data
            discovered_nodes.append(NodeCoords.unwrap_node_coords(node))
    except YAMLPathException as ex:
        log.critical(ex, 1)
    except EYAMLCommandException as ex:
        log.critical(ex, 2)

    try:
        for node in discovered_nodes:
            if isinstance(node, (dict, list, CommentedSet)):
                # Complex results are rendered as JSON
                print(json.dumps(Parsers.jsonify_yaml_data(node)))
            else:
                if node is None:
                    # Represent a null result as a NUL character
                    node = "\x00"
                # Scalars print one-per-line with literal newlines escaped
                print("{}".format(str(node).replace("\n", r"\n")))
    except RecursionError:
        log.critical(
            "The YAML data contains an infinitely recursing YAML Alias!", 1)
def test_debug_noisy(self, capsys):
    """Debug output must render anchors for scalars, lists, and dicts."""
    cli_args = SimpleNamespace(verbose=False, quiet=False, debug=True)
    printer = ConsolePrinter(cli_args)
    keynode = PlainScalarString("TestKey", anchor="KeyAnchor")
    valnode = PlainScalarString("TestVal", anchor="Anchor")

    # A bare anchored scalar
    printer.debug(valnode)
    captured = capsys.readouterr()
    assert captured.out == "DEBUG: TestVal; &Anchor\n"

    # A list containing an anchored scalar
    printer.debug(["test", valnode])
    captured = capsys.readouterr()
    assert captured.out == (
        "DEBUG: [0]=test\n"
        "DEBUG: [1]=TestVal; &Anchor\n")

    # A dict with an anchored key and an anchored value
    printer.debug({"ichi": 1, keynode: valnode})
    captured = capsys.readouterr()
    assert captured.out == (
        "DEBUG: [ichi]=>1\n"
        "DEBUG: [TestKey; &KeyAnchor]=>TestVal; &Anchor\n")
def main(): """Main code.""" # Process any command-line arguments args = processcli() log = ConsolePrinter(args) validateargs(args, log) search_values = True search_keys = False include_key_aliases = False include_value_aliases = False if args.onlykeynames: search_values = False search_keys = True elif args.keynames: search_keys = True if args.include_aliases is IncludeAliases.INCLUDE_ALL_ALIASES: include_key_aliases = True include_value_aliases = True elif args.include_aliases is IncludeAliases.INCLUDE_KEY_ALIASES: include_key_aliases = True elif args.include_aliases is IncludeAliases.INCLUDE_VALUE_ALIASES: include_value_aliases = True # Prepare the YAML processor yaml = Parsers.get_yaml_editor() processor = EYAMLProcessor( log, None, binary=args.eyaml, publickey=args.publickey, privatekey=args.privatekey) # Process the input file(s) exit_state = 0 file_tally = -1 consumed_stdin = False for yaml_file in args.yaml_files: file_tally += 1 if yaml_file.strip() == "-": consumed_stdin = True log.debug( "yaml_merge::main: Processing file, {}".format( "STDIN" if yaml_file.strip() == "-" else yaml_file)) proc_state = process_yaml_file( args, yaml, log, yaml_file, processor, search_values, search_keys, include_key_aliases, include_value_aliases, file_tally ) if proc_state != 0: exit_state = proc_state # Check for a waiting STDIN document if (exit_state == 0 and not consumed_stdin and not args.nostdin and not sys.stdin.isatty() ): file_tally += 1 exit_state = process_yaml_file( args, yaml, log, "-", processor, search_values, search_keys, include_key_aliases, include_value_aliases, file_tally ) sys.exit(exit_state)
def search_for_paths(logger: ConsolePrinter, processor: EYAMLProcessor,
                     data: Any, terms: SearchTerms,
                     pathsep: PathSeperators = PathSeperators.DOT,
                     build_path: str = "",
                     seen_anchors: Optional[List[str]] = None,
                     **kwargs: bool) -> Generator[YAMLPath, None, None]:
    """
    Recursively search a data structure for nodes matching an expression.

    The nodes can be keys, values, and/or elements. When dealing with anchors
    and their aliases, the caller indicates whether to include only the
    original anchor or the anchor and all of its (duplicate) aliases.

    Keyword Parameters (all bool):
    * search_values (default True) match against scalar values
    * search_keys (default False) match against dict keys
    * search_anchors (default False) match against anchor names
    * include_key_aliases (default True) include aliased keys
    * include_value_aliases (default False) include aliased values
    * decrypt_eyaml (default False) decrypt EYAML values before matching
    * expand_children (default False) yield every child of a matched node
      instead of only the matched node itself
    """
    search_values: bool = kwargs.pop("search_values", True)
    search_keys: bool = kwargs.pop("search_keys", False)
    search_anchors: bool = kwargs.pop("search_anchors", False)
    include_key_aliases: bool = kwargs.pop("include_key_aliases", True)
    include_value_aliases: bool = kwargs.pop("include_value_aliases", False)
    decrypt_eyaml: bool = kwargs.pop("decrypt_eyaml", False)
    expand_children: bool = kwargs.pop("expand_children", False)
    strsep = str(pathsep)
    invert = terms.inverted
    method = terms.method
    term = terms.term

    if seen_anchors is None:
        seen_anchors = []

    if isinstance(data, CommentedSeq):
        # Build the path
        if not build_path and pathsep is PathSeperators.FSLASH:
            build_path = strsep
        build_path += "["

        for idx, ele in enumerate(data):
            # Any element may or may not have an Anchor/Alias
            anchor_matched = Searches.search_anchor(
                ele, terms, seen_anchors, search_anchors=search_anchors,
                include_aliases=include_value_aliases)
            logger.debug(
                ("yaml_paths::search_for_paths<list>:"
                 + "anchor search => {}.")
                .format(anchor_matched)
            )

            # Build the temporary YAML Path using either Anchor or Index
            if anchor_matched is AnchorMatches.NO_ANCHOR:
                # Not an anchor/alias, so ref this node by its index
                tmp_path = build_path + str(idx) + "]"
            else:
                tmp_path = "{}&{}]".format(
                    build_path,
                    YAMLPath.escape_path_section(ele.anchor.value, pathsep)
                )

            if anchor_matched is AnchorMatches.ALIAS_EXCLUDED:
                continue

            if anchor_matched in [AnchorMatches.MATCH,
                                  AnchorMatches.ALIAS_INCLUDED]:
                logger.debug(
                    ("yaml_paths::search_for_paths<list>:"
                     + "yielding an Anchor/Alias match, {}.")
                    .format(tmp_path)
                )
                if expand_children:
                    # Yield every child of the matched element
                    for path in yield_children(
                            logger, ele, terms, pathsep, tmp_path,
                            seen_anchors, search_anchors=search_anchors,
                            include_key_aliases=include_key_aliases,
                            include_value_aliases=include_value_aliases):
                        yield path
                else:
                    yield YAMLPath(tmp_path)
                continue

            if isinstance(ele, (CommentedSeq, CommentedMap)):
                logger.debug(
                    "Recursing into complex data:", data=ele,
                    prefix="yaml_paths::search_for_paths<list>: ",
                    footer=">>>> >>>> >>>> >>>> >>>> >>>> >>>>")
                for subpath in search_for_paths(
                        logger, processor, ele, terms, pathsep, tmp_path,
                        seen_anchors, search_values=search_values,
                        search_keys=search_keys,
                        search_anchors=search_anchors,
                        include_key_aliases=include_key_aliases,
                        include_value_aliases=include_value_aliases,
                        decrypt_eyaml=decrypt_eyaml,
                        expand_children=expand_children
                ):
                    logger.debug(
                        "Yielding RECURSED match, {}.".format(subpath),
                        prefix="yaml_paths::search_for_paths<list>: ",
                        footer="<<<< <<<< <<<< <<<< <<<< <<<< <<<<"
                    )
                    yield subpath
            elif search_values:
                if (anchor_matched is AnchorMatches.UNSEARCHABLE_ALIAS
                        and not include_value_aliases):
                    continue

                # Optionally decrypt EYAML values before comparing
                check_value = ele
                if decrypt_eyaml and processor.is_eyaml_value(ele):
                    check_value = processor.decrypt_eyaml(ele)

                matches = Searches.search_matches(method, term, check_value)
                if (matches and not invert) or (invert and not matches):
                    logger.debug(
                        ("yaml_paths::search_for_paths<list>:"
                         + "yielding VALUE match, {}: {}."
                        ).format(check_value, tmp_path)
                    )
                    yield YAMLPath(tmp_path)

    # pylint: disable=too-many-nested-blocks
    elif isinstance(data, CommentedMap):
        if build_path:
            build_path += strsep
        elif pathsep is PathSeperators.FSLASH:
            build_path = strsep

        # Merge (<<) keys are skipped unless any alias class is included
        pool = data.non_merged_items()
        if include_key_aliases or include_value_aliases:
            pool = data.items()

        for key, val in pool:
            tmp_path = build_path + YAMLPath.escape_path_section(key, pathsep)

            # Search the value anchor to have it on record, in case the key
            # anchor match would otherwise block the value anchor from
            # appearing in seen_anchors (which is important).
            val_anchor_matched = Searches.search_anchor(
                val, terms, seen_anchors, search_anchors=search_anchors,
                include_aliases=include_value_aliases)
            logger.debug(
                ("yaml_paths::search_for_paths<dict>:"
                 + "VALUE anchor search => {}.")
                .format(val_anchor_matched)
            )

            # Search the key when the caller wishes it.
            if search_keys:
                # The key itself may be an Anchor or Alias.  Search it when
                # the caller wishes.
                key_anchor_matched = Searches.search_anchor(
                    key, terms, seen_anchors, search_anchors=search_anchors,
                    include_aliases=include_key_aliases)
                logger.debug(
                    ("yaml_paths::search_for_paths<dict>:"
                     + "KEY anchor search, {}: {}.")
                    .format(key, key_anchor_matched)
                )

                if key_anchor_matched in [AnchorMatches.MATCH,
                                          AnchorMatches.ALIAS_INCLUDED]:
                    # NOTE(review): this format string has one placeholder
                    # but two arguments; tmp_path is silently dropped and
                    # the key is printed instead -- likely meant "{}: {}".
                    logger.debug(
                        ("yaml_paths::search_for_paths<dict>:"
                         + "yielding a KEY-ANCHOR match, {}."
                        ).format(key, tmp_path)
                    )
                    if expand_children:
                        for path in yield_children(
                                logger, val, terms, pathsep, tmp_path,
                                seen_anchors, search_anchors=search_anchors,
                                include_key_aliases=include_key_aliases,
                                include_value_aliases=include_value_aliases):
                            yield path
                    else:
                        yield YAMLPath(tmp_path)
                    continue

                # Search the name of the key, itself
                matches = Searches.search_matches(method, term, key)
                if (matches and not invert) or (invert and not matches):
                    logger.debug(
                        ("yaml_paths::search_for_paths<dict>:"
                         + "yielding KEY name match, {}: {}."
                        ).format(key, tmp_path)
                    )
                    if expand_children:
                        # Include every non-excluded child node under this
                        # matched parent node.
                        for path in yield_children(
                                logger, val, terms, pathsep, tmp_path,
                                seen_anchors, search_anchors=search_anchors,
                                include_key_aliases=include_key_aliases,
                                include_value_aliases=include_value_aliases):
                            yield path
                    else:
                        # No other matches within this node matter because
                        # they are already in the result.
                        yield YAMLPath(tmp_path)
                    continue

            # The value may itself be anchored; search it if requested
            if val_anchor_matched is AnchorMatches.ALIAS_EXCLUDED:
                continue

            if val_anchor_matched in [AnchorMatches.MATCH,
                                      AnchorMatches.ALIAS_INCLUDED]:
                logger.debug(
                    ("yaml_paths::search_for_paths<dict>:"
                     + "yielding a VALUE-ANCHOR match, {}.")
                    .format(tmp_path)
                )
                if expand_children:
                    for path in yield_children(
                            logger, val, terms, pathsep, tmp_path,
                            seen_anchors, search_anchors=search_anchors,
                            include_key_aliases=include_key_aliases,
                            include_value_aliases=include_value_aliases):
                        yield path
                else:
                    yield YAMLPath(tmp_path)
                continue

            if isinstance(val, (CommentedSeq, CommentedMap)):
                logger.debug(
                    "Recursing into complex data:", data=val,
                    prefix="yaml_paths::search_for_paths<dict>: ",
                    footer=">>>> >>>> >>>> >>>> >>>> >>>> >>>>"
                )
                for subpath in search_for_paths(
                        logger, processor, val, terms, pathsep, tmp_path,
                        seen_anchors, search_values=search_values,
                        search_keys=search_keys,
                        search_anchors=search_anchors,
                        include_key_aliases=include_key_aliases,
                        include_value_aliases=include_value_aliases,
                        decrypt_eyaml=decrypt_eyaml,
                        expand_children=expand_children
                ):
                    logger.debug(
                        "Yielding RECURSED match, {}.".format(subpath),
                        prefix="yaml_paths::search_for_paths<dict>: ",
                        footer="<<<< <<<< <<<< <<<< <<<< <<<< <<<<"
                    )
                    yield subpath
            elif search_values:
                if (val_anchor_matched is AnchorMatches.UNSEARCHABLE_ALIAS
                        and not include_value_aliases):
                    continue

                # Optionally decrypt EYAML values before comparing
                check_value = val
                if decrypt_eyaml and processor.is_eyaml_value(val):
                    check_value = processor.decrypt_eyaml(val)

                matches = Searches.search_matches(method, term, check_value)
                if (matches and not invert) or (invert and not matches):
                    logger.debug(
                        ("yaml_paths::search_for_paths<dict>:"
                         + "yielding VALUE match, {}: {}."
                        ).format(check_value, tmp_path)
                    )
                    yield YAMLPath(tmp_path)
def yield_children(logger: ConsolePrinter, data: Any,
                   terms: SearchTerms, pathsep: PathSeperators,
                   build_path: str, seen_anchors: List[str],
                   **kwargs: bool) -> Generator[YAMLPath, None, None]:
    """
    Dump the YAML Path of every child node beneath a given parent.

    Except for unwanted aliases, the dump is unconditional.

    Keyword Parameters (all bool):
    * include_key_aliases (default True) include aliased keys
    * include_value_aliases (default False) include aliased values
    * search_anchors (default False) track anchors while walking
    """
    include_key_aliases: bool = kwargs.pop("include_key_aliases", True)
    include_value_aliases: bool = kwargs.pop("include_value_aliases", False)
    search_anchors: bool = kwargs.pop("search_anchors", False)
    logger.debug(
        "Dumping all children in data of type, {}:"
        .format(type(data)), data=data,
        prefix="yaml_paths::yield_children: ")

    # Matches that indicate an alias the caller does not want reported
    exclude_alias_matchers = [AnchorMatches.UNSEARCHABLE_ALIAS,
                              AnchorMatches.ALIAS_EXCLUDED]

    if isinstance(data, CommentedSeq):
        if not build_path and pathsep is PathSeperators.FSLASH:
            build_path = str(pathsep)
        build_path += "["

        for idx, ele in enumerate(data):
            anchor_matched = Searches.search_anchor(
                ele, terms, seen_anchors, search_anchors=search_anchors,
                include_aliases=include_value_aliases)
            logger.debug(
                ("yaml_paths::yield_children<list>: "
                 + "element[{}] has anchor search => {}.")
                .format(idx, anchor_matched))

            # Build the temporary YAML Path using either Anchor or Index
            if anchor_matched is AnchorMatches.NO_ANCHOR:
                # Not an anchor/alias, so ref this node by its index
                tmp_path = build_path + str(idx) + "]"
            else:
                tmp_path = "{}&{}]".format(
                    build_path,
                    YAMLPath.escape_path_section(ele.anchor.value, pathsep)
                )

            if (not include_value_aliases
                    and anchor_matched in exclude_alias_matchers):
                continue

            if isinstance(ele, (CommentedMap, CommentedSeq)):
                # Recurse into complex children
                for path in yield_children(
                        logger, ele, terms, pathsep, tmp_path,
                        seen_anchors, search_anchors=search_anchors,
                        include_key_aliases=include_key_aliases,
                        include_value_aliases=include_value_aliases):
                    yield path
            else:
                yield YAMLPath(tmp_path)

    elif isinstance(data, CommentedMap):
        if build_path:
            build_path += str(pathsep)
        elif pathsep is PathSeperators.FSLASH:
            build_path = str(pathsep)

        # Merge (<<) keys are skipped unless any alias class is included
        pool = data.non_merged_items()
        if include_key_aliases or include_value_aliases:
            pool = data.items()

        for key, val in pool:
            tmp_path = build_path + YAMLPath.escape_path_section(key, pathsep)
            key_anchor_matched = Searches.search_anchor(
                key, terms, seen_anchors, search_anchors=search_anchors,
                include_aliases=include_key_aliases)
            val_anchor_matched = Searches.search_anchor(
                val, terms, seen_anchors, search_anchors=search_anchors,
                include_aliases=include_value_aliases)
            logger.debug(
                ("yaml_paths::yield_children<dict>: "
                 + "key[{}]:value have value anchor search => {}:{}.")
                .format(key, key_anchor_matched, val_anchor_matched))

            if (
                (not include_key_aliases
                 and key_anchor_matched in exclude_alias_matchers)
                or (not include_value_aliases
                    and val_anchor_matched in exclude_alias_matchers)
            ):
                continue

            if isinstance(val, (CommentedSeq, CommentedMap)):
                # Recurse into complex children
                for path in yield_children(
                        logger, val, terms, pathsep, tmp_path,
                        seen_anchors, search_anchors=search_anchors,
                        include_key_aliases=include_key_aliases,
                        include_value_aliases=include_value_aliases):
                    yield path
            else:
                yield YAMLPath(tmp_path)

    else:
        # Scalar data: yield the parent path itself
        if not build_path and pathsep is PathSeperators.FSLASH:
            build_path = str(pathsep)
        yield YAMLPath(build_path)
def main():
    """Main code."""
    # Process any command-line arguments
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)

    # Translate CLI flags into search behaviors
    search_values = True
    search_keys = False
    include_key_aliases = False
    include_value_aliases = False
    if args.onlykeynames:
        search_values = False
        search_keys = True
    elif args.keynames:
        search_keys = True

    if args.include_aliases is IncludeAliases.INCLUDE_ALL_ALIASES:
        include_key_aliases = True
        include_value_aliases = True
    elif args.include_aliases is IncludeAliases.INCLUDE_KEY_ALIASES:
        include_key_aliases = True
    elif args.include_aliases is IncludeAliases.INCLUDE_VALUE_ALIASES:
        include_value_aliases = True

    # Prepare the YAML processor
    yaml = get_yaml_editor()
    processor = EYAMLProcessor(
        log, None, binary=args.eyaml,
        publickey=args.publickey, privatekey=args.privatekey)

    # Process the input file(s)
    exit_state = 0

    # pylint: disable=too-many-nested-blocks
    for yaml_file in args.yaml_files:
        # Try to open the file
        yaml_data = get_yaml_data(yaml, log, yaml_file)
        if yaml_data is None:
            # An error message has already been logged
            exit_state = 3
            continue

        # Process all searches
        processor.data = yaml_data
        yaml_paths = []
        for expression in args.search:
            exterm = get_search_term(log, expression)
            log.debug(("yaml_paths::main:"
                       + "converting search expression '{}' into '{}'"
                       ).format(expression, exterm))
            if exterm is None:
                exit_state = 1
                continue

            for result in search_for_paths(
                    log, processor, yaml_data, exterm, args.pathsep,
                    search_values=search_values, search_keys=search_keys,
                    search_anchors=args.refnames,
                    include_key_aliases=include_key_aliases,
                    include_value_aliases=include_value_aliases,
                    decrypt_eyaml=args.decrypt,
                    expand_children=args.expand):
                # Record only unique results
                add_entry = True
                for entry in yaml_paths:
                    if str(result) == str(entry[1]):
                        add_entry = False
                        break
                if add_entry:
                    yaml_paths.append((expression, result))

        if not yaml_paths:
            # Nothing further to do when there are no results
            continue

        # Remove every result matched by an --except expression
        if args.except_expression:
            for expression in args.except_expression:
                exterm = get_search_term(log, expression)
                log.debug(
                    ("yaml_paths::main:"
                     + "converted except expression '{}' into '{}'"
                     ).format(expression, exterm))
                if exterm is None:
                    exit_state = 1
                    continue

                for result in search_for_paths(
                        log, processor, yaml_data, exterm, args.pathsep,
                        search_values=search_values,
                        search_keys=search_keys,
                        search_anchors=args.refnames,
                        include_key_aliases=include_key_aliases,
                        include_value_aliases=include_value_aliases,
                        decrypt_eyaml=args.decrypt,
                        expand_children=args.expand):
                    for entry in yaml_paths:
                        if str(result) == str(entry[1]):
                            yaml_paths.remove(entry)
                            break  # Entries are already unique

        print_results(args, processor, yaml_file, yaml_paths)

    # FIX: use sys.exit() instead of the site-provided exit() builtin; the
    # latter only exists when the site module is loaded (absent under
    # `python -S`) and the sibling entry points all use sys.exit().
    sys.exit(exit_state)
def get_yaml_multidoc_data(
    parser: Any, logger: ConsolePrinter, source: str, **kwargs
) -> Generator[Tuple[Any, bool], None, None]:
    """
    Parse YAML/Compatible multi-docs and yield each ruamel.yaml object.

    All known issues are caught and distinctively logged.

    Parameters:
    1. parser (ruamel.yaml.YAML) The YAML data parser
    2. logger (ConsolePrinter) The logging facility
    3. source (str) The source file to load; can be - for reading from STDIN

    Keyword Parameters:
    * literal (bool) `source` is literal serialized YAML data rather than a
      file-spec, so load it directly

    Returns:  Generator[Tuple[Any, bool], None, None] A tuple for each
    document as it is parsed.  The first field is the parsed document; will
    be None for empty documents and for documents which could not be read.
    The second field will be True when there were no errors during parsing
    and False, otherwise.
    """
    literal = kwargs.pop("literal", False)

    # This code traps errors and warnings from ruamel.yaml, substituting
    # lengthy stack-dumps with specific, meaningful feedback.  Further, some
    # warnings are treated as errors by ruamel.yaml, so these are also
    # coalesced into cleaner feedback.
    has_error = False
    try:
        # Promote ruamel.yaml warnings to errors so they can be trapped below
        with warnings.catch_warnings():
            warnings.filterwarnings("error")
            if source == "-":
                doc_yielded = False
                for document in parser.load_all(stdin.read()):
                    doc_yielded = True
                    logger.debug(
                        "Yielding document from {}:".format(source),
                        prefix="get_yaml_multidoc_data: ", data=document)
                    yield (document, True)

                # The user sent a deliberately empty document via STDIN
                if not doc_yielded:
                    yield ("", True)
            else:
                if literal:
                    # `source` is the serialized YAML text itself
                    for document in parser.load_all(source):
                        yield (document, True)
                else:
                    with open(source, 'r') as fhnd:
                        for document in parser.load_all(fhnd):
                            logger.debug(
                                "Yielding document from {}:".format(
                                    source),
                                prefix="get_yaml_multidoc_data: ",
                                data=document)
                            yield (document, True)
    except KeyboardInterrupt:
        has_error = True
        logger.error("Aborting data load due to keyboard interrupt!")
    except FileNotFoundError:
        has_error = True
        logger.error("File not found: {}".format(source))
    except ParserError as ex:
        has_error = True
        logger.error("YAML parsing error {}: {}".format(
            str(ex.problem_mark).lstrip(), ex.problem))
    except ComposerError as ex:
        has_error = True
        logger.error("YAML composition error {}: {}".format(
            str(ex.problem_mark).lstrip(), ex.problem))
    except ConstructorError as ex:
        has_error = True
        logger.error("YAML construction error {}: {}".format(
            str(ex.problem_mark).lstrip(), ex.problem))
    except ScannerError as ex:
        has_error = True
        logger.error("YAML syntax error {}: {}".format(
            str(ex.problem_mark).lstrip(), ex.problem))
    except DuplicateKeyError as dke:
        has_error = True
        # Strip ruamel.yaml's boilerplate advice lines from the message
        omits = [
            "while constructing", "To suppress this", "readthedocs",
            "future releases", "the new API",
        ]
        message = str(dke).split("\n")
        newmsg = ""
        for line in message:
            line = line.strip()
            if not line:
                continue
            write_line = True
            for omit in omits:
                if omit in line:
                    write_line = False
                    break
            if write_line:
                newmsg += "\n " + line
        logger.error("Duplicate Hash key detected: {}".format(newmsg))
    except ReusedAnchorWarning as raw:
        has_error = True
        # NOTE(review): the first .replace() below substitutes a string with
        # an identical string (a no-op); this looks like whitespace lost in
        # reformatting -- confirm the intended before/after spacing upstream.
        logger.error("Duplicate YAML Anchor detected: {}".format(
            str(raw).replace("occurrence ", "occurrence ").replace("\n", "\n ")))

    if has_error:
        # Signal that at least one document could not be parsed
        yield (None, False)
def test_debug_noisy(self, capsys):
    """Exercise ConsolePrinter.debug rendering across many node types."""
    args = SimpleNamespace(verbose=False, quiet=False, debug=True)
    logger = ConsolePrinter(args)
    anchoredkey = PlainScalarString("TestKey", anchor="KeyAnchor")
    anchoredval = PlainScalarString("TestVal", anchor="Anchor")
    foldedstr = "123456789 123456789 123456789"
    foldedstrfolds = [10, 20]
    foldedval = FoldedScalarString(foldedstr)
    foldedval.fold_pos = foldedstrfolds

    # A bare anchored scalar
    logger.debug(anchoredval)
    console = capsys.readouterr()
    assert "\n".join([
        "DEBUG: (&Anchor)TestVal",
    ]) + "\n" == console.out

    # A list containing an anchored scalar
    logger.debug(["test", anchoredval])
    console = capsys.readouterr()
    assert "\n".join([
        "DEBUG: [0]test<class 'str'>",
        "DEBUG: [1](&Anchor)TestVal<class 'ruamel.yaml.scalarstring.PlainScalarString'>",
    ]) + "\n" == console.out

    # A dict with anchored key and anchored value
    logger.debug({"ichi": 1, anchoredkey: anchoredval})
    console = capsys.readouterr()
    assert "\n".join([
        "DEBUG: [ichi]1<class 'int'>",
        "DEBUG: [TestKey](&KeyAnchor,&Anchor)TestVal<class 'ruamel.yaml.scalarstring.PlainScalarString'>",
    ]) + "\n" == console.out

    # Anchored key, plain value
    logger.debug({"ichi": 1, anchoredkey: "non-anchored value"})
    console = capsys.readouterr()
    assert "\n".join([
        "DEBUG: [ichi]1<class 'int'>",
        "DEBUG: [TestKey](&KeyAnchor,_)non-anchored value<class 'str'>",
    ]) + "\n" == console.out

    # Plain key, anchored value
    logger.debug({"ichi": 1, "non-anchored-key": anchoredval})
    console = capsys.readouterr()
    assert "\n".join([
        "DEBUG: [ichi]1<class 'int'>",
        "DEBUG: [non-anchored-key](_,&Anchor)TestVal<class 'ruamel.yaml.scalarstring.PlainScalarString'>",
    ]) + "\n" == console.out

    # Tagged nodes, tagged sequences, and a self-referring tag
    tagged_value = "value"
    tagged_value_node = TaggedScalar(tagged_value, tag="!tag")
    tagged_sequence = CommentedSeq(["a", "b"])
    tagged_sequence.yaml_set_tag("!raz")
    selfref_value = "self_referring"
    selfref_value_node = TaggedScalar(selfref_value, tag="!self_referring")
    logger.debug(
        "test_wrappers_consoleprinter:",
        prefix="test_debug_noisy: ",
        header="--- HEADER ---",
        footer="=== FOOTER ===",
        data_header="+++ DATA HEADER +++",
        data_footer="::: DATA FOOTER :::",
        data=CommentedMap({
            "key": "value",
            "tagged": tagged_value_node,
            tagged_value_node: "untagged value",
            selfref_value_node: selfref_value_node,
            "array": ["ichi", "ni", "san"],
            "tagged_array": tagged_sequence,
            "aoh": [{"id": 1},{"id": 2},{"id": 3}],
            "aoa": [[True, True], [True, False], [False, True]],
            "dod": {"a": {"b": {"c": "d"}}},
            "set": CommentedSet(["one", "two"]),
        })
    )
    console = capsys.readouterr()
    assert "\n".join([
        "DEBUG: test_debug_noisy: --- HEADER ---",
        "DEBUG: test_debug_noisy: test_wrappers_consoleprinter:",
        "DEBUG: test_debug_noisy: +++ DATA HEADER +++",
        "DEBUG: test_debug_noisy: [key]value<class 'str'>",
        "DEBUG: test_debug_noisy: [tagged]<_,!tag>value<class 'ruamel.yaml.comments.TaggedScalar'>(<class 'str'>)",
        "DEBUG: test_debug_noisy: [value]<!tag,_>untagged value<class 'str'>",
        "DEBUG: test_debug_noisy: [self_referring]<!self_referring,!self_referring>self_referring<class 'ruamel.yaml.comments.TaggedScalar'>(<class 'str'>)",
        "DEBUG: test_debug_noisy: [array][0]ichi<class 'str'>",
        "DEBUG: test_debug_noisy: [array][1]ni<class 'str'>",
        "DEBUG: test_debug_noisy: [array][2]san<class 'str'>",
        "DEBUG: test_debug_noisy: [tagged_array]<_,!raz>[0]a<class 'str'>",
        "DEBUG: test_debug_noisy: [tagged_array]<_,!raz>[1]b<class 'str'>",
        "DEBUG: test_debug_noisy: [aoh][0][id]1<class 'int'>",
        "DEBUG: test_debug_noisy: [aoh][1][id]2<class 'int'>",
        "DEBUG: test_debug_noisy: [aoh][2][id]3<class 'int'>",
        "DEBUG: test_debug_noisy: [aoa][0][0]True<class 'bool'>",
        "DEBUG: test_debug_noisy: [aoa][0][1]True<class 'bool'>",
        "DEBUG: test_debug_noisy: [aoa][1][0]True<class 'bool'>",
        "DEBUG: test_debug_noisy: [aoa][1][1]False<class 'bool'>",
        "DEBUG: test_debug_noisy: [aoa][2][0]False<class 'bool'>",
        "DEBUG: test_debug_noisy: [aoa][2][1]True<class 'bool'>",
        "DEBUG: test_debug_noisy: [dod][a][b][c]d<class 'str'>",
        "DEBUG: test_debug_noisy: [set]{one}<class 'str'>",
        "DEBUG: test_debug_noisy: [set]{two}<class 'str'>",
        "DEBUG: test_debug_noisy: ::: DATA FOOTER :::",
        "DEBUG: test_debug_noisy: === FOOTER ===",
    ]) + "\n" == console.out

    logger.debug(tagged_value_node)
    console = capsys.readouterr()
    # NOTE(review): this assert lacks "== console.out", so it only checks a
    # non-empty string literal and can never fail -- confirm and repair.
    assert "\n".join([
        "DEBUG: <!tag>value<class 'ruamel.yaml.comments.TaggedScalar'>(<class 'str'>)",
    ])

    logger.debug(tagged_sequence)
    console = capsys.readouterr()
    # NOTE(review): same missing "== console.out" comparison; also the
    # expected lines carry a "[tagged_array]" prefix that a direct dump of
    # the bare sequence would presumably not include -- verify before fixing.
    assert "\n".join([
        "DEBUG: [tagged_array]<!raz>[0]a<class 'str'>",
        "DEBUG: [tagged_array]<!raz>[1]b<class 'str'>",
    ])

    # A NodeCoords wrapper dumps its path, segment, node, parent, and
    # ancestry details
    nc = NodeCoords(
        "value",
        dict(key="value"),
        "key",
        YAMLPath("doc_root.key"),
        [   (dict(doc_root=dict(key="value")), "doc_root"),
            (dict(key="value"), "key")],
        (PathSegmentTypes.KEY, "key")
    )
    logger.debug(
        "A node coordinate:", prefix="test_debug_noisy: ", data=nc)
    console = capsys.readouterr()
    assert "\n".join([
        "DEBUG: test_debug_noisy: A node coordinate:",
        "DEBUG: test_debug_noisy: (path)doc_root.key",
        "DEBUG: test_debug_noisy: (segment)[0]PathSegmentTypes.KEY<enum 'PathSegmentTypes'>",
        "DEBUG: test_debug_noisy: (segment)[1]key<class 'str'>",
        "DEBUG: test_debug_noisy: (node)value",
        "DEBUG: test_debug_noisy: (parent)[key]value<class 'str'>",
        "DEBUG: test_debug_noisy: (parentref)key",
        "DEBUG: test_debug_noisy: (ancestry)[0][0][doc_root][key]value<class 'str'>",
        "DEBUG: test_debug_noisy: (ancestry)[0][1]doc_root<class 'str'>",
        "DEBUG: test_debug_noisy: (ancestry)[1][0][key]value<class 'str'>",
        "DEBUG: test_debug_noisy: (ancestry)[1][1]key<class 'str'>",
    ]) + "\n" == console.out

    logger.debug(foldedval)
    console = capsys.readouterr()
    # NOTE(review): missing "== console.out" comparison here as well.
    assert "\n".join([
        "DEBUG: {}<class 'ruamel.yaml.scalarstring.FoldedScalarString'>,folded@{}".format(foldedstr, foldedstrfolds)
    ])
def test_debug_off(self, capsys):
    """Debug messages must be suppressed when debug mode is disabled."""
    cli_args = SimpleNamespace(verbose=False, quiet=False, debug=False)
    printer = ConsolePrinter(cli_args)
    printer.debug("Test")
    captured = capsys.readouterr()
    assert captured.out == ""
def main():
    """Main code."""
    # Parse and vet the command-line arguments
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)
    change_path = YAMLPath(args.change, pathsep=args.pathsep)
    backup_file = args.yaml_file + ".bak"

    # Obtain the replacement value
    # NOTE(review): new_value remains unbound when none of the value sources
    # below is supplied; presumably validateargs() enforces that exactly one
    # is set -- confirm.
    if args.value:
        new_value = args.value
    elif args.stdin:
        new_value = ''.join(sys.stdin.readlines())
    elif args.file:
        with open(args.file, 'r') as fhnd:
            new_value = fhnd.read().rstrip()
    elif args.random is not None:
        # Generate a cryptographically-strong random alphanumeric string
        new_value = ''.join(
            secrets.choice(
                string.ascii_uppercase + string.ascii_lowercase
                + string.digits)
            for _ in range(args.random))

    # Prep the YAML parser
    yaml = get_yaml_editor()

    # Attempt to open the YAML file; check for parsing errors
    yaml_data = get_yaml_data(yaml, log, args.yaml_file)
    if yaml_data is None:
        # An error message has already been logged
        # NOTE(review): this uses the site-provided exit() builtin while
        # sibling entry points use sys.exit() -- consider unifying.
        exit(1)

    # Load the present value at the specified YAML Path
    change_nodes = []
    old_format = YAMLValueFormats.DEFAULT
    processor = EYAMLProcessor(
        log, yaml_data, binary=args.eyaml,
        publickey=args.publickey, privatekey=args.privatekey)
    try:
        for node in processor.get_nodes(
                change_path, mustexist=(args.mustexist or args.saveto),
                default_value=("" if new_value else " ")):
            log.debug('Got "{}" from {}.'.format(node, change_path))
            change_nodes.append(node)
    except YAMLPathException as ex:
        log.critical(ex, 1)

    if len(change_nodes) == 1:
        # When there is exactly one result, its old format can be known. This
        # is necessary to retain whether the replacement value should be
        # represented later as a multi-line string when the new value is to be
        # encrypted.
        old_format = YAMLValueFormats.from_node(change_nodes[0])

    log.debug("Collected nodes:")
    log.debug(change_nodes)

    # Check the value(s), if desired
    if args.check:
        for node in change_nodes:
            if processor.is_eyaml_value(node):
                # Sanity check:  If either --publickey or --privatekey were
                # set then they must both be set in order to decrypt this
                # value.  This is enforced only when the value must be
                # decrypted due to a --check request.
                if ((args.publickey and not args.privatekey)
                        or (args.privatekey and not args.publickey)):
                    log.error(
                        "Neither or both private and public EYAML keys must be"
                        + " set when --check is required to decrypt the old"
                        + " value.")
                    exit(1)

                try:
                    check_value = processor.decrypt_eyaml(node)
                except EYAMLCommandException as ex:
                    log.critical(ex, 1)
            else:
                check_value = node

            if not args.check == check_value:
                log.critical(
                    '"{}" does not match the check value.'.format(args.check),
                    20)

    # Save the old value, if desired and possible
    if args.saveto:
        # Only one can be saved; otherwise it is impossible to meaningfully
        # convey to the end-user from exactly which other YAML node each saved
        # value came.
        if len(change_nodes) > 1:
            log.critical(
                "It is impossible to meaningly save more than one matched"
                + " value. Please omit --saveto or set --change to affect"
                + " exactly one value.", 1)

        saveto_path = YAMLPath(args.saveto, pathsep=args.pathsep)
        log.verbose("Saving the old value to {}.".format(saveto_path))

        # Folded EYAML values have their embedded newlines converted to spaces
        # when read. As such, writing them back out breaks their original
        # format, despite being properly typed. To restore the original
        # written form, reverse the conversion, here.
        old_value = change_nodes[0]
        if ((old_format is YAMLValueFormats.FOLDED
             or old_format is YAMLValueFormats.LITERAL)
                and EYAMLProcessor.is_eyaml_value(old_value)):
            old_value = old_value.replace(" ", "\n")

        try:
            processor.set_value(
                saveto_path, clone_node(old_value),
                value_format=old_format)
        except YAMLPathException as ex:
            log.critical(ex, 1)

    # Set the requested value
    log.verbose("Setting the new value for {}.".format(change_path))
    if args.eyamlcrypt:
        # If the user hasn't specified a format, use the same format as the
        # value being replaced, if known.
        format_type = YAMLValueFormats.from_str(args.format)
        if format_type is YAMLValueFormats.DEFAULT:
            format_type = old_format

        # Multi-line formats are encrypted as EYAML block output
        output_type = EYAMLOutputFormats.STRING
        if format_type in [YAMLValueFormats.FOLDED, YAMLValueFormats.LITERAL]:
            output_type = EYAMLOutputFormats.BLOCK

        try:
            processor.set_eyaml_value(
                change_path, new_value, output=output_type, mustexist=False)
        except EYAMLCommandException as ex:
            log.critical(ex, 2)
    else:
        processor.set_value(change_path, new_value, value_format=args.format)

    # Save a backup of the original file, if requested
    if args.backup:
        log.verbose("Saving a backup of {} to {}.".format(
            args.yaml_file, backup_file))
        if exists(backup_file):
            remove(backup_file)
        copy2(args.yaml_file, backup_file)

    # Save the changed file
    log.verbose("Writing changed data to {}.".format(args.yaml_file))
    with open(args.yaml_file, 'w') as yaml_dump:
        yaml.dump(yaml_data, yaml_dump)