    def test_error_quiet_exit(self, capsys):
        args = SimpleNamespace(verbose=False, quiet=True, debug=False)
        logger = ConsolePrinter(args)
        with pytest.raises(SystemExit):
            logger.error("Test", 27)
        console = capsys.readouterr()
        assert console.err == "ERROR:  Test\n"
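A note on the pattern every example below shares: ConsolePrinter only needs an object exposing verbose, quiet, and debug attributes, so the tests substitute a SimpleNamespace for parsed argparse arguments. A minimal sketch, assuming the yamlpath package is installed:

from types import SimpleNamespace
from yamlpath.wrappers import ConsolePrinter

args = SimpleNamespace(verbose=False, quiet=False, debug=False)
log = ConsolePrinter(args)
log.info("An informational message")  # suppressed when quiet=True
log.debug("A debug message")          # emitted only when debug=True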
Example #2
def merge_across(
    log: ConsolePrinter, lhs_docs: List[Merger], rhs_docs: List[Merger]
) -> int:
    """Condense LHS and RHS multi-docs together into one."""
    return_state = 0
    lhs_len = len(lhs_docs)
    rhs_len = len(rhs_docs)
    max_len = lhs_len if lhs_len > rhs_len else rhs_len
    lhs_limit = lhs_len - 1
    rhs_limit = rhs_len - 1
    for i in range(0, max_len):
        if i > rhs_limit:
            break
        if i > lhs_limit:
            lhs_docs.append(rhs_docs[i])
            continue
        try:
            lhs_docs[i].merge_with(rhs_docs[i].data)
        except MergeException as mex:
            log.error(mex)
            return_state = 31
            break
        except YAMLPathException as yex:
            log.error(yex)
            return_state = 32
            break

    return return_state
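For context, a hypothetical caller of merge_across would treat any non-zero return state as fatal, much as the merge_docs callers in the main() examples below do; log, lhs_docs, and rhs_docs are assumed to come from the surrounding program:

state = merge_across(log, lhs_docs, rhs_docs)
if state != 0:
    sys.exit(state)  # 31: MergeException, 32: YAMLPathException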
Example #3
def main():
    """Main code."""
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)

    # For the remainder of processing, write output to the overwrite target
    if args.overwrite:
        args.output = args.overwrite

    # Merge all input files
    yaml_editor = Parsers.get_yaml_editor()
    merge_config = MergerConfig(log, args)
    exit_state = 0
    consumed_stdin = False
    mergers: List[Merger] = []
    merge_count = 0
    for yaml_file in args.yaml_files:
        if yaml_file.strip() == '-':
            consumed_stdin = True

        log.debug(
            "yaml_merge::main:  Processing file, {}".format(
                "STDIN" if yaml_file.strip() == "-" else yaml_file))

        if len(mergers) < 1:
            (mergers, mergers_loaded) = get_doc_mergers(
                log, yaml_editor, merge_config, yaml_file)
            if not mergers_loaded:
                exit_state = 4
                break
        else:
            # Merge RHS into LHS
            exit_state = merge_docs(
                log, yaml_editor, merge_config, mergers, yaml_file)
            if exit_state != 0:
                break
            merge_count += 1

    # Check for a waiting STDIN document
    if (exit_state == 0
        and not consumed_stdin
        and not args.nostdin
        and not sys.stdin.isatty()
    ):
        exit_state = merge_docs(log, yaml_editor, merge_config, mergers, "-")
        merge_count += 1

    # When no merges have occurred, check for a single-doc merge request
    if (exit_state == 0
        and merge_count == 0
        and merge_config.get_multidoc_mode() is MultiDocModes.CONDENSE_ALL
    ):
        exit_state = merge_condense_all(log, mergers, [])

    # Output the final document
    if exit_state == 0:
        write_output_document(args, log, yaml_editor, mergers)

    sys.exit(exit_state)
Example #4
    def test_critical_quiet(self, capsys):
        args = SimpleNamespace(verbose=False, quiet=True, debug=False)
        logger = ConsolePrinter(args)
        with pytest.raises(SystemExit):
            logger.critical("Test")
        console = capsys.readouterr()
        assert console.err == "CRITICAL:  Test\n"
Example #5
def main():
    """Main code."""
    # Process any command-line arguments
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)
    exit_state = 0
    consumed_stdin = False
    yaml = Parsers.get_yaml_editor()

    for yaml_file in args.yaml_files:
        if yaml_file.strip() == '-':
            consumed_stdin = True

        log.debug("yaml_merge::main:  Processing file, {}".format(
            "STDIN" if yaml_file.strip() == "-" else yaml_file))

        proc_state = process_file(log, yaml, yaml_file)

        if proc_state != 0:
            exit_state = proc_state

    # Check for a waiting STDIN document
    if (exit_state == 0 and not consumed_stdin and not args.nostdin
            and not sys.stdin.isatty()):
        exit_state = process_file(log, yaml, "-")

    sys.exit(exit_state)
Example #6
def write_output_document(
    args: argparse.Namespace, log: ConsolePrinter, yaml_editor: YAML,
    docs: List[Merger]
) -> None:
    """Save a backup of the overwrite file, if requested."""
    if args.backup:
        backup_file = args.overwrite + ".bak"
        log.verbose(
            "Saving a backup of {} to {}."
            .format(args.overwrite, backup_file))
        if exists(backup_file):
            remove(backup_file)
        copy2(args.overwrite, backup_file)

    document_is_json = (
        docs[0].prepare_for_dump(yaml_editor, args.output)
        is OutputDocTypes.JSON)

    dumps = []
    for doc in docs:
        doc.prepare_for_dump(yaml_editor, args.output)
        dumps.append(doc.data)

    if args.output:
        with open(args.output, 'w', encoding='utf-8') as out_fhnd:
            if document_is_json:
                if len(dumps) > 1:
                    for dump in dumps:
                        print(
                            json.dumps(Parsers.jsonify_yaml_data(dump)),
                            file=out_fhnd)
                else:
                    json.dump(Parsers.jsonify_yaml_data(dumps[0]), out_fhnd)
            else:
                if len(dumps) > 1:
                    yaml_editor.explicit_end = True  # type: ignore
                    yaml_editor.dump_all(dumps, out_fhnd)
                else:
                    yaml_editor.dump(dumps[0], out_fhnd)
    else:
        if document_is_json:
            if len(dumps) > 1:
                for dump in dumps:
                    print(json.dumps(Parsers.jsonify_yaml_data(dump)))
            else:
                json.dump(Parsers.jsonify_yaml_data(dumps[0]), sys.stdout)
        else:
            if len(dumps) > 1:
                yaml_editor.explicit_end = True  # type: ignore
                yaml_editor.dump_all(dumps, sys.stdout)
            else:
                yaml_editor.dump(dumps[0], sys.stdout)
Example #7
def merge_condense_all(
    log: ConsolePrinter, lhs_docs: List[Merger], rhs_docs: List[Merger]
) -> int:
    """Condense LHS and RHS multi-docs together into one."""
    return_state = 0
    lhs_prime = lhs_docs[0]
    if len(lhs_docs) > 1:
        for lhs_doc in lhs_docs[1:]:
            try:
                lhs_prime.merge_with(lhs_doc.data)
            except MergeException as mex:
                log.error(mex)
                return_state = 11
            except YAMLPathException as yex:
                log.error(yex)
                return_state = 12

    # With all subdocs merged into the first, eliminate all subdocs
    for i in reversed(range(1, len(lhs_docs))):
        del lhs_docs[i]

    # Merge every RHS doc into the prime LHS doc
    for rhs_doc in rhs_docs:
        try:
            lhs_prime.merge_with(rhs_doc.data)
        except MergeException as mex:
            log.error(mex)
            return_state = 13
        except YAMLPathException as yex:
            log.error(yex)
            return_state = 14

    return return_state
Example #8
    def test_debug_noisy(self, capsys):
        args = SimpleNamespace(verbose=False, quiet=False, debug=True)
        logger = ConsolePrinter(args)
        anchoredkey = PlainScalarString("TestKey", anchor="KeyAnchor")
        anchoredval = PlainScalarString("TestVal", anchor="Anchor")

        logger.debug(anchoredval)
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  TestVal; &Anchor",
        ]) + "\n" == console.out

        logger.debug(["test", anchoredval])
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  [0]=test",
            "DEBUG:  [1]=TestVal; &Anchor",
        ]) + "\n" == console.out

        logger.debug({"ichi": 1, anchoredkey: anchoredval})
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  [ichi]=>1",
            "DEBUG:  [TestKey; &KeyAnchor]=>TestVal; &Anchor",
        ]) + "\n" == console.out
Example #9
def process_yaml_file(
    merger: Merger, log: ConsolePrinter, rhs_yaml: Any, rhs_file: str,
    merger_primed: bool
):
    """Merge RHS document(s) into the prime document."""
    # Except for - (STDIN), each YAML_FILE must actually be a file; because
    # merge data is expected, this is a fatal failure.
    if rhs_file != "-" and not isfile(rhs_file):
        log.error("Not a file:  {}".format(rhs_file))
        return 2

    log.info(
        "Processing {}...".format(
            "STDIN" if rhs_file.strip() == "-" else rhs_file))

    return merge_multidoc(rhs_file, rhs_yaml, log, merger, merger_primed)
Example #10
    def _parse_yaml(source: str, config_string: bool):

        logging_args = SimpleNamespace(quiet=False, verbose=False, debug=False)
        log = ConsolePrinter(logging_args)

        yaml = Parsers.get_yaml_editor()

        # For better backward compatibility with PyYAML (which supports only
        # YAML 1.1), as used in previous GitLabForm versions, force ruamel.yaml
        # to default to YAML version 1.1 as well
        yaml.version = (1, 1)

        if config_string:
            config_string = textwrap.dedent(source)
            verbose("Reading config from the provided string.")
            (yaml_data, doc_loaded) = Parsers.get_yaml_data(yaml,
                                                            log,
                                                            config_string,
                                                            literal=True)
        else:
            config_path = source
            verbose(f"Reading config from file: {config_path}")
            (yaml_data,
             doc_loaded) = Parsers.get_yaml_data(yaml, log, config_path)

        if doc_loaded:
            debug("Config parsed successfully as YAML.")
        else:
            # an error message has already been printed via ConsolePrinter
            exit(EXIT_INVALID_INPUT)

        return yaml_data
Example #11
def main():
    """Main code."""
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)

    # For the remainder of processing, write output to the overwrite target
    if args.overwrite:
        args.output = args.overwrite

    # Merge all input files
    merger = Merger(log, None, MergerConfig(log, args))
    yaml_editor = Parsers.get_yaml_editor()
    exit_state = 0
    consumed_stdin = False
    merger_primed = False
    for yaml_file in args.yaml_files:
        if yaml_file.strip() == '-':
            consumed_stdin = True

        log.debug(
            "yaml_merge::main:  Processing file, {}".format(
                "STDIN" if yaml_file.strip() == "-" else yaml_file))
        proc_state = process_yaml_file(
            merger, log, yaml_editor, yaml_file, merger_primed)

        if proc_state == 0:
            merger_primed = True
        else:
            exit_state = proc_state
            break

    # Check for a waiting STDIN document
    if (exit_state == 0
        and not consumed_stdin
        and not args.nostdin
        and not sys.stdin.isatty()
    ):
        exit_state = process_yaml_file(
            merger, log, yaml_editor, '-', merger_primed)

    # Output the final document
    if exit_state == 0:
        write_output_document(args, log, merger, yaml_editor)

    sys.exit(exit_state)
Example #12
def main():
    """Main code."""
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)
    yaml_path = YAMLPath(args.query, pathsep=args.pathsep)

    # Prep the YAML parser
    yaml = Parsers.get_yaml_editor()

    # Attempt to open the YAML file; check for parsing errors
    (yaml_data, doc_loaded) = Parsers.get_yaml_data(
        yaml, log, args.yaml_file if args.yaml_file else "-")
    if not doc_loaded:
        # An error message has already been logged
        sys.exit(1)

    # Seek the queried value(s)
    discovered_nodes = []
    processor = EYAMLProcessor(log,
                               yaml_data,
                               binary=args.eyaml,
                               publickey=args.publickey,
                               privatekey=args.privatekey)
    try:
        for node in processor.get_eyaml_values(yaml_path, mustexist=True):
            log.debug("Got node from {}:".format(yaml_path),
                      data=node,
                      prefix="yaml_get::main:  ")
            discovered_nodes.append(NodeCoords.unwrap_node_coords(node))
    except YAMLPathException as ex:
        log.critical(ex, 1)
    except EYAMLCommandException as ex:
        log.critical(ex, 2)

    try:
        for node in discovered_nodes:
            if isinstance(node, (dict, list, CommentedSet)):
                print(json.dumps(Parsers.jsonify_yaml_data(node)))
            else:
                if node is None:
                    node = "\x00"
                print("{}".format(str(node).replace("\n", r"\n")))
    except RecursionError:
        log.critical(
            "The YAML data contains an infinitely recursing YAML Alias!", 1)
Example #13
def merge_matrix(
    log: ConsolePrinter, lhs_docs: List[Merger], rhs_docs: List[Merger]
) -> int:
    """Condense LHS and RHS multi-docs together into one."""
    return_state = 0
    for lhs_doc in lhs_docs:
        for rhs_doc in rhs_docs:
            try:
                lhs_doc.merge_with(rhs_doc.data)
            except MergeException as mex:
                log.error(mex)
                return_state = 41
                break
            except YAMLPathException as yex:
                log.error(yex)
                return_state = 42
                break

    return return_state
Example #14
def main():
    """Main code."""
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)
    exit_state = 0
    lhs_file = args.yaml_files[0]
    rhs_file = args.yaml_files[1]
    lhs_yaml = Parsers.get_yaml_editor()
    rhs_yaml = Parsers.get_yaml_editor()
    (lhs_docs, lhs_loaded) = get_docs(log, lhs_yaml, lhs_file)
    (rhs_docs, rhs_loaded) = get_docs(log, rhs_yaml, rhs_file)
    lhs_doc_count = len(lhs_docs) if lhs_loaded else 0
    rhs_doc_count = len(rhs_docs) if rhs_loaded else 0
    lhs_idx_set = (hasattr(args, "left_document_index")
                   and args.left_document_index is not None)
    rhs_idx_set = (hasattr(args, "right_document_index")
                   and args.right_document_index is not None)

    if not (lhs_loaded and rhs_loaded):
        # An error message has already been logged
        sys.exit(1)

    if lhs_doc_count > 1 and not lhs_idx_set:
        log.critical(
            ("--left-document-index|-L must be set; the source contains {}"
             " documents.").format(lhs_doc_count), 1)
    lhs_index = args.left_document_index if lhs_idx_set else 0
    lhs_document = get_doc(log, lhs_docs, lhs_index)

    if rhs_doc_count > 1 and not rhs_idx_set:
        log.critical(
            ("--right-document-index|-R must be set; the source contains {}"
             " documents.").format(rhs_doc_count), 1)
    rhs_index = args.right_document_index if rhs_idx_set else 0
    rhs_document = get_doc(log, rhs_docs, rhs_index)

    diff = Differ(DifferConfig(log, args),
                  log,
                  lhs_document,
                  ignore_eyaml_values=args.ignore_eyaml_values,
                  binary=args.eyaml,
                  publickey=args.publickey,
                  privatekey=args.privatekey)

    try:
        diff.compare_to(rhs_document)
    except EYAMLCommandException as ex:
        log.critical(ex, 1)

    exit_state = 1 if print_report(log, args, diff) else 0
    sys.exit(exit_state)
Example #15
    def test_null_docs_have_nothing_to_delete(self, capsys):
        args = SimpleNamespace(verbose=False, quiet=False, debug=True)
        logger = ConsolePrinter(args)
        processor = Processor(logger, None)

        deleted_nodes = []
        for nc in processor.delete_nodes("**"):
            deleted_nodes.append(nc)

        console = capsys.readouterr()
        assert "Refusing to delete nodes from a null document" in console.out
Example #16
def get_search_term(logger: ConsolePrinter,
                    expression: str) -> Optional[SearchTerms]:
    """
    Attempts to cast a search expression into a SearchTerms instance.  Returns
    None on failure.
    """
    # The leading character must be a known search operator
    check_operator = expression[0] if expression else ""
    if not (PathSearchMethods.is_operator(check_operator)
            or check_operator == '!'):
        logger.error(("Invalid search expression, '{}'.  The first symbol of" +
                      " every search expression must be one of:  {}").format(
                          expression,
                          ", ".join(PathSearchMethods.get_operators())))
        return None

    if not len(expression) > 1:
        # Empty expressions do nothing
        logger.error(
            "An EXPRESSION with only a search operator has no effect, '{}'.".
            format(expression))
        return None

    try:
        exterm = create_searchterms_from_pathattributes(
            YAMLPath("[*{}]".format(expression)).escaped[0][1])
    except YAMLPathException as ex:
        logger.error(("Invalid search expression, '{}', due to:  {}").format(
            expression, ex))
        return None

    return exterm
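A hypothetical invocation, mirroring how the yaml_paths main() examples consume this helper; a None result marks a malformed EXPRESSION, and the caller records a failing exit state:

exterm = get_search_term(log, "=default")  # "=" is a recognized search operator
if exterm is None:
    exit_state = 1
else:
    log.debug("Parsed search terms: {}".format(exterm))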
Example #17
def get_doc_mergers(
    log: ConsolePrinter, yaml_editor: YAML, config: MergerConfig,
    yaml_file: str
) -> Tuple[List[Merger], bool]:
    """Create a list of Mergers, one for each source document."""
    docs_loaded = True
    if yaml_file != "-" and not isfile(yaml_file):
        log.error("Not a file:  {}".format(yaml_file))
        return ([], False)

    doc_mergers: List[Merger] = []
    for (yaml_data, doc_loaded) in Parsers.get_yaml_multidoc_data(
        yaml_editor, log, yaml_file
    ):
        if not doc_loaded:
            # An error message has already been logged
            doc_mergers.clear()
            docs_loaded = False
            break

        doc_mergers.append(Merger(log, yaml_data, config))

    return (doc_mergers, docs_loaded)
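A sketch of the calling convention (the file name is hypothetical): a False flag means at least one subdocument failed to parse and the whole file is rejected, which the surrounding main() example maps to exit state 4:

(mergers, mergers_loaded) = get_doc_mergers(
    log, yaml_editor, merge_config, "lhs.yaml")
if not mergers_loaded:
    sys.exit(4)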
Example #18
def main():
    """Main code."""
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)
    exit_state = 0
    lhs_file = args.yaml_files[0]
    rhs_file = args.yaml_files[1]
    lhs_yaml = Parsers.get_yaml_editor()
    rhs_yaml = Parsers.get_yaml_editor()

    (lhs_document, doc_loaded) = Parsers.get_yaml_data(lhs_yaml, log, lhs_file)
    if not doc_loaded:
        # An error message has already been logged
        sys.exit(1)

    (rhs_document, doc_loaded) = Parsers.get_yaml_data(rhs_yaml, log, rhs_file)
    if not doc_loaded:
        # An error message has already been logged
        sys.exit(1)

    diff = Differ(DifferConfig(log, args),
                  log,
                  lhs_document,
                  ignore_eyaml_values=args.ignore_eyaml_values,
                  binary=args.eyaml,
                  publickey=args.publickey,
                  privatekey=args.privatekey)

    try:
        diff.compare_to(rhs_document)
    except EYAMLCommandException as ex:
        log.critical(ex, 1)

    exit_state = 1 if print_report(log, args, diff) else 0
    sys.exit(exit_state)
Example #19
def extract_values(data: dict, path: str) -> list:
    class Args:
        debug = True
        verbose = False
        quiet = True

    args = Args()
    log = ConsolePrinter(args)

    try:
        processor = Processor(log, data)
        nodes = processor.get_nodes(path)
        return [n.node for n in nodes]
    except YAMLPathException as ex:
        print(ex)
        return []
Example #20
def main():
    """Main code."""
    # Process any command-line arguments
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)
    search_values = True
    search_keys = False
    include_key_aliases = False
    include_value_aliases = False

    if args.onlykeynames:
        search_values = False
        search_keys = True
    elif args.keynames:
        search_keys = True

    if args.include_aliases is IncludeAliases.INCLUDE_ALL_ALIASES:
        include_key_aliases = True
        include_value_aliases = True
    elif args.include_aliases is IncludeAliases.INCLUDE_KEY_ALIASES:
        include_key_aliases = True
    elif args.include_aliases is IncludeAliases.INCLUDE_VALUE_ALIASES:
        include_value_aliases = True

    # Prepare the YAML processor
    yaml = get_yaml_editor()
    processor = EYAMLProcessor(log,
                               None,
                               binary=args.eyaml,
                               publickey=args.publickey,
                               privatekey=args.privatekey)

    # Process the input file(s)
    exit_state = 0

    # pylint: disable=too-many-nested-blocks
    for yaml_file in args.yaml_files:
        # Try to open the file
        yaml_data = get_yaml_data(yaml, log, yaml_file)
        if yaml_data is None:
            # An error message has already been logged
            exit_state = 3
            continue

        # Process all searches
        processor.data = yaml_data
        yaml_paths = []
        for expression in args.search:
            exterm = get_search_term(log, expression)
            log.debug(("yaml_paths::main:" +
                       "converting search expression '{}' into '{}'").format(
                           expression, exterm))
            if exterm is None:
                exit_state = 1
                continue

            for result in search_for_paths(
                    log,
                    processor,
                    yaml_data,
                    exterm,
                    args.pathsep,
                    search_values=search_values,
                    search_keys=search_keys,
                    search_anchors=args.refnames,
                    include_key_aliases=include_key_aliases,
                    include_value_aliases=include_value_aliases,
                    decrypt_eyaml=args.decrypt,
                    expand_children=args.expand):
                # Record only unique results
                add_entry = True
                for entry in yaml_paths:
                    if str(result) == str(entry[1]):
                        add_entry = False
                        break
                if add_entry:
                    yaml_paths.append((expression, result))

        if not yaml_paths:
            # Nothing further to do when there are no results
            continue

        if args.except_expression:
            for expression in args.except_expression:
                exterm = get_search_term(log, expression)
                log.debug(
                    ("yaml_paths::main:" +
                     "converted except expression '{}' into '{}'").format(
                         expression, exterm))
                if exterm is None:
                    exit_state = 1
                    continue

                for result in search_for_paths(
                        log,
                        processor,
                        yaml_data,
                        exterm,
                        args.pathsep,
                        search_values=search_values,
                        search_keys=search_keys,
                        search_anchors=args.refnames,
                        include_key_aliases=include_key_aliases,
                        include_value_aliases=include_value_aliases,
                        decrypt_eyaml=args.decrypt,
                        expand_children=args.expand):
                    for entry in yaml_paths:
                        if str(result) == str(entry[1]):
                            yaml_paths.remove(entry)
                            break  # Entries are already unique

        print_results(args, processor, yaml_file, yaml_paths)

    exit(exit_state)
Example #21
    def test_verbose_noisy(self, capsys):
        args = SimpleNamespace(verbose=True, quiet=False, debug=False)
        logger = ConsolePrinter(args)
        logger.verbose("Test")
        console = capsys.readouterr()
        assert console.out == "Test\n"
Example #22
def yield_children(logger: ConsolePrinter, data: Any,
                   terms: SearchTerms, pathsep: PathSeperators,
                   build_path: str, seen_anchors: List[str],
                   **kwargs: bool) -> Generator[YAMLPath, None, None]:
    """
    Dump the YAML Path of every child node beneath a given parent.

    Except for unwanted aliases, the dump is unconditional.
    """
    include_key_aliases: bool = kwargs.pop("include_key_aliases", True)
    include_value_aliases: bool = kwargs.pop("include_value_aliases", False)
    search_anchors: bool = kwargs.pop("search_anchors", False)
    logger.debug(
        "Dumping all children in data of type, {}:"
        .format(type(data)), data=data,
        prefix="yaml_paths::yield_children:  ")

    exclude_alias_matchers = [AnchorMatches.UNSEARCHABLE_ALIAS,
                              AnchorMatches.ALIAS_EXCLUDED]

    if isinstance(data, CommentedSeq):
        if not build_path and pathsep is PathSeperators.FSLASH:
            build_path = str(pathsep)
        build_path += "["

        for idx, ele in enumerate(data):
            anchor_matched = Searches.search_anchor(
                ele, terms, seen_anchors, search_anchors=search_anchors,
                include_aliases=include_value_aliases)
            logger.debug(
                ("yaml_paths::yield_children<list>:  "
                 + "element[{}] has anchor search => {}.")
                .format(idx, anchor_matched))

            # Build the temporary YAML Path using either Anchor or Index
            if anchor_matched is AnchorMatches.NO_ANCHOR:
                # Not an anchor/alias, so ref this node by its index
                tmp_path = build_path + str(idx) + "]"
            else:
                tmp_path = "{}&{}]".format(
                    build_path,
                    YAMLPath.escape_path_section(ele.anchor.value, pathsep)
                )

            if (not include_value_aliases
                    and anchor_matched in exclude_alias_matchers):
                continue

            if isinstance(ele, (CommentedMap, CommentedSeq)):
                for path in yield_children(
                        logger, ele, terms, pathsep, tmp_path, seen_anchors,
                        search_anchors=search_anchors,
                        include_key_aliases=include_key_aliases,
                        include_value_aliases=include_value_aliases):
                    yield path
            else:
                yield YAMLPath(tmp_path)

    elif isinstance(data, CommentedMap):
        if build_path:
            build_path += str(pathsep)
        elif pathsep is PathSeperators.FSLASH:
            build_path = str(pathsep)

        pool = data.non_merged_items()
        if include_key_aliases or include_value_aliases:
            pool = data.items()

        for key, val in pool:
            tmp_path = build_path + YAMLPath.escape_path_section(key, pathsep)

            key_anchor_matched = Searches.search_anchor(
                key, terms, seen_anchors, search_anchors=search_anchors,
                include_aliases=include_key_aliases)
            val_anchor_matched = Searches.search_anchor(
                val, terms, seen_anchors, search_anchors=search_anchors,
                include_aliases=include_value_aliases)
            logger.debug(
                ("yaml_paths::yield_children<dict>:  "
                 + "key[{}]:value have value anchor search => {}:{}.")
                .format(key, key_anchor_matched, val_anchor_matched))

            if (
                    (not include_key_aliases
                     and key_anchor_matched in exclude_alias_matchers)
                    or (not include_value_aliases
                        and val_anchor_matched in exclude_alias_matchers)
            ):
                continue

            if isinstance(val, (CommentedSeq, CommentedMap)):
                for path in yield_children(
                        logger, val, terms, pathsep, tmp_path, seen_anchors,
                        search_anchors=search_anchors,
                        include_key_aliases=include_key_aliases,
                        include_value_aliases=include_value_aliases):
                    yield path
            else:
                yield YAMLPath(tmp_path)

    else:
        if not build_path and pathsep is PathSeperators.FSLASH:
            build_path = str(pathsep)
        yield YAMLPath(build_path)
Example #23
def main():
    """Main code."""
    # Process any command-line arguments
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)
    search_values = True
    search_keys = False
    include_key_aliases = False
    include_value_aliases = False

    if args.onlykeynames:
        search_values = False
        search_keys = True
    elif args.keynames:
        search_keys = True

    if args.include_aliases is IncludeAliases.INCLUDE_ALL_ALIASES:
        include_key_aliases = True
        include_value_aliases = True
    elif args.include_aliases is IncludeAliases.INCLUDE_KEY_ALIASES:
        include_key_aliases = True
    elif args.include_aliases is IncludeAliases.INCLUDE_VALUE_ALIASES:
        include_value_aliases = True

    # Prepare the YAML processor
    yaml = Parsers.get_yaml_editor()
    processor = EYAMLProcessor(
        log, None, binary=args.eyaml,
        publickey=args.publickey, privatekey=args.privatekey)

    # Process the input file(s)
    exit_state = 0
    file_tally = -1
    consumed_stdin = False

    for yaml_file in args.yaml_files:
        file_tally += 1
        if yaml_file.strip() == "-":
            consumed_stdin = True

        log.debug(
            "yaml_merge::main:  Processing file, {}".format(
                "STDIN" if yaml_file.strip() == "-" else yaml_file))

        proc_state = process_yaml_file(
            args, yaml, log, yaml_file, processor, search_values, search_keys,
            include_key_aliases, include_value_aliases, file_tally
        )

        if proc_state != 0:
            exit_state = proc_state

    # Check for a waiting STDIN document
    if (exit_state == 0
        and not consumed_stdin
        and not args.nostdin
        and not sys.stdin.isatty()
    ):
        file_tally += 1
        exit_state = process_yaml_file(
            args, yaml, log, "-", processor, search_values, search_keys,
            include_key_aliases, include_value_aliases, file_tally
        )

    sys.exit(exit_state)
Example #24
def main():
    """Main code."""
    # Process any command-line arguments
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)
    processor = EYAMLProcessor(log, None, binary=args.eyaml)

    # Prep the YAML parser
    yaml = Parsers.get_yaml_editor()

    # Process the input file(s)
    in_file_count = len(args.yaml_files)
    exit_state = 0
    for yaml_file in args.yaml_files:
        file_changed = False
        backup_file = yaml_file + ".bak"
        seen_anchors = []

        # Each YAML_FILE must actually be a file
        if not isfile(yaml_file):
            log.error("Not a file:  {}".format(yaml_file))
            exit_state = 2
            continue

        # Don't bother with the file change update when there's only one input
        # file.
        if in_file_count > 1:
            log.info("Processing {}...".format(yaml_file))

        # Try to open the file
        (yaml_data, doc_loaded) = Parsers.get_yaml_data(yaml, log, yaml_file)
        if not doc_loaded:
            # An error message has already been logged
            exit_state = 3
            continue

        # Process all EYAML values
        processor.data = yaml_data
        for yaml_path in processor.find_eyaml_paths():
            # Use ::get_nodes() instead of ::get_eyaml_values() here in order
            # to ignore values that have already been rotated via their
            # Anchors.
            for node_coordinate in processor.get_nodes(yaml_path,
                                                       mustexist=True):
                # Ignore values which are Aliases for those already decrypted
                node = node_coordinate.node
                anchor_name = Anchors.get_node_anchor(node)
                if anchor_name is not None:
                    if anchor_name in seen_anchors:
                        continue

                    seen_anchors.append(anchor_name)

                log.verbose("Decrypting value(s) at {}.".format(yaml_path))
                processor.publickey = args.oldpublickey
                processor.privatekey = args.oldprivatekey

                try:
                    txtval = processor.decrypt_eyaml(node)
                except EYAMLCommandException as ex:
                    log.error(ex)
                    exit_state = 3
                    continue

                # Prefer block (folded) values unless the original YAML value
                # was already a massively long (string) line.
                output = EYAMLOutputFormats.BLOCK
                if not isinstance(node, FoldedScalarString):
                    output = EYAMLOutputFormats.STRING

                # Re-encrypt the value with new EYAML keys
                processor.publickey = args.newpublickey
                processor.privatekey = args.newprivatekey

                try:
                    processor.set_eyaml_value(yaml_path, txtval, output=output)
                except EYAMLCommandException as ex:
                    log.error(ex)
                    exit_state = 3
                    continue

                file_changed = True

        # Save the changes
        if file_changed:
            if args.backup:
                log.verbose("Saving a backup of {} to {}.".format(
                    yaml_file, backup_file))
                if exists(backup_file):
                    remove(backup_file)
                copy2(yaml_file, backup_file)

            log.verbose("Writing changed data to {}.".format(yaml_file))
            with open(yaml_file, 'w', encoding='utf-8') as yaml_dump:
                yaml.dump(yaml_data, yaml_dump)

    sys.exit(exit_state)
Example #25
    def get_yaml_data(parser: Any, logger: ConsolePrinter, source: str,
                      **kwargs) -> Tuple[Any, bool]:
        """
        Parse YAML/Compatible data and return the ruamel.yaml object result.

        All known issues are caught and distinctively logged.

        Parameters:
        1. parser (ruamel.yaml.YAML) The YAML data parser
        2. logger (ConsolePrinter) The logging facility
        3. source (str) The source file or serialized literal to load; can be -
           for reading from STDIN (implies literal=True)

        Keyword Parameters:
        * literal (bool) `source` is literal serialized YAML data rather than a
          file-spec, so load it directly

        Returns:  Tuple[Any, bool] A tuple containing the document and its
        success/fail state.  The first field is the parsed document; will be
        None for empty documents and for documents which could not be read.
        The second field will be True when there were no errors during parsing
        and False, otherwise.
        """
        literal = kwargs.pop("literal", False)
        yaml_data = None
        data_available = True

        # This code traps errors and warnings from ruamel.yaml, substituting
        # lengthy stack-dumps with specific, meaningful feedback.  Further,
        # some warnings are treated as errors by ruamel.yaml, so these are also
        # coalesced into cleaner feedback.
        try:
            with warnings.catch_warnings():
                warnings.filterwarnings("error")
                if source == "-":
                    yaml_data = parser.load(stdin.read())
                else:
                    if literal:
                        yaml_data = parser.load(source)
                    else:
                        with open(source, 'r') as fhnd:
                            yaml_data = parser.load(fhnd)
        except KeyboardInterrupt:
            logger.error("Aborting data load due to keyboard interrupt!")
            data_available = False
        except FileNotFoundError:
            logger.error("File not found:  {}".format(source))
            data_available = False
        except ParserError as ex:
            logger.error("YAML parsing error {}:  {}".format(
                str(ex.problem_mark).lstrip(), ex.problem))
            data_available = False
        except ComposerError as ex:
            logger.error("YAML composition error {}:  {}".format(
                str(ex.problem_mark).lstrip(), ex.problem))
            data_available = False
        except ConstructorError as ex:
            logger.error("YAML construction error {}:  {}".format(
                str(ex.problem_mark).lstrip(), ex.problem))
            data_available = False
        except ScannerError as ex:
            logger.error("YAML syntax error {}:  {}".format(
                str(ex.problem_mark).lstrip(), ex.problem))
            data_available = False
        except DuplicateKeyError as dke:
            omits = [
                "while constructing",
                "To suppress this",
                "readthedocs",
                "future releases",
                "the new API",
            ]
            message = str(dke).split("\n")
            newmsg = ""
            for line in message:
                line = line.strip()
                if not line:
                    continue
                write_line = True
                for omit in omits:
                    if omit in line:
                        write_line = False
                        break
                if write_line:
                    newmsg += "\n   " + line
            logger.error("Duplicate Hash key detected:  {}".format(newmsg))
            data_available = False
        except ReusedAnchorWarning as raw:
            logger.error("Duplicate YAML Anchor detected:  {}".format(
                str(raw).replace("occurrence   ",
                                 "occurrence ").replace("\n", "\n   ")))
            data_available = False

        return (yaml_data, data_available)
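A minimal usage sketch (the file name is hypothetical): get_yaml_data() traps parse failures rather than raising, so callers test the boolean flag instead of the possibly-None document, exactly as the main() examples above do:

yaml_editor = Parsers.get_yaml_editor()
(yaml_data, doc_loaded) = Parsers.get_yaml_data(yaml_editor, log, "config.yaml")
if not doc_loaded:
    sys.exit(1)  # the error was already reported through ConsolePrinter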
Example #26
    def get_yaml_multidoc_data(
            parser: Any, logger: ConsolePrinter, source: str,
            **kwargs) -> Generator[Tuple[Any, bool], None, None]:
        """
        Parse YAML/Compatible multi-docs and yield each ruamel.yaml object.

        All known issues are caught and distinctively logged.

        Parameters:
        1. parser (ruamel.yaml.YAML) The YAML data parser
        2. logger (ConsolePrinter) The logging facility
        3. source (str) The source file to load; can be - for reading from
           STDIN

        Keyword Parameters:
        * literal (bool) `source` is literal serialized YAML data rather than a
          file-spec, so load it directly

        Returns:  Generator[Tuple[Any, bool], None, None] A tuple for each
        document as it is parsed.  The first field is the parsed document; will
        be None for empty documents and for documents which could not be read.
        The second field will be True when there were no errors during parsing
        and False, otherwise.
        """
        literal = kwargs.pop("literal", False)

        # This code traps errors and warnings from ruamel.yaml, substituting
        # lengthy stack-dumps with specific, meaningful feedback.  Further,
        # some warnings are treated as errors by ruamel.yaml, so these are also
        # coalesced into cleaner feedback.
        has_error = False
        try:
            with warnings.catch_warnings():
                warnings.filterwarnings("error")
                if source == "-":
                    doc_yielded = False
                    for document in parser.load_all(stdin.read()):
                        doc_yielded = True
                        logger.debug(
                            "Yielding document from {}:".format(source),
                            prefix="get_yaml_multidoc_data: ",
                            data=document)
                        yield (document, True)

                    # The user sent a deliberately empty document via STDIN
                    if not doc_yielded:
                        yield ("", True)
                else:
                    if literal:
                        for document in parser.load_all(source):
                            yield (document, True)
                    else:
                        with open(source, 'r') as fhnd:
                            for document in parser.load_all(fhnd):
                                logger.debug(
                                    "Yielding document from {}:".format(
                                        source),
                                    prefix="get_yaml_multidoc_data: ",
                                    data=document)
                                yield (document, True)
        except KeyboardInterrupt:
            has_error = True
            logger.error("Aborting data load due to keyboard interrupt!")
        except FileNotFoundError:
            has_error = True
            logger.error("File not found:  {}".format(source))
        except ParserError as ex:
            has_error = True
            logger.error("YAML parsing error {}:  {}".format(
                str(ex.problem_mark).lstrip(), ex.problem))
        except ComposerError as ex:
            has_error = True
            logger.error("YAML composition error {}:  {}".format(
                str(ex.problem_mark).lstrip(), ex.problem))
        except ConstructorError as ex:
            has_error = True
            logger.error("YAML construction error {}:  {}".format(
                str(ex.problem_mark).lstrip(), ex.problem))
        except ScannerError as ex:
            has_error = True
            logger.error("YAML syntax error {}:  {}".format(
                str(ex.problem_mark).lstrip(), ex.problem))
        except DuplicateKeyError as dke:
            has_error = True
            omits = [
                "while constructing",
                "To suppress this",
                "readthedocs",
                "future releases",
                "the new API",
            ]
            message = str(dke).split("\n")
            newmsg = ""
            for line in message:
                line = line.strip()
                if not line:
                    continue
                write_line = True
                for omit in omits:
                    if omit in line:
                        write_line = False
                        break
                if write_line:
                    newmsg += "\n   " + line
            logger.error("Duplicate Hash key detected:  {}".format(newmsg))
        except ReusedAnchorWarning as raw:
            has_error = True
            logger.error("Duplicate YAML Anchor detected:  {}".format(
                str(raw).replace("occurrence   ",
                                 "occurrence ").replace("\n", "\n   ")))

        if has_error:
            yield (None, False)
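A sketch of consuming the generator (the file name is hypothetical), following the pattern used by get_doc_mergers() above; a final (None, False) tuple signals that loading failed part-way through:

for (document, doc_loaded) in Parsers.get_yaml_multidoc_data(
    yaml_editor, log, "multi.yaml"
):
    if not doc_loaded:
        break  # an error message has already been logged
    log.debug("Loaded one document:", data=document)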
Example #27
def search_for_paths(logger: ConsolePrinter, processor: EYAMLProcessor,
                     data: Any, terms: SearchTerms,
                     pathsep: PathSeperators = PathSeperators.DOT,
                     build_path: str = "",
                     seen_anchors: Optional[List[str]] = None,
                     **kwargs: bool) -> Generator[YAMLPath, None, None]:
    """
    Recursively search a data structure for nodes matching an expression.

    The nodes can be keys, values, and/or elements.  When dealing with anchors
    and their aliases, the caller indicates whether to include only the
    original anchor or the anchor and all of its (duplicate) aliases.
    """
    search_values: bool = kwargs.pop("search_values", True)
    search_keys: bool = kwargs.pop("search_keys", False)
    search_anchors: bool = kwargs.pop("search_anchors", False)
    include_key_aliases: bool = kwargs.pop("include_key_aliases", True)
    include_value_aliases: bool = kwargs.pop("include_value_aliases", False)
    decrypt_eyaml: bool = kwargs.pop("decrypt_eyaml", False)
    expand_children: bool = kwargs.pop("expand_children", False)
    strsep = str(pathsep)
    invert = terms.inverted
    method = terms.method
    term = terms.term

    if seen_anchors is None:
        seen_anchors = []

    if isinstance(data, CommentedSeq):
        # Build the path
        if not build_path and pathsep is PathSeperators.FSLASH:
            build_path = strsep
        build_path += "["

        for idx, ele in enumerate(data):
            # Any element may or may not have an Anchor/Alias
            anchor_matched = Searches.search_anchor(
                ele, terms, seen_anchors, search_anchors=search_anchors,
                include_aliases=include_value_aliases)
            logger.debug(
                ("yaml_paths::search_for_paths<list>:"
                 + "anchor search => {}.")
                .format(anchor_matched)
            )

            # Build the temporary YAML Path using either Anchor or Index
            if anchor_matched is AnchorMatches.NO_ANCHOR:
                # Not an anchor/alias, so ref this node by its index
                tmp_path = build_path + str(idx) + "]"
            else:
                tmp_path = "{}&{}]".format(
                    build_path,
                    YAMLPath.escape_path_section(ele.anchor.value, pathsep)
                )

            if anchor_matched is AnchorMatches.ALIAS_EXCLUDED:
                continue

            if anchor_matched in [AnchorMatches.MATCH,
                                  AnchorMatches.ALIAS_INCLUDED]:
                logger.debug(
                    ("yaml_paths::search_for_paths<list>:"
                     + "yielding an Anchor/Alias match, {}.")
                    .format(tmp_path)
                )
                if expand_children:
                    for path in yield_children(
                            logger, ele, terms, pathsep, tmp_path,
                            seen_anchors, search_anchors=search_anchors,
                            include_key_aliases=include_key_aliases,
                            include_value_aliases=include_value_aliases):
                        yield path
                else:
                    yield YAMLPath(tmp_path)
                continue

            if isinstance(ele, (CommentedSeq, CommentedMap)):
                logger.debug(
                    "Recursing into complex data:", data=ele,
                    prefix="yaml_paths::search_for_paths<list>:  ",
                    footer=">>>> >>>> >>>> >>>> >>>> >>>> >>>>")
                for subpath in search_for_paths(
                        logger, processor, ele, terms, pathsep, tmp_path,
                        seen_anchors, search_values=search_values,
                        search_keys=search_keys, search_anchors=search_anchors,
                        include_key_aliases=include_key_aliases,
                        include_value_aliases=include_value_aliases,
                        decrypt_eyaml=decrypt_eyaml,
                        expand_children=expand_children
                ):
                    logger.debug(
                        "Yielding RECURSED match, {}.".format(subpath),
                        prefix="yaml_paths::search_for_paths<list>:  ",
                        footer="<<<< <<<< <<<< <<<< <<<< <<<< <<<<"
                    )
                    yield subpath
            elif search_values:
                if (anchor_matched is AnchorMatches.UNSEARCHABLE_ALIAS
                        and not include_value_aliases):
                    continue

                check_value = ele
                if decrypt_eyaml and processor.is_eyaml_value(ele):
                    check_value = processor.decrypt_eyaml(ele)

                matches = Searches.search_matches(method, term, check_value)
                if (matches and not invert) or (invert and not matches):
                    logger.debug(
                        ("yaml_paths::search_for_paths<list>:"
                         + "yielding VALUE match, {}:  {}."
                        ).format(check_value, tmp_path)
                    )
                    yield YAMLPath(tmp_path)

    # pylint: disable=too-many-nested-blocks
    elif isinstance(data, CommentedMap):
        if build_path:
            build_path += strsep
        elif pathsep is PathSeperators.FSLASH:
            build_path = strsep

        pool = data.non_merged_items()
        if include_key_aliases or include_value_aliases:
            pool = data.items()

        for key, val in pool:
            tmp_path = build_path + YAMLPath.escape_path_section(key, pathsep)

            # Search the value anchor to have it on record, in case the key
            # anchor match would otherwise block the value anchor from
            # appearing in seen_anchors (which is important).
            val_anchor_matched = Searches.search_anchor(
                val, terms, seen_anchors, search_anchors=search_anchors,
                include_aliases=include_value_aliases)
            logger.debug(
                ("yaml_paths::search_for_paths<dict>:"
                 + "VALUE anchor search => {}.")
                .format(val_anchor_matched)
            )

            # Search the key when the caller wishes it.
            if search_keys:
                # The key itself may be an Anchor or Alias.  Search it when the
                # caller wishes.
                key_anchor_matched = Searches.search_anchor(
                    key, terms, seen_anchors, search_anchors=search_anchors,
                    include_aliases=include_key_aliases)
                logger.debug(
                    ("yaml_paths::search_for_paths<dict>:"
                     + "KEY anchor search, {}:  {}.")
                    .format(key, key_anchor_matched)
                )

                if key_anchor_matched in [AnchorMatches.MATCH,
                                          AnchorMatches.ALIAS_INCLUDED]:
                    logger.debug(
                        ("yaml_paths::search_for_paths<dict>:"
                         + "yielding a KEY-ANCHOR match, {}."
                        ).format(key, tmp_path)
                    )
                    if expand_children:
                        for path in yield_children(
                                logger, val, terms, pathsep, tmp_path,
                                seen_anchors, search_anchors=search_anchors,
                                include_key_aliases=include_key_aliases,
                                include_value_aliases=include_value_aliases):
                            yield path
                    else:
                        yield YAMLPath(tmp_path)
                    continue

                # Search the name of the key, itself
                matches = Searches.search_matches(method, term, key)
                if (matches and not invert) or (invert and not matches):
                    logger.debug(
                        ("yaml_paths::search_for_paths<dict>:"
                         + "yielding KEY name match, {}:  {}."
                        ).format(key, tmp_path)
                    )
                    if expand_children:
                        # Include every non-excluded child node under this
                        # matched parent node.
                        for path in yield_children(
                                logger, val, terms, pathsep, tmp_path,
                                seen_anchors, search_anchors=search_anchors,
                                include_key_aliases=include_key_aliases,
                                include_value_aliases=include_value_aliases):
                            yield path
                    else:
                        # No other matches within this node matter because they
                        # are already in the result.
                        yield YAMLPath(tmp_path)
                    continue

            # The value may itself be anchored; search it if requested
            if val_anchor_matched is AnchorMatches.ALIAS_EXCLUDED:
                continue

            if val_anchor_matched in [AnchorMatches.MATCH,
                                      AnchorMatches.ALIAS_INCLUDED]:
                logger.debug(
                    ("yaml_paths::search_for_paths<dict>:"
                     + "yielding a VALUE-ANCHOR match, {}.")
                    .format(tmp_path)
                )
                if expand_children:
                    for path in yield_children(
                            logger, val, terms, pathsep, tmp_path,
                            seen_anchors, search_anchors=search_anchors,
                            include_key_aliases=include_key_aliases,
                            include_value_aliases=include_value_aliases):
                        yield path
                else:
                    yield YAMLPath(tmp_path)
                continue

            if isinstance(val, (CommentedSeq, CommentedMap)):
                logger.debug(
                    "Recursing into complex data:", data=val,
                    prefix="yaml_paths::search_for_paths<dict>:  ",
                    footer=">>>> >>>> >>>> >>>> >>>> >>>> >>>>"
                )
                for subpath in search_for_paths(
                        logger, processor, val, terms, pathsep, tmp_path,
                        seen_anchors, search_values=search_values,
                        search_keys=search_keys, search_anchors=search_anchors,
                        include_key_aliases=include_key_aliases,
                        include_value_aliases=include_value_aliases,
                        decrypt_eyaml=decrypt_eyaml,
                        expand_children=expand_children
                ):
                    logger.debug(
                        "Yielding RECURSED match, {}.".format(subpath),
                        prefix="yaml_paths::search_for_paths<dict>:  ",
                        footer="<<<< <<<< <<<< <<<< <<<< <<<< <<<<"
                    )
                    yield subpath
            elif search_values:
                if (val_anchor_matched is AnchorMatches.UNSEARCHABLE_ALIAS
                        and not include_value_aliases):
                    continue

                check_value = val
                if decrypt_eyaml and processor.is_eyaml_value(val):
                    check_value = processor.decrypt_eyaml(val)

                matches = Searches.search_matches(method, term, check_value)
                if (matches and not invert) or (invert and not matches):
                    logger.debug(
                        ("yaml_paths::search_for_paths<dict>:"
                         + "yielding VALUE match, {}:  {}."
                        ).format(check_value, tmp_path)
                    )
                    yield YAMLPath(tmp_path)
Example #28
    def test_debug_off(self, capsys):
        args = SimpleNamespace(verbose=False, quiet=False, debug=False)
        logger = ConsolePrinter(args)
        logger.debug("Test")
        console = capsys.readouterr()
        assert not console.out
Example #29
def main():
    """Main code."""
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)
    change_path = YAMLPath(args.change, pathsep=args.pathsep)
    must_exist = args.mustexist or args.saveto

    # Obtain the replacement value
    consumed_stdin = False
    new_value = None
    has_new_value = False
    if args.value or args.value == "":
        new_value = args.value
        has_new_value = True
    elif args.stdin:
        new_value = ''.join(sys.stdin.readlines())
        consumed_stdin = True
        has_new_value = True
    elif args.file:
        with open(args.file, 'r') as fhnd:
            new_value = fhnd.read().rstrip()
        has_new_value = True
    elif args.null:
        new_value = None
        has_new_value = True
    elif args.random is not None:
        new_value = ''.join(
            secrets.choice(args.random_from) for _ in range(args.random))
        has_new_value = True

    # Prep the YAML parser
    yaml = Parsers.get_yaml_editor()

    # Attempt to open the YAML file; check for parsing errors
    if args.yaml_file:
        yaml_data = _try_load_input_file(args, log, yaml, change_path,
                                         new_value)
        if args.yaml_file.strip() == '-':
            consumed_stdin = True

    # Check for a waiting STDIN document
    if (not consumed_stdin and not args.yaml_file and not args.nostdin
            and not sys.stdin.isatty()):
        args.yaml_file = "-"
        yaml_data = _try_load_input_file(args, log, yaml, change_path,
                                         new_value)

    # Load the present nodes at the specified YAML Path
    processor = EYAMLProcessor(log,
                               yaml_data,
                               binary=args.eyaml,
                               publickey=args.publickey,
                               privatekey=args.privatekey)
    change_node_coordinates = _get_nodes(
        log,
        processor,
        change_path,
        must_exist=must_exist,
        default_value=("" if new_value else " "))

    old_format = YAMLValueFormats.DEFAULT
    if len(change_node_coordinates) == 1:
        # When there is exactly one result, its old format can be known.  This
        # is necessary to retain whether the replacement value should be
        # represented later as a multi-line string when the new value is to be
        # encrypted.
        old_format = YAMLValueFormats.from_node(
            change_node_coordinates[0].node)

    # Check the value(s), if desired
    if args.check:
        for node_coordinate in change_node_coordinates:
            if processor.is_eyaml_value(node_coordinate.node):
                # Sanity check:  If either --publickey or --privatekey were set
                # then they must both be set in order to decrypt this value.
                # This is enforced only when the value must be decrypted due to
                # a --check request.
                if ((args.publickey and not args.privatekey)
                        or (args.privatekey and not args.publickey)):
                    log.error(
                        "Either both or neither of the private and public"
                        + " EYAML keys must be set when --check must decrypt"
                        + " the old value.")
                    sys.exit(1)

                try:
                    check_value = processor.decrypt_eyaml(node_coordinate.node)
                except EYAMLCommandException as ex:
                    log.critical(ex, 1)
            else:
                check_value = node_coordinate.node

            if args.check != check_value:
                log.critical(
                    '"{}" does not match the check value.'.format(args.check),
                    20)

    # Save the old value, if desired and possible
    if args.saveto:
        # Only one can be saved; otherwise it is impossible to meaningfully
        # convey to the end-user from exactly which other YAML node each saved
        # value came.
        if len(change_node_coordinates) > 1:
            log.critical(
                "It is impossible to meaningly save more than one matched" +
                " value.  Please omit --saveto or set --change to affect" +
                " exactly one value.", 1)

        saveto_path = YAMLPath(args.saveto, pathsep=args.pathsep)
        log.verbose("Saving the old value to {}.".format(saveto_path))

        # Folded EYAML values have their embedded newlines converted to spaces
        # when read.  As such, writing them back out breaks their original
        # format, despite being properly typed.  To restore the original
        # written form, reverse the conversion, here.
        old_value = change_node_coordinates[0].node
        if ((old_format is YAMLValueFormats.FOLDED
             or old_format is YAMLValueFormats.LITERAL)
                and EYAMLProcessor.is_eyaml_value(old_value)):
            old_value = old_value.replace(" ", "\n")

        try:
            processor.set_value(saveto_path,
                                Nodes.clone_node(old_value),
                                value_format=old_format,
                                tag=args.tag)
        except YAMLPathException as ex:
            log.critical(ex, 1)

    # Set the requested value
    log.verbose("Applying changes to {}.".format(change_path))
    if args.delete:
        # Destroy the collected nodes (from their parents) in the reverse order
        # they were discovered.  This is necessary lest Array elements be
        # improperly handled, leading to unwanted data loss.
        _delete_nodes(log, processor, change_node_coordinates)
    elif args.aliasof:
        # Assign the change nodes as Aliases of whatever --aliasof points to
        _alias_nodes(log, processor, change_node_coordinates, args.aliasof,
                     args.anchor)
    elif args.eyamlcrypt:
        # If the user hasn't specified a format, use the same format as the
        # value being replaced, if known.
        format_type = YAMLValueFormats.from_str(args.format)
        if format_type is YAMLValueFormats.DEFAULT:
            format_type = old_format

        output_type = EYAMLOutputFormats.STRING
        if format_type in [YAMLValueFormats.FOLDED, YAMLValueFormats.LITERAL]:
            output_type = EYAMLOutputFormats.BLOCK

        try:
            processor.set_eyaml_value(change_path,
                                      new_value,
                                      output=output_type,
                                      mustexist=False)
        except EYAMLCommandException as ex:
            log.critical(ex, 2)
    elif has_new_value:
        try:
            processor.set_value(change_path,
                                new_value,
                                value_format=args.format,
                                mustexist=must_exist,
                                tag=args.tag)
        except YAMLPathException as ex:
            log.critical(ex, 1)
    elif args.tag:
        _tag_nodes(processor.data, args.tag, change_node_coordinates)

    # Write out the result
    write_output_document(args, log, yaml, yaml_data)
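The yaml-set entry point above ultimately funnels plain (non-EYAML) writes through processor.set_value before writing the document back out. Below is a compact sketch of that same load, set, and dump flow outside the CLI wrapper. The file name and YAML Path are hypothetical, and the (data, loaded) return shape of Parsers.get_yaml_data is an assumption about the yamlpath API rather than something shown verbatim in the example above.

# A minimal sketch of the load -> set_value -> dump flow that main() wraps.
# "config.yaml" and the YAML Path are illustrative only; Parsers.get_yaml_data
# and its (data, loaded) return pair are assumptions about the yamlpath API.
import sys
from types import SimpleNamespace

from yamlpath import Processor, YAMLPath
from yamlpath.common import Parsers
from yamlpath.exceptions import YAMLPathException
from yamlpath.wrappers import ConsolePrinter

log = ConsolePrinter(SimpleNamespace(verbose=False, quiet=True, debug=False))
yaml = Parsers.get_yaml_editor()

(yaml_data, doc_loaded) = Parsers.get_yaml_data(yaml, log, "config.yaml")
if doc_loaded:
    processor = Processor(log, yaml_data)
    try:
        # mustexist=False creates the node when it is missing, mirroring the
        # non-EYAML branch of main() above.
        processor.set_value(YAMLPath("app.settings.timeout"), 30, mustexist=False)
    except YAMLPathException as ex:
        log.critical(ex, 1)
    yaml.dump(yaml_data, sys.stdout)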
    def test_debug_noisy(self, capsys):
        args = SimpleNamespace(verbose=False, quiet=False, debug=True)
        logger = ConsolePrinter(args)
        anchoredkey = PlainScalarString("TestKey", anchor="KeyAnchor")
        anchoredval = PlainScalarString("TestVal", anchor="Anchor")
        foldedstr = "123456789 123456789 123456789"
        foldedstrfolds = [10, 20]
        foldedval = FoldedScalarString(foldedstr)
        foldedval.fold_pos = foldedstrfolds

        logger.debug(anchoredval)
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  (&Anchor)TestVal",
        ]) + "\n" == console.out

        logger.debug(["test", anchoredval])
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  [0]test<class 'str'>",
            "DEBUG:  [1](&Anchor)TestVal<class 'ruamel.yaml.scalarstring.PlainScalarString'>",
        ]) + "\n" == console.out

        logger.debug({"ichi": 1, anchoredkey: anchoredval})
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  [ichi]1<class 'int'>",
            "DEBUG:  [TestKey](&KeyAnchor,&Anchor)TestVal<class 'ruamel.yaml.scalarstring.PlainScalarString'>",
        ]) + "\n" == console.out

        logger.debug({"ichi": 1, anchoredkey: "non-anchored value"})
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  [ichi]1<class 'int'>",
            "DEBUG:  [TestKey](&KeyAnchor,_)non-anchored value<class 'str'>",
        ]) + "\n" == console.out

        logger.debug({"ichi": 1, "non-anchored-key": anchoredval})
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  [ichi]1<class 'int'>",
            "DEBUG:  [non-anchored-key](_,&Anchor)TestVal<class 'ruamel.yaml.scalarstring.PlainScalarString'>",
        ]) + "\n" == console.out

        tagged_value = "value"
        tagged_value_node = TaggedScalar(tagged_value, tag="!tag")
        tagged_sequence = CommentedSeq(["a", "b"])
        tagged_sequence.yaml_set_tag("!raz")
        selfref_value = "self_referring"
        selfref_value_node = TaggedScalar(selfref_value, tag="!self_referring")
        logger.debug(
            "test_wrappers_consoleprinter:",
            prefix="test_debug_noisy:  ",
            header="--- HEADER ---",
            footer="=== FOOTER ===",
            data_header="+++ DATA HEADER +++",
            data_footer="::: DATA FOOTER :::",
            data=CommentedMap({
                "key": "value",
                "tagged": tagged_value_node,
                tagged_value_node: "untagged value",
                selfref_value_node: selfref_value_node,
                "array": ["ichi", "ni", "san"],
                "tagged_array": tagged_sequence,
                "aoh": [{"id": 1},{"id": 2},{"id": 3}],
                "aoa": [[True, True], [True, False], [False, True]],
                "dod": {"a": {"b": {"c": "d"}}},
                "set": CommentedSet(["one", "two"]),
            })
        )
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  test_debug_noisy:  --- HEADER ---",
            "DEBUG:  test_debug_noisy:  test_wrappers_consoleprinter:",
            "DEBUG:  test_debug_noisy:  +++ DATA HEADER +++",
            "DEBUG:  test_debug_noisy:  [key]value<class 'str'>",
            "DEBUG:  test_debug_noisy:  [tagged]<_,!tag>value<class 'ruamel.yaml.comments.TaggedScalar'>(<class 'str'>)",
            "DEBUG:  test_debug_noisy:  [value]<!tag,_>untagged value<class 'str'>",
            "DEBUG:  test_debug_noisy:  [self_referring]<!self_referring,!self_referring>self_referring<class 'ruamel.yaml.comments.TaggedScalar'>(<class 'str'>)",
            "DEBUG:  test_debug_noisy:  [array][0]ichi<class 'str'>",
            "DEBUG:  test_debug_noisy:  [array][1]ni<class 'str'>",
            "DEBUG:  test_debug_noisy:  [array][2]san<class 'str'>",
            "DEBUG:  test_debug_noisy:  [tagged_array]<_,!raz>[0]a<class 'str'>",
            "DEBUG:  test_debug_noisy:  [tagged_array]<_,!raz>[1]b<class 'str'>",
            "DEBUG:  test_debug_noisy:  [aoh][0][id]1<class 'int'>",
            "DEBUG:  test_debug_noisy:  [aoh][1][id]2<class 'int'>",
            "DEBUG:  test_debug_noisy:  [aoh][2][id]3<class 'int'>",
            "DEBUG:  test_debug_noisy:  [aoa][0][0]True<class 'bool'>",
            "DEBUG:  test_debug_noisy:  [aoa][0][1]True<class 'bool'>",
            "DEBUG:  test_debug_noisy:  [aoa][1][0]True<class 'bool'>",
            "DEBUG:  test_debug_noisy:  [aoa][1][1]False<class 'bool'>",
            "DEBUG:  test_debug_noisy:  [aoa][2][0]False<class 'bool'>",
            "DEBUG:  test_debug_noisy:  [aoa][2][1]True<class 'bool'>",
            "DEBUG:  test_debug_noisy:  [dod][a][b][c]d<class 'str'>",
            "DEBUG:  test_debug_noisy:  [set]{one}<class 'str'>",
            "DEBUG:  test_debug_noisy:  [set]{two}<class 'str'>",
            "DEBUG:  test_debug_noisy:  ::: DATA FOOTER :::",
            "DEBUG:  test_debug_noisy:  === FOOTER ===",
        ]) + "\n" == console.out

        logger.debug(tagged_value_node)
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  <!tag>value<class 'ruamel.yaml.comments.TaggedScalar'>(<class 'str'>)",
        ])

        logger.debug(tagged_sequence)
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  [tagged_array]<!raz>[0]a<class 'str'>",
            "DEBUG:  [tagged_array]<!raz>[1]b<class 'str'>",
        ])

        nc = NodeCoords(
            "value",
            dict(key="value"),
            "key",
            YAMLPath("doc_root.key"),
            [   (dict(doc_root=dict(key="value")), "doc_root"),
                (dict(key="value"), "key")],
            (PathSegmentTypes.KEY, "key")
        )
        logger.debug(
            "A node coordinate:", prefix="test_debug_noisy:  ", data=nc)
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  test_debug_noisy:  A node coordinate:",
            "DEBUG:  test_debug_noisy:  (path)doc_root.key",
            "DEBUG:  test_debug_noisy:  (segment)[0]PathSegmentTypes.KEY<enum 'PathSegmentTypes'>",
            "DEBUG:  test_debug_noisy:  (segment)[1]key<class 'str'>",
            "DEBUG:  test_debug_noisy:  (node)value",
            "DEBUG:  test_debug_noisy:  (parent)[key]value<class 'str'>",
            "DEBUG:  test_debug_noisy:  (parentref)key",
            "DEBUG:  test_debug_noisy:  (ancestry)[0][0][doc_root][key]value<class 'str'>",
            "DEBUG:  test_debug_noisy:  (ancestry)[0][1]doc_root<class 'str'>",
            "DEBUG:  test_debug_noisy:  (ancestry)[1][0][key]value<class 'str'>",
            "DEBUG:  test_debug_noisy:  (ancestry)[1][1]key<class 'str'>",
        ]) + "\n" == console.out

        logger.debug(foldedval)
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG:  {}<class 'ruamel.yaml.scalarstring.FoldedScalarString'>,folded@{}".format(foldedstr, foldedstrfolds)
        ])
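The final check above concerns a FoldedScalarString whose fold_pos attribute records where line folds should occur. A small hedged sketch of that same attribute used directly with ruamel.yaml follows; the exact dumped layout is an assumption, since the test only inspects the debug representation.

# A minimal sketch (not part of the test suite) of the fold_pos attribute used
# above.  Only the fold offsets mirror the test data; the dumped layout is an
# assumption about ruamel.yaml's folded-scalar representer.
import sys
from ruamel.yaml import YAML
from ruamel.yaml.comments import CommentedMap
from ruamel.yaml.scalarstring import FoldedScalarString

folded = FoldedScalarString("123456789 123456789 123456789")
folded.fold_pos = [10, 20]  # the same fold offsets used by test_debug_noisy

yaml = YAML()
yaml.dump(CommentedMap({"folded_key": folded}), sys.stdout)
# Expected shape (assumption): a folded block scalar ("folded_key: >-") broken
# near the recorded offsets.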