Example 1
def write_output_document(args, log, merger, yaml_editor):
    """Save a backup of the overwrite file, if requested."""
    if args.backup:
        backup_file = args.overwrite + ".bak"
        log.verbose(
            "Saving a backup of {} to {}."
            .format(args.overwrite, backup_file))
        if exists(backup_file):
            remove(backup_file)
        copy2(args.overwrite, backup_file)

    document_is_json = (
        merger.prepare_for_dump(yaml_editor, args.output)
        is OutputDocTypes.JSON)
    if args.output:
        with open(args.output, 'w') as out_fhnd:
            if document_is_json:
                json.dump(Parsers.jsonify_yaml_data(merger.data), out_fhnd)
            else:
                yaml_editor.dump(merger.data, out_fhnd)
    else:
        if document_is_json:
            json.dump(Parsers.jsonify_yaml_data(merger.data), sys.stdout)
        else:
            yaml_editor.dump(merger.data, sys.stdout)
Example 2
def write_output_document(
    args: argparse.Namespace, log: ConsolePrinter, yaml_editor: YAML,
    docs: List[Merger]
) -> None:
    """Save a backup of the overwrite file, if requested."""
    if args.backup:
        backup_file = args.overwrite + ".bak"
        log.verbose(
            "Saving a backup of {} to {}."
            .format(args.overwrite, backup_file))
        if exists(backup_file):
            remove(backup_file)
        copy2(args.overwrite, backup_file)

    document_is_json = (
        docs[0].prepare_for_dump(yaml_editor, args.output)
        is OutputDocTypes.JSON)

    dumps = []
    for doc in docs:
        doc.prepare_for_dump(yaml_editor, args.output)
        dumps.append(doc.data)

    if args.output:
        with open(args.output, 'w', encoding='utf-8') as out_fhnd:
            if document_is_json:
                if len(dumps) > 1:
                    for dump in dumps:
                        print(
                            json.dumps(Parsers.jsonify_yaml_data(dump)),
                            file=out_fhnd)
                else:
                    json.dump(Parsers.jsonify_yaml_data(dumps[0]), out_fhnd)
            else:
                if len(dumps) > 1:
                    yaml_editor.explicit_end = True  # type: ignore
                    yaml_editor.dump_all(dumps, out_fhnd)
                else:
                    yaml_editor.dump(dumps[0], out_fhnd)
    else:
        if document_is_json:
            if len(dumps) > 1:
                for dump in dumps:
                    print(json.dumps(Parsers.jsonify_yaml_data(dump)))
            else:
                json.dump(Parsers.jsonify_yaml_data(dumps[0]), sys.stdout)
        else:
            if len(dumps) > 1:
                yaml_editor.explicit_end = True  # type: ignore
                yaml_editor.dump_all(dumps, sys.stdout)
            else:
                yaml_editor.dump(dumps[0], sys.stdout)
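
In the multi-document branches of Example 2, the YAML writer is told to emit an explicit end marker so consecutive documents remain unambiguous in one stream. A minimal standalone sketch of that ruamel.yaml behavior (plain ruamel.yaml objects; nothing here is yamlpath-specific):

import sys
from ruamel.yaml import YAML

yaml = YAML()
yaml.explicit_start = True  # emit "---" before each document
yaml.explicit_end = True    # emit "..." after each document

# dump_all writes every document to the same stream, separated by the markers
yaml.dump_all([{"first": 1}, {"second": 2}], sys.stdout)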
Example 3
    def test_jsonify_complex_data(self):
        tagged_tag = "!tagged"
        tagged_value = "tagged value"
        tagged_scalar = ry.scalarstring.PlainScalarString(tagged_value)
        tagged_node = ry.comments.TaggedScalar(tagged_scalar, tag=tagged_tag)

        null_tag = "!null"
        null_value = None
        null_node = ry.comments.TaggedScalar(None, tag=null_tag)

        cdata = ry.comments.CommentedMap({
            "tagged": tagged_node,
            "null": null_node,
            "dates": ry.comments.CommentedSeq([
                dt.date(2020, 10, 31),
                dt.date(2020, 11, 3),
            ]),
        })
        jdata = Parsers.jsonify_yaml_data(cdata)
        assert jdata["tagged"] == tagged_value
        assert jdata["null"] == null_value
        assert jdata["dates"][0] == "2020-10-31"
        assert jdata["dates"][1] == "2020-11-03"
Example 4
    @classmethod
    def _present_data(cls, data: Any, prefix: str) -> str:
        """Stringify data."""
        json_safe_data = Parsers.jsonify_yaml_data(data)
        formatted_data = json_safe_data
        if isinstance(json_safe_data, str):
            formatted_data = json_safe_data.strip()
        json_data = json.dumps(formatted_data).replace(
            "\\n", "\n{} ".format(prefix))
        data_tag = ""
        if isinstance(data, TaggedScalar) and data.tag.value:
            data_tag = "{} ".format(data.tag.value)
        return "{} {}{}".format(prefix, data_tag, json_data)
Example 5
def print_results(args: Any, processor: EYAMLProcessor, yaml_file: str,
                  yaml_paths: List[Tuple[str, YAMLPath]],
                  document_index: int) -> None:
    """Dump search results to STDOUT with optional and dynamic formatting."""
    in_expressions = len(args.search)
    print_file_path = not args.nofile
    print_expression = in_expressions > 1 and not args.noexpression
    print_yaml_path = not args.noyamlpath
    print_value = args.values
    buffers = [
        ": " if ((print_file_path or print_expression)
                 and (print_yaml_path or print_value)) else "",
        ": " if print_yaml_path and print_value else "",
    ]
    for entry in yaml_paths:
        expression, result = entry
        resline = ""

        if print_file_path:
            display_file_name = ("STDIN"
                                 if yaml_file.strip() == "-" else yaml_file)
            resline += "{}/{}".format(display_file_name, document_index)

        if print_expression:
            resline += "[{}]".format(expression)

        resline += buffers[0]
        if print_yaml_path:
            if args.noescape:
                use_fslash = args.pathsep is PathSeperators.FSLASH
                seglines = []
                join_mark = "/" if use_fslash else "."
                path_prefix = "/" if use_fslash else ""
                for (_, segment) in result.escaped:
                    seglines.append(str(segment))
                resline += "{}{}".format(path_prefix, join_mark.join(seglines))
            else:
                resline += "{}".format(result)

        resline += buffers[1]
        if print_value:
            # These results can have only one match, but make sure lest the
            # output become messy.
            for node_coordinate in processor.get_nodes(result, mustexist=True):
                node = node_coordinate.node
                if isinstance(node, (dict, list, CommentedSet)):
                    resline += "{}".format(
                        json.dumps(Parsers.jsonify_yaml_data(node)))
                else:
                    resline += "{}".format(str(node).replace("\n", r"\n"))
                break

        print(resline)
Example 6
    def prepare_for_dump(
        self, yaml_writer: Any, output_file: str = ""
    ) -> OutputDocTypes:
        """
        Prepare this merged document and its writer for final rendering.

        This coalesces the YAML writer's settings to, in particular,
        distinguish between YAML and JSON.  It will also force demarcation of
        every String key and value within the document when the output will be
        JSON.

        Parameters:
        1. yaml_writer (ruamel.yaml.YAML) The YAML document writer
        2. output_file (str) The output file name, if any; its extension is
           used to help identify the output format

        Returns:  (OutputDocTypes) One of:
          * OutputDocTypes.JSON:  The document and yaml_writer are JSON format.
          * OutputDocTypes.YAML:  The document and yaml_writer are YAML format.
        """
        # Check whether the user is forcing an output format
        doc_format = self.config.get_document_format()
        if doc_format is OutputDocTypes.AUTO:
            # Identify by file-extension, if it indicates a known type
            file_extension = (Path(output_file).suffix.lower()
                              if output_file else "")
            if file_extension in [".json", ".yaml", ".yml"]:
                is_flow = file_extension == ".json"
            else:
                # Check whether the document root is in flow or block format
                is_flow = True
                if hasattr(self.data, "fa"):
                    is_flow = self.data.fa.flow_style()
        else:
            is_flow = doc_format is OutputDocTypes.JSON

        if is_flow:
            # Dump the document as true JSON and reload it; this automatically
            # explodes all aliases.
            xfer_buffer = StringIO()
            json.dump(Parsers.jsonify_yaml_data(self.data), xfer_buffer)
            xfer_buffer.seek(0)
            self.data = yaml_writer.load(xfer_buffer)

            # Ensure the writer doesn't emit a YAML Start-of-Document marker
            yaml_writer.explicit_start = False
        else:
            # Ensure block style output
            Parsers.set_flow_style(self.data, False)

            # When writing YAML, ensure the document start mark is emitted
            yaml_writer.explicit_start = True

        return OutputDocTypes.JSON if is_flow else OutputDocTypes.YAML
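
The JSON round-trip in prepare_for_dump above is what removes anchors and aliases before JSON output. A minimal sketch of that mechanism in isolation (not taken from the yamlpath sources; it assumes Parsers can be imported from yamlpath.common, the helper class used throughout these examples):

import json
import sys
from io import StringIO

from yamlpath.common import Parsers  # assumed import path for the Parsers helper

yaml = Parsers.get_yaml_editor()
source = yaml.load("anchor: &keep [1, 2]\nalias: *keep\n")

# Serializing through jsonify_yaml_data copies the aliased value, so the
# reloaded document carries no anchors for the writer to re-emit.
xfer_buffer = StringIO()
json.dump(Parsers.jsonify_yaml_data(source), xfer_buffer)
xfer_buffer.seek(0)
exploded = yaml.load(xfer_buffer)

yaml.dump(exploded, sys.stdout)  # "alias" is now an independent copy of the list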
Example 7
    def test_jsonify_complex_python_data(self):
        cdata = {
            "dates": [
                dt.date(2020, 10, 31),
                dt.date(2020, 11, 3)
            ],
            "bytes": b"abc"
        }
        jdata = Parsers.jsonify_yaml_data(cdata)
        assert jdata["dates"][0] == "2020-10-31"
        assert jdata["dates"][1] == "2020-11-03"

        jstr = json.dumps(jdata)
        assert jstr == """{"dates": ["2020-10-31", "2020-11-03"], "bytes": "b'abc'"}"""
Example 8
def main():
    """Main code."""
    args = processcli()
    log = ConsolePrinter(args)
    validateargs(args, log)
    yaml_path = YAMLPath(args.query, pathsep=args.pathsep)

    # Prep the YAML parser
    yaml = Parsers.get_yaml_editor()

    # Attempt to open the YAML file; check for parsing errors
    (yaml_data, doc_loaded) = Parsers.get_yaml_data(
        yaml, log, args.yaml_file if args.yaml_file else "-")
    if not doc_loaded:
        # An error message has already been logged
        sys.exit(1)

    # Seek the queried value(s)
    discovered_nodes = []
    processor = EYAMLProcessor(log,
                               yaml_data,
                               binary=args.eyaml,
                               publickey=args.publickey,
                               privatekey=args.privatekey)
    try:
        for node in processor.get_eyaml_values(yaml_path, mustexist=True):
            log.debug("Got node from {}:".format(yaml_path),
                      data=node,
                      prefix="yaml_get::main:  ")
            discovered_nodes.append(NodeCoords.unwrap_node_coords(node))
    except YAMLPathException as ex:
        log.critical(ex, 1)
    except EYAMLCommandException as ex:
        log.critical(ex, 2)

    try:
        for node in discovered_nodes:
            if isinstance(node, (dict, list, CommentedSet)):
                print(json.dumps(Parsers.jsonify_yaml_data(node)))
            else:
                if node is None:
                    node = "\x00"
                print("{}".format(str(node).replace("\n", r"\n")))
    except RecursionError:
        log.critical(
            "The YAML data contains an infinitely recursing YAML Alias!", 1)
Example 9
def write_output_document(args, log, yaml, yaml_data):
    """Write the updated document to file or STDOUT."""
    # Save a backup of the original file, if requested
    backup_file = args.yaml_file + ".bak"
    if args.backup:
        log.verbose("Saving a backup of {} to {}.".format(
            args.yaml_file, backup_file))
        if exists(backup_file):
            remove(backup_file)
        copy2(args.yaml_file, backup_file)

    # Save the changed file
    if args.yaml_file.strip() == "-":
        if write_document_as_yaml(args.yaml_file, yaml_data):
            yaml.dump(yaml_data, sys.stdout)
        else:
            json.dump(Parsers.jsonify_yaml_data(yaml_data), sys.stdout)
    else:
        save_to_file(args, log, yaml, yaml_data, backup_file)
Example 10
def save_to_json_file(args, log, yaml_data):
    """Save to a JSON file."""
    log.verbose("Writing changed data as JSON to {}.".format(args.yaml_file))
    with open(args.yaml_file, 'w') as out_fhnd:
        json.dump(Parsers.jsonify_yaml_data(yaml_data), out_fhnd)