Example #1
def render(template_path: str, values: dict, engine: Environment) -> str:
    output = engine.get_template(template_path).render(values)

    yaml = YAML(typ='safe', pure=True)

    try:
        # load_all() returns a lazy generator, so force parsing of every
        # document here; otherwise syntax errors would never surface.
        list(yaml.load_all(output))
    except YAMLError as err:  # ruamel.yaml.error.YAMLError covers ScannerError and ParserError
        raise YamlValidationError(err) from err

    return output
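Note that load_all() returns a lazy generator, which is why the documents above have to be consumed before a ScannerError or ParserError can surface. A minimal, self-contained sketch of that behaviour (the sample string is illustrative):

from ruamel.yaml import YAML
from ruamel.yaml.error import YAMLError

yaml = YAML(typ='safe', pure=True)
broken = "a: 1\n---\nb: [unclosed"

docs = yaml.load_all(broken)   # nothing has been parsed yet, so no error here
try:
    list(docs)                 # iteration drives the parser and raises here
except YAMLError as err:
    print(f"invalid YAML: {err}")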
Example #2
def parse(data, raw=False, quiet=False):
    """
    Main text parsing function

    Parameters:

        data:        (string)  text data to parse
        raw:         (boolean) unprocessed output if True
        quiet:       (boolean) suppress warning messages if True

    Returns:

        List of dictionaries. Raw or processed structured data.
    """
    if not quiet:
        jc.utils.compatibility(__name__, info.compatible)

    raw_output = []

    if jc.utils.has_data(data):

        yaml = YAML(typ='safe')

        for document in yaml.load_all(data):
            raw_output.append(document)

    if raw:
        return raw_output
    else:
        return process(raw_output)
Example #3
def validate_files(
    schemafile: PathLike,
    *datafiles: PathLike,
    encoding: str = "utf-8",
) -> None:
    r"""
	Validate the given datafiles against the given schema.

	:param schemafile: The ``json`` or ``yaml`` formatted schema to validate with.
	:param \*datafiles: The ``json`` or ``yaml`` files to validate.
	:param encoding: Encoding to open the files with.

	.. versionadded:: 0.4.0
	"""

    schemafile = pathlib.Path(schemafile)

    yaml = YAML(typ="safe", pure=True)
    schema = yaml.load(schemafile.read_text(encoding=encoding))

    for filename in datafiles:
        for document in yaml.load_all(
                pathlib.Path(filename).read_text(encoding=encoding)):
            try:
                jsonschema.validate(document,
                                    schema,
                                    format_checker=jsonschema.FormatChecker())
            except jsonschema.exceptions.ValidationError as e:
                e.filename = str(filename)
                raise e
Example #4
def load_config(config_file_path):
    """should raise ScannerError if there is a problem in the yaml file."""
    yaml = YAML(typ='unsafe', pure=True)
    p = Path(config_file_path)
    parsed = yaml.load_all(p)
    data = next(parsed)
    return DefaultBear(None, **data)
Example #5
def main():
    opts = getArgs()
    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit()
        elif o in ("-k", "--key"):
            key_to_update = a
        elif o in ("-v", "--value"):
            value_to_update = a
        else:
            assert False, "unhandled option"

    yaml = YAML()
    yaml.explicit_start = True
    yaml.allow_unicode = True
    yaml.width = 300

    data_list = key_to_update.split(".")
    data_to_refer = value_to_update
    for k in data_list[::-1]:
        data_to_refer = {k: data_to_refer}

    result = []
    for data in list(yaml.load_all(sys.stdin)):
        if data is not None:
            data = update_data(data, data_to_refer)
            result.append(data)
    yaml.dump_all(result, sys.stdout)
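The reversed-key loop above turns a dotted key such as a.b.c into a nested mapping before it is merged into each document; the same idea as a standalone helper (the function name is illustrative, not part of the original script):

def nest_dotted_key(dotted_key, value):
    """Turn 'a.b.c' plus a value into {'a': {'b': {'c': value}}}."""
    nested = value
    for part in reversed(dotted_key.split('.')):
        nested = {part: nested}
    return nested

assert nest_dotted_key('metadata.labels.app', 'web') == {
    'metadata': {'labels': {'app': 'web'}}
}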
Example #6
def main():
    yaml = YAML()
    file_name = sys.argv[1]
    file_in = open(file_name).read()
    docs = yaml.load_all(file_in)
    i = 0
    for doc in docs:

        if i == 0:
            code_old = doc
        else:
            code_new = doc
        i = i + 1
    # Holds the changed values, populated by set_map_item
    delta_map = CommentedMap()
    # Keys visited during each recursive traversal; cleared every time the recursion returns to the root level
    follow_keys = list()
    # Keys added by this comparison
    add = list()
    # Keys updated by this comparison
    update = list()
    traversal(code_old, code_new, follow_keys, delta_map, update, add)
    yaml.dump(code_old, sys.stdout)

    split = '------love----you------choerodon----'

    print(split)
    yaml.dump(delta_map, sys.stdout)

    print(split)
    change_key_map = dict()

    change_key_map["add"] = add
    change_key_map["update"] = update
    yaml.dump(change_key_map, sys.stdout)
Example #7
def main():
    yaml = YAML()
    file_name = sys.argv[1]
    file_in = open(file_name).read()
    docs = yaml.load_all(file_in)
    i = 0
    for doc in docs:

        if i == 0:
            code_old = doc
        else:
            code_new = doc
        i = i + 1
    delta_map = dict()
    follow_keys = list()

    add = list()
    update = list()
    traversal(code_old, code_new, follow_keys, delta_map, update, add)
    yaml.dump(code_old, sys.stdout)

    split = '------love----you------choerodon----'

    print(split)
    yaml.dump(delta_map, sys.stdout)

    print(split)
    change_key_map = dict()

    change_key_map["add"] = add
    change_key_map["update"] = update
    yaml.dump(change_key_map, sys.stdout)
Example #8
def parse(data, raw=False, quiet=False):
    """
    Main text parsing function

    Parameters:

        data:        (string)  text data to parse
        raw:         (boolean) unprocessed output if True
        quiet:       (boolean) suppress warning messages if True

    Returns:

        List of Dictionaries representing the YAML documents.
    """
    if not quiet:
        jc.utils.compatibility(__name__, info.compatible)

    raw_output = []

    if jc.utils.has_data(data):

        # monkey patch to disable plugins since we don't use them and in
        # ruamel.yaml versions prior to 0.17.0 the use of __file__ in the
        # plugin code is incompatible with the pyoxidizer packager
        YAML.official_plug_ins = lambda a: []

        yaml = YAML(typ='safe')

        for document in yaml.load_all(data):
            raw_output.append(document)

    if raw:
        return raw_output
    else:
        return process(raw_output)
Example #9
def process_complete_schema(config, config_schema_file, defaults_file,
                            meta_info_file):
    yaml = YAML()

    config_schema_file = open(config_schema_file, "r")
    defaults_file = open(defaults_file, "r")
    meta_info_file = open(meta_info_file, "r")

    # Config schema has expected schema and all the expandable types
    config_schema = yaml.load_all(config_schema_file)
    defaults = yaml.load(defaults_file)
    meta_info = yaml.load(meta_info_file)

    expected_params = None
    expandable_types = {}

    for yaml_doc in config_schema:
        for key in yaml_doc:
            if key == "expected":
                expected_params = yaml_doc[key]

            else:
                expandable_types[key] = yaml_doc[key]

    if expected_params is None:
        message = "Config schema needs to have an expected params section"
        raise Exception(message)

    return get_final_config(config, expected_params, defaults, meta_info,
                            expandable_types)
Example #10
def main():
    opts = getArgs()
    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit()
        elif o in ("-k", "--key"):
            d_key = a
        elif o in ("-v", "--value"):
            d_value = a
        else:
            assert False, "unhandled option"
    yaml = YAML()
    yaml.explicit_start = True
    yaml.allow_unicode = True
    yaml.width = 300
    result = []
    for data in list(yaml.load_all(sys.stdin)):
        if data is not None:
            if (data['kind'] == "ConfigMap") or \
               (data['kind'] == "Secret"):
                # update data: key=value
                data['data'][d_key] = d_value
                result.append(data)
            elif 'kind' in data.keys():
                result.append(data)
    yaml.dump_all(result, sys.stdout)
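A condensed sketch of the same update step without the getopt plumbing; the function name and the isinstance guard are mine, not part of the example:

import sys
from ruamel.yaml import YAML

def set_data_key(stream_in, stream_out, key, value):
    """Set data[key] = value on every ConfigMap/Secret in a multi-document stream."""
    yaml = YAML()
    yaml.explicit_start = True
    docs = []
    for doc in yaml.load_all(stream_in):
        if not isinstance(doc, dict) or 'kind' not in doc:
            continue
        if doc['kind'] in ('ConfigMap', 'Secret'):
            doc.setdefault('data', {})[key] = value
        docs.append(doc)
    yaml.dump_all(docs, stream_out)

if __name__ == '__main__':
    # e.g. cat manifests.yaml | python set_data_key.py
    set_data_key(sys.stdin, sys.stdout, 'DB_HOST', 'db.internal')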
Example #11
def main():
    yaml = YAML()
    file_name = sys.argv[1]
    file_in = open(file_name).read()
    docs = yaml.load_all(file_in)
    i = 0
    for doc in docs:

        if i == 0:
            code_old = doc
        else:
            code_new = doc
        i = i + 1
    delta_map = dict()
    follow_keys = list()

    add = list()
    update = list()
    traversal(code_old, code_new, follow_keys, delta_map, update, add)
    yaml.dump(code_old, sys.stdout)

    split = '------love----you------choerodon----'

    print(split)
    yaml.dump(delta_map, sys.stdout)

    print(split)
    change_key_map = dict()

    change_key_map["add"] = add
    change_key_map["update"] = update
    yaml.dump(change_key_map, sys.stdout)
Example #12
def load_multi_doc_file(file_data):
    yaml = YAML(typ='safe')
    try:
        # load_all() is lazy, so materialize the documents here so that
        # parse errors are actually caught by this handler.
        multi_doc_load = list(yaml.load_all(file_data))
    except YAMLError as yaml_error:  # ruamel.yaml.error.YAMLError
        print(yaml_error)
        sys.exit()
    return multi_doc_load
Example #13
    def _parse_yaml(self, yaml_content):
        try:
            yaml = YAML()
            resources = list(yaml.load_all(yaml_content))
        except Exception as e:
            raise ParseError(f'Parse manifest failed: \n{e}\n\nManifest content:\n{yaml_content}')
        else:
            return resources
Example #14
async def differ(ctx, path):
    """Clear the bring cache dir in the relevant locaiont (e.g. '~/.cache/bring' on Linux)."""

    si = SmartInput(path)
    content = await si.content_async()

    yaml = YAML()
    dict_content_orig = list(yaml.load_all(content))

    print(dict_content_orig)
    new_content = click.edit(content)

    dict_content_new = list(yaml.load_all(new_content))

    dict_diff = diff(dict_content_orig, dict_content_new)
    dict_diff = list(dict_diff)
    print(dict_diff)
Example #15
def safe_load_all(stream):
    yaml = YAML()
    yaml.default_flow_style = False
    docs = list(yaml.load_all(stream))
    conv_docs = []
    for doc in docs:
        conv_docs.append(dict_to_objdict(doc))
    return conv_docs
Example #16
    def _parse_yaml(self, yaml_content):
        try:
            yaml = YAML()
            resources = list(yaml.load_all(yaml_content))
        except Exception as e:
            raise ParseError(f'Parse manifest failed: \n{e}\n\nManifest content:\n{yaml_content}')
        else:
            # ordereddict to dict
            return json.loads(json.dumps(resources))
Example #17
    def docs(self, path):
        from ruamel.yaml import YAML

        tyaml = YAML(typ='safe', pure=True)
        tyaml.register_class(YAMLData)
        tyaml.register_class(Python)
        tyaml.register_class(Output)
        tyaml.register_class(Assert)
        return list(tyaml.load_all(path))
Example #18
def yamale_converter(config_schema_file, yamale_file, lc_schema_files):
    yaml = YAML()

    config_schema_file = open(config_schema_file, "r")
    config_schema = yaml.load_all(config_schema_file)

    schema = None
    expandable_types = {}

    for yaml_doc in config_schema:
        for key, value in yaml_doc.items():
            if key == "expected":
                schema = value

            else:
                expandable_types[key] = value

    with open(yamale_file, "w") as yamale:
        schema = translate(schema, expandable_types)

        yaml.dump(schema, yamale)

        # Add types defined in schema file
        for key, value in expandable_types.items():
            final = translate({key: value}, expandable_types)

            if key == "lightweight_component":
                lc_schema_filenames = [
                    lc_schema.split("/")[-1].replace(".yaml", "")
                    for lc_schema in lc_schema_files
                ]

                lc_schemas = [
                    "include('{}')".format(lc_schema.replace(".yaml", ""))
                    for lc_schema in lc_schema_filenames
                ]

                any_schemas = "any({})".format(", ".join(lc_schemas))

                final[key]["config"] = any_schemas

            yamale.write("---\n")
            yaml.dump(final, yamale)

        # Add lc_schemas
        for lc_schema_file in lc_schema_files:
            lc_schema = yaml.load(open(lc_schema_file, "r"))

            lc_schema = lc_schema["expected-from-site-level-config"]

            lc_schema_name = lc_schema_file.split("/")[-1].replace(".yaml", "")

            final = translate({lc_schema_name: lc_schema}, expandable_types)

            yamale.write("---\n")
            yaml.dump(final, yamale)
Example #19
    async def patch(self, full_path: str, patch_set: Mapping) -> None:

        si = SmartInput(full_path)
        content = await si.content_async()
        yaml = YAML()
        dict_content = yaml.load_all(content)
        new_content = patch(patch_set, list(dict_content))

        with open(full_path, "w") as f:
            yaml.dump_all(new_content, f)
Example #20
    def _add_policies(self, doc, file_name):
        yaml1 = YAML()
        code = yaml1.load_all(doc)
        for policy_list in code:
            if isinstance(policy_list, dict):
                self._add_policies_to_parse_queue(policy_list.get('items', []), file_name)
            else:  # we got a list of lists, e.g., when combining calico np, gnp and profiles
                for policy_list_list in policy_list:
                    if isinstance(policy_list_list, dict):
                        self._add_policies_to_parse_queue(policy_list_list.get('items', []), file_name)
Example #21
def get_dicts_from_yaml(in_f, encoding='utf-8'):
    """
    Reads dictionaries from a YAML file.
    :param in_f: YAML file
    :param encoding: encoding of the YAML file
    :return: generator yielding the dictionaries contained in the file
    """
    yaml = YAML(pure=True)
    with open(Path(in_f), encoding=encoding) as f_in:
        for item in yaml.load_all(f_in):
            yield dict(item)
Example #22
    def fetch_ruleset(subreddit, page):

        raw_content = RedditManagerUtils.RedditManager.getWikiContent(
            subreddit, page)

        yaml = YAML(typ='safe')

        output = list(yaml.load_all(raw_content))

        new_ruleset = RulesManager._parse_rules(output, subreddit)

        RulesManager.__subreddit_rulesets[subreddit] = new_ruleset
Example #23
def scan_kustomization_for_images(kust_dir):
    """Scan kustomization folder and produce a list of images

    Args:
      kust_dir (str): Path where the kustomize application resides.
    """

    yaml = YAML()
    yaml.block_seq_indent = 0

    # Load kustomization
    with open(path.join(kust_dir, "kustomization.yaml")) as f:
        try:
            kustomization = yaml.load(f)
        except Exception as e:
            log.error("Error loading kustomization in %s: %s", kust_dir, e)
            raise

    # Get current image list from kustomization
    img_list = kustomization.get("images", [])

    # Get local resource files
    (_, _, filenames) = next(walk(kust_dir))
    filenames = [
        filename for filename in filenames
        if filename != "kustomization.yaml" and filename != "params.yaml" and
        filename.endswith(".yaml")
    ]

    for filename in filenames:
        with open(path.join(kust_dir, filename)) as f:
            resources = list(yaml.load_all(f))
        for r in resources:
            if not isinstance(r, Mapping):
                continue
            if r.get("kind", "").lower() in accepted_kinds:
                try:
                    containers = r["spec"]["template"]["spec"]["containers"]
                except KeyError:
                    continue
                for c in containers:
                    try:
                        img_str = c["image"]
                    except KeyError:
                        continue
                    new_img = image_from_string(img_str)
                    append_or_update(img_list, new_img)

    if img_list:
        kustomization["images"] = img_list
        with open(path.join(kust_dir, "kustomization.yaml"), "w") as f:
            yaml.dump(kustomization, f)
Example #24
    def test_multi_document_load(self, tmpdir):
        """this went wrong on 3.7 because of StopIteration, PR 37 and Issue 211"""
        from ruamel.yaml import YAML

        fn = Path(str(tmpdir)) / 'test.yaml'
        fn.write_text(
            textwrap.dedent(u"""\
            ---
            - a
            ---
            - b
            ...
            """))
        yaml = YAML()
        assert list(yaml.load_all(fn)) == [['a'], ['b']]
Example #25
def main():
    yaml = YAML()
    yaml.explicit_start = True
    yaml.allow_unicode = True
    yaml.width = 300
    result = []
    for data in list(yaml.load_all(sys.stdin)):
        if data is not None:
            if data['kind'] == 'Secret':
                for k, v in data['data'].items():
                    data['data'][k] = base64.b64encode(
                        v.encode('utf-8')).decode('utf-8')
                result.append(data)
            elif 'kind' in data.keys():
                result.append(data)
    yaml.dump_all(result, sys.stdout)
Example #26
    def from_yaml(file):
        """Creates a new bibliography from a YAML source file.

        Args:
            file (file): path to YAML file from which to load database.

        Returns:
            An OrderedDict containing the bibliography as per the provided YAML file.
        """
        yaml = YAML()
        bib = OrderedDict()
        LOGGER.debug('Loading YAML data from file: %s.', file)
        for entry in yaml.load_all(file):
            for label, data in entry.items():
                bib[label] = Entry(label, data)
        return bib
Example #27
def main():
    opts = getArgs()
    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit()
        elif o in ("-t", "--type"):
            c_type = a
        elif o in ("-n", "--name"):
            c_name = a
        elif o in ("-i", "--image"):
            c_image = a
        else:
            assert False, "unhandled option"
    yaml = YAML()
    yaml.explicit_start = True
    yaml.allow_unicode = True
    yaml.width = 300
    result = []
    for data in list(yaml.load_all(sys.stdin)):
        if data is not None:
            if (data['kind'] == "Deployment") or \
               (data['kind'] == "DaemonSet") or \
               (data['kind'] == "StatefulSet"):
                # update image
                container_ls = [
                    c['name'] for c in data['spec']['template']['spec'][c_type]
                ]
                for n, c in enumerate(container_ls):
                    if c == c_name:
                        data['spec']['template']['spec'][c_type][n][
                            'image'] = c_image
                result.append(data)
            elif data['kind'] == "CronJob":
                # update image
                container_ls = [
                    c['name'] for c in data['spec']['jobTemplate']['spec']
                    ['template']['spec'][c_type]
                ]
                for n, c in enumerate(container_ls):
                    if c == c_name:
                        data['spec']['jobTemplate']['spec']['template'][
                            'spec'][c_type][n]['image'] = c_image
                result.append(data)
            elif 'kind' in data.keys():
                result.append(data)
    yaml.dump_all(result, sys.stdout)
Example #28
    def __init__(self, path = "./configs/tune/example.yaml"):
        """Loads and prepares the specified config file.
        
        Keyword Arguments:
            path {str} -- Yaml file path to the to be loaded config (default: {"./configs/tune/search.yaml"})
        """
        # Load the config file
        stream = open(path, "r")
        yaml = YAML()
        yaml_args = yaml.load_all(stream)
        
        # Final contents of the config file will be added to a dictionary
        self._config = {}

        # Prepare data
        for data in yaml_args:
            self._config = dict(data)
Example #29
    def setup(protocol: str = 'protocol.yml'):
        # TODO: Check Correctness
        # needs: Transition to end, no unreachable nodes, no unknown task types
        # Build database
        # Handle api calls and predictions

        automaton = AnnotationAutomaton()

        with open(protocol) as f:
            yaml = YAML(typ='safe')
            protocol: dict = next(yaml.load_all(f))

            for state, val in protocol.items():

                for transition in val['transitions']:
                    for trigger in transition:
                        target = transition[trigger]['target']
                        actions = (transition[trigger]['actions'] if 'actions'
                                   in transition[trigger] else None)
                        automaton.add_transition(trigger,
                                                 state,
                                                 target,
                                                 before=actions)

                del val['transitions']
                on_exit = ['save']
                # on_exit.extend(['save'] if 'column' in val else [])
                automaton.add_state(
                    State(name=state, meta=val, on_exit=on_exit))

        automaton.add_transition('fail', '*', 'failure')
        automaton.add_transition('to_start', '*', 'start')
        # print(automaton.states)
        # print(automaton.get_transitions())
        illegal = automaton.check_machine_validity()
        if len(illegal['unreachable']) != 0 and len(illegal['undefined']) != 0:
            raise error_handler.AutomatonError(
                f'Unreachable states: {illegal["unreachable"]} and Undefined states: {illegal["undefined"]}'
            )
        elif len(illegal['unreachable']) != 0:
            raise error_handler.AutomatonError(
                f'Unreachable states: {illegal["unreachable"]}')
        elif len(illegal['undefined']) != 0:
            raise error_handler.AutomatonError(
                f'Undefined states: {illegal["undefined"]}')
        return automaton
Example #30
File: ded.py Project: vduseev/ded
def run():
    args = _parse()
    keys = args.keys
    if not keys:
        keys = ["kind", "metadata.name"],

    yaml = YAML()
    input_docs = yaml.load_all(sys.stdin)
    unique_docs = dict()

    for doc in input_docs:

        # Determine the ID of each supplied document
        # based on the keys
        ids = []
        for key in keys:
            # Split each key into the hierarchy of subkeys
            key_parts = key.split(".")

            try:
                # Start iterating with the top-most subkey
                value = doc[key_parts[0]]
                # Go deeper into the lower nested subkeys
                for key_part in key_parts[1:]:
                    # until the value is obtained
                    value = value[key_part]
                ids.append(str(value))

            except KeyError as ke:
                print(
                    f"Supplied document does not have required key {ke}",
                    file=sys.stderr,
                )
                print("Failed document is:", file=sys.stderr)
                yaml.dump(doc, sys.stderr)
                exit(1)

        # If document with such ID does not yet exist in our
        # collection, then add it
        id = "-".join(ids)
        if id not in unique_docs:
            unique_docs[id] = doc

    yaml.dump_all(unique_docs.values(), sys.stdout)
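The nested-key lookup in the inner loop can also be written with functools.reduce; a small sketch (the helper name is mine, not part of ded.py):

from functools import reduce

def get_nested(doc, dotted_key):
    """Resolve a dotted key such as 'metadata.name' against a nested mapping; raises KeyError if a part is missing."""
    return reduce(lambda node, part: node[part], dotted_key.split("."), doc)

doc = {"kind": "Service", "metadata": {"name": "web"}}
assert get_nested(doc, "metadata.name") == "web"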
Example #31
def main():
    opts = getArgs()
    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit()
        elif o in ("-t", "--tag"):
            tag = a
        else:
            assert False, "unhandled option"
    yaml = YAML()
    yaml.explicit_start = True
    yaml.allow_unicode = True
    yaml.width = 300
    result = []
    for data in list(yaml.load_all(sys.stdin)):
        if data is not None:
            if (data['kind'] == "Deployment") or \
               (data['kind'] == "DaemonSet") or \
               (data['kind'] == "StatefulSet"):
                # add label: tag
                data['spec']['template']['metadata']['labels']['tag'] = tag
                # add label: imageName.n, imageVersion.n
                image_ls = [c['image'] for c in data['spec']['template']['spec']['containers']]
                for n,i in enumerate(image_ls):
                    i_name = i.split(':')[0][i.split(':')[0].rfind('/')+1:]
                    i_version = i.split(':')[1]
                    data['spec']['template']['metadata']['labels']['imageName.' + str(n)] = i_name
                    data['spec']['template']['metadata']['labels']['imageVersion.' + str(n)] = i_version
                result.append(data)
            elif data['kind'] == "CronJob":
                # add label: tag
                data['spec']['jobTemplate']['spec']['template']['metadata']['labels']['tag'] = tag
                # add label: imageName.n, imageVersion.n
                image_ls = [c['image'] for c in data['spec']['jobTemplate']['spec']['template']['spec']['containers']]
                for n,i in enumerate(image_ls):
                    i_name = i.split(':')[0][i.split(':')[0].rfind('/')+1:]
                    i_version = i.split(':')[1]
                    data['spec']['jobTemplate']['spec']['template']['metadata']['labels']['imageName.' + str(n)] = i_name
                    data['spec']['jobTemplate']['spec']['template']['metadata']['labels']['imageVersion.' + str(n)] = i_version
                result.append(data)
            elif 'kind' in data.keys():
                result.append(data)
    yaml.dump_all(result, sys.stdout)