Example #1
    def _to_text(self, filename=None, is_json=False):
        """Serialize to a json/yaml file"""
        extra_data = {} if self.extra_data is None else self.extra_data

        def cell_value(a_cell):
            if a_cell.formula and a_cell.formula.python_code:
                return '=' + a_cell.formula.python_code
            else:
                return a_cell.value

        extra_data.update(dict(
            excel_hash=self._excel_file_md5_digest,
            cell_map=dict(sorted(
                ((addr, cell_value(cell))
                 for addr, cell in self.cell_map.items() if cell.serialize),
                key=lambda x: AddressRange(x[0]).sort_key
            )),
        ))
        if not filename:
            filename = self.filename + ('.json' if is_json else '.yml')

        # hash the current file to see if this function makes any changes
        existing_hash = (self._compute_file_md5_digest(filename)
                         if os.path.exists(filename) else None)

        if not is_json:
            with open(filename, 'w') as f:
                ymlo = YAML()
                ymlo.width = 120
                ymlo.dump(extra_data, f)
        else:
            with open(filename, 'w') as f:
                json.dump(extra_data, f, indent=4)

        del extra_data['cell_map']

        # hash the new file and return True if it changed; this is only
        # reliable on Pythons with ordered dicts (CPython 3.6 and Python 3.7+)
        return (existing_hash is None or
                existing_hash != self._compute_file_md5_digest(filename))
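
A note on the `width` setting used above: when dumping, ruamel.yaml folds long lines at roughly `width` characters (around 80 by default), which is why serializers that want stable, diff-friendly output raise it. A minimal sketch of the effect, assuming nothing beyond ruamel.yaml itself:

import sys
from ruamel.yaml import YAML

yaml = YAML()
yaml.width = 24  # artificially small to force folding
data = {'msg': 'alpha beta gamma delta epsilon zeta eta theta'}
yaml.dump(data, sys.stdout)
# the scalar is folded at ~24 columns; with a generous width
# (120 in the example above) it stays on a single line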
Example #2
from typing import Union

import demisto_sdk.commands.common.content.errors as exc
from ruamel.yaml import YAML
from ruamel.yaml.scanner import ScannerError
from wcmatch.pathlib import EXTGLOB, NEGATE, Path

from .dictionary_based_object import DictionaryBasedObject

RUYAML = YAML(typ='rt')
RUYAML.preserve_quotes = True  # type: ignore
RUYAML.width = 50000  # type: ignore


class YAMLObject(DictionaryBasedObject):
    def __init__(self, path: Union[Path, str], file_name_prefix: str = ""):
        super().__init__(path=path, file_name_prefix=file_name_prefix)

    @staticmethod
    def _fix_path(path: Union[Path, str]):
        """Find and validate object path is valid.

        Rules:
            1. Path exists.
            2. One of the following options:
                a. Path is a file.
                b. Path is directory and file with a yml/yaml suffix exists in the given directory.
            3. File suffix equals "yml" or "yaml".

        Returns:
            Path: valid file path.
Example #3
def _is_recipe_solvable_on_platform(recipe_dir, cbc_path, platform, arch):
    # parse the channel sources from the CBC
    parser = YAML(typ="jinja2")
    parser.indent(mapping=2, sequence=4, offset=2)
    parser.width = 320

    with open(cbc_path, "r") as fp:
        cbc_cfg = parser.load(fp.read())

    if "channel_sources" in cbc_cfg:
        channel_sources = cbc_cfg["channel_sources"][0].split(",")
    else:
        channel_sources = ["conda-forge", "defaults", "msys2"]

    if "msys2" not in channel_sources:
        channel_sources.append("msys2")

    logger.debug("MAMBA: using channels %s on platform-arch %s-%s",
                 channel_sources, platform, arch)

    # here we extract the conda build config in roughly the same way that
    # it would be used in a real build
    config = conda_build.config.get_or_merge_config(
        None,
        exclusive_config_file=cbc_path,
        platform=platform,
        arch=arch,
    )
    cbc, _ = conda_build.variants.get_package_combined_spec(recipe_dir,
                                                            config=config)

    # now we render the meta.yaml into an actual recipe
    metas = conda_build.api.render(
        recipe_dir,
        platform=platform,
        arch=arch,
        ignore_system_variants=True,
        variants=cbc,
        permit_undefined_jinja=True,
        finalize=False,
        bypass_env_check=True,
        channel_urls=channel_sources,
    )

    # now we loop through each one and check if we can solve it
    # we check host (or build as a fallback), run and test requirements
    # and ignore the rest
    mamba_solver = _mamba_factory(tuple(channel_sources),
                                  "%s-%s" % (platform, arch))

    solvable = True
    for m, _, _ in metas:
        host_req = (m.get_value('requirements/host', [])
                    or m.get_value('requirements/build', []))
        solvable &= mamba_solver.solve(host_req)

        run_req = m.get_value('requirements/run', [])
        solvable &= mamba_solver.solve(run_req)

        tst_req = (m.get_value('test/requires', []) +
                   m.get_value('test/requirements', []) + run_req)
        solvable &= mamba_solver.solve(tst_req)

    return solvable
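
One thing this snippet relies on: typ="jinja2" is not part of ruamel.yaml core but comes from the separate ruamel.yaml.jinja2 plugin, which lets documents containing {{ ... }} and {% ... %} constructs round-trip even though they are not valid YAML on their own. A minimal sketch, assuming the plugin is installed:

import sys
from ruamel.yaml import YAML

parser = YAML(typ='jinja2')  # requires the ruamel.yaml.jinja2 plugin
text = 'package:\n' \
       '  name: "{{ name|lower }}"\n' \
       '  version: "{{ version }}"\n'
meta = parser.load(text)
parser.dump(meta, sys.stdout)  # the template expressions survive the round trip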
Example #4
from logging import getLogger

logger = getLogger(__name__)

from knack.util import CLIError
from knack.help_files import helps
from azure.cli.core.mock import DummyCli
from azure.cli.core.util import get_installed_cli_distributions
from azure.cli.core._help import CliCommandHelpFile, CliGroupHelpFile
from azure.cli.core.file_util import _store_parsers, _is_group

try:
    from ruamel.yaml import YAML
    yaml = YAML()
    yaml.width = 1000 # prevents wrapping around in dumper.
    yaml.allow_duplicate_keys = True # TODO: allow duplicate keys within help entries. see az container create. Remove this.
except ImportError as e:
    msg = "{}\npip install ruamel.Yaml to use this script.".format(e)
    exit(msg)

PACKAGE_PREFIX = "azure.cli.command_modules"
CLI_PACKAGE_NAME = 'azure-cli'
COMPONENT_PREFIX = 'azure-cli-'

failed = 0

loaded_helps = {}

def get_all_help(cli_ctx):
    invoker = cli_ctx.invocation
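
The allow_duplicate_keys line above matters because ruamel.yaml, unlike PyYAML, rejects duplicated mapping keys by default. A small illustration of the behavior, independent of the az CLI machinery:

from ruamel.yaml import YAML

yaml = YAML()
yaml.allow_duplicate_keys = True
# without the flag, this load raises DuplicateKeyError;
# with it, the document loads (a warning may be emitted)
doc = yaml.load('short-summary: one\nshort-summary: two\n')
print(doc)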
Example #5
from typing import Optional
from abc import ABC
import pkg_resources
import logging

from yarl import URL
from ruamel.yaml import YAML
from ruamel.yaml.comments import CommentedMap

from .base import BaseConfig
from .recursive_dict import RecursiveDict

yaml = YAML()
yaml.indent(4)
yaml.width = 200

log: logging.Logger = logging.getLogger("mau.util.config")


class BaseFileConfig(BaseConfig, ABC):
    def __init__(self, path: str, base_path: str) -> None:
        super().__init__()
        self._data = CommentedMap()
        self.path: str = path
        self.base_path: str = base_path

    def load(self) -> None:
        with open(self.path, 'r') as stream:
            self._data = yaml.load(stream)
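
Because YAML() defaults to round-trip mode, the CommentedMap returned by load() keeps comments, key order and quoting, so a matching save can rewrite the user's config essentially untouched. A minimal sketch of that property (the save path itself is not shown in this excerpt):

import sys
from ruamel.yaml import YAML

yaml = YAML()  # round-trip mode by default
yaml.indent(4)
yaml.width = 200

text = 'homeserver:\n    address: https://example.com  # keep me\n'
data = yaml.load(text)
data['homeserver']['domain'] = 'example.com'
yaml.dump(data, sys.stdout)  # the comment and layout survive the edit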
Example #6
# -*- coding: utf-8 -*-

from ruamel.yaml import YAML

yaml = YAML()
yaml.width = 4096

from slugify import slugify as slugify_
import re

STOPWORDS = [
    'a',
    'au',
    'd',
    'de',
    'des',
    'du',
    'et',
    'l',
    'la',
    'le',
    'les',
    'ou',
    'pour',
    'sur',
    'un',
]


def represent_none(self, data):
    return self.represent_scalar(u'tag:yaml.org,2002:null', u'null')
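
represent_none only takes effect once it is registered on a representer; out of the box the round-trip dumper writes None as an empty value rather than the word null. A hedged sketch of the registration this module presumably performs elsewhere:

import sys
from ruamel.yaml import YAML

yaml = YAML()
yaml.width = 4096

def represent_none(self, data):
    return self.represent_scalar(u'tag:yaml.org,2002:null', u'null')

# hook the custom representer into this YAML instance
yaml.representer.add_representer(type(None), represent_none)
yaml.dump({'title': None}, sys.stdout)  # -> title: null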
Example #7
    def migrate(self, recipe_dir: str, attrs: "AttrsTypedDict",
                **kwargs: Any) -> None:
        with indir(recipe_dir):
            mapping = {}
            groups = {}
            with open("meta.yaml", "r") as fp:
                lines = []
                for line in fp.readlines():
                    lines.append(_munge_line(line, mapping, groups))

            yaml = YAML(typ="jinja2")
            yaml.indent(mapping=2, sequence=4, offset=2)
            yaml.width = 120

            meta = yaml.load("".join(lines))

            if not _has_key_selector(meta, "outputs"):
                for key, _ in _gen_keys_selector(meta, "test"):
                    _adjust_test_dict(
                        meta,
                        key,
                        mapping,
                        groups,
                        parent_group=groups.get(key, None),
                    )
            else:
                # do top level
                has_python = False
                for _, val in _gen_keys_selector(meta, "requirements"):
                    for _key, reqs in _gen_keys_selector(val, "host"):
                        has_python |= _has_python_in_host(reqs)

                has_test_imports = False
                for _, val in _gen_keys_selector(meta, "test"):
                    has_test_imports |= _has_key_selector(val, "imports")

                if has_python or has_test_imports:
                    for key, _ in _gen_keys_selector(meta, "test"):
                        _adjust_test_dict(
                            meta,
                            key,
                            mapping,
                            groups,
                            parent_group=groups.get(key, None),
                        )

                # now outputs
                for _, outputs in _gen_keys_selector(meta, "outputs"):
                    for output in outputs:
                        has_python = False
                        for _, val in _gen_keys_selector(
                                output, "requirements"):
                            for _key, reqs in _gen_keys_selector(val, "host"):
                                has_python |= _has_python_in_host(reqs)

                        if has_python:
                            for key, _ in _gen_keys_selector(output, "test"):
                                _adjust_test_dict(
                                    output,
                                    key,
                                    mapping,
                                    groups,
                                    parent_group=groups.get(key, None),
                                )

            with open("meta.yaml", "w") as fp:
                yaml.dump(meta, fp)

            # now undo mapping
            with open("meta.yaml", "r") as fp:
                lines = []
                for line in fp.readlines():
                    lines.append(_unmunge_line(line, mapping))

            with open("meta.yaml", "w") as fp:
                for line in lines:
                    fp.write(line)
Example #8
            index = affiliation_index.get(affil, len(affiliation_index) + 1)
            affiliation_index[affil] = index
            new_affils.append(index)
    auth['affiliation'] = ', '.join(map(str, new_affils))

# Build the affiliations section from the index
affiliations = []
for name, index in affiliation_index.items():
    affiliations.append({'name': name, 'index': index})

data['affiliations'] = affiliations

# Dump the new yaml to a string
s = io.StringIO()
yaml.default_flow_style = False
yaml.width = 1000
yaml.allow_unicode = True
yaml.dump(data, s)
s.seek(0)

# Overwrite the paper markdown with the new metadata
with open("paper.md") as fd:
    lines = fd.readlines()

start_line = lines.index("---\n") + 1
end_line = lines.index("...\n")

out_lines = lines[:start_line]
out_lines += s.readlines()
out_lines += lines[end_line:]
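
The excerpt stops before the final write; the remaining step is simply to overwrite the file with the spliced lines. A minimal continuation under the same variable names:

with open('paper.md', 'w') as fd:
    fd.writelines(out_lines)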
Example #9
heluxup parses the flux control repository for HelmReleases and checks whether chart updates are
available. If updates are available, heluxup updates the yaml files in the flux control repository accordingly.
"""

import os

import click
import semver
import urllib3
from ruamel.yaml import YAML

HTTP = urllib3.PoolManager()
YAML_PARSER = YAML()
YAML_PARSER.preserve_quotes = True
YAML_PARSER.explicit_start = True
YAML_PARSER.width = 8000
REPO_CACHE = {}


class HelmRelease:
    """
    HelmRelease represents a HelmRelease object that is used by flux.
    """
    def __init__(self, release_name, chart_name, repository, git_version):
        self.release_name = release_name
        self.chart_name = chart_name
        self.repository = repository.rstrip('/')
        self.git_version = git_version

        if self.repository not in REPO_CACHE:
            repo_yaml = HTTP.request('GET',
Example #10
import string

from passlib.hash import sha512_crypt
from ruamel.yaml import YAML

SECRETS_ALL = {'keystone_admin_password': '******'}

SECRETSFILE_INPUT = 'environments/kolla/secrets.yml'
SECRETSFILE_OUTPUT_ALL = 'environments/secrets.yml'
SECRETSFILE_OUTPUT_KOLLA = 'environments/kolla/secrets.yml'

yaml = YAML()
yaml.explicit_start = True
yaml.indent(mapping=2, sequence=4, offset=2)
yaml.preserve_quotes = True
yaml.width = float("inf")

with open(SECRETSFILE_INPUT) as fp:
    secrets_input = yaml.load(fp)

with open(SECRETSFILE_OUTPUT_ALL) as fp:
    secrets_output_all = yaml.load(fp)

with open(SECRETSFILE_OUTPUT_KOLLA) as fp:
    secrets_output_kolla = yaml.load(fp)

for key in SECRETS_ALL.keys():
    secrets_output_all[key] = secrets_input[SECRETS_ALL[key]]

if 'ceph_cluster_fsid' in secrets_output_kolla:
    del secrets_output_kolla['ceph_cluster_fsid']
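
The excerpt ends before anything is persisted; the natural closing step is to dump both documents back through the same configured instance, so the explicit '---' start, quoting and indentation settings apply on the way out. A minimal sketch:

with open(SECRETSFILE_OUTPUT_ALL, 'w') as fp:
    yaml.dump(secrets_output_all, fp)

with open(SECRETSFILE_OUTPUT_KOLLA, 'w') as fp:
    yaml.dump(secrets_output_kolla, fp)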
Example #12
import argparse
import io
import re
from sys import maxsize

from ruamel.yaml import YAML
from ruamel.yaml.error import YAMLError
from six import text_type


def pretty_format_yaml(argv=None):
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--autofix',
        action='store_true',
        dest='autofix',
        help='Automatically fixes encountered not-pretty-formatted files',
    )
    parser.add_argument(
        '--indent',
        type=int,
        default='2',
        help=(
            'The number of indent spaces or a string to be used as delimiter'
            ' for indentation level e.g. 4 or "\t" (Default: 2)'
        ),
    )

    parser.add_argument('filenames', nargs='*', help='Filenames to fix')
    args = parser.parse_args(argv)

    status = 0

    yaml = YAML()
    yaml.indent = args.indent
    # Prevent ruamel.yaml to wrap yaml lines
    yaml.width = maxsize

    separator = '---\n'

    for yaml_file in set(args.filenames):
        with open(yaml_file) as f:
            string_content = ''.join(f.readlines())

        # Split multi-document file into individual documents
        #
        # Not using yaml.load_all() because it reformats primitive (non-YAML) content. It removes
        # newline characters.
        separator_pattern = r'^---\s*\n'
        original_docs = re.split(separator_pattern, string_content, flags=re.MULTILINE)

        pretty_docs = []

        try:
            for doc in original_docs:
                content = _process_single_document(doc, yaml)
                if content is not None:
                    pretty_docs.append(content)

            # Start multi-doc file with separator
            pretty_content = '' if len(pretty_docs) == 1 else separator
            pretty_content += separator.join(pretty_docs)

            if string_content != pretty_content:
                print('File {} is not pretty-formatted'.format(yaml_file))

                if args.autofix:
                    print('Fixing file {}'.format(yaml_file))
                    with io.open(yaml_file, 'w', encoding='UTF-8') as f:
                        f.write(text_type(pretty_content))

                status = 1
        except YAMLError:  # pragma: no cover
            print(
                'Input File {} is not a valid YAML file, consider using check-yaml'.format(
                    yaml_file,
                ),
            )
            return 1

    return status
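
_process_single_document is not shown in this excerpt. A plausible reconstruction (hypothetical, inferred only from the call site) round-trips one document through the configured YAML instance and returns None for empty fragments:

import io

def _process_single_document(document, yaml):
    # hypothetical sketch: parse one document and re-dump it
    content = yaml.load(document)
    if content is None:
        return None
    out = io.StringIO()
    yaml.dump(content, out)
    return out.getvalue()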
Example #13
import os
import sys

from ruamel.yaml import YAML


def merge_yamls(rule_file, tag_file, output_file):

    yaml = YAML(typ='rt')
    yaml.width = 4096  # prevent line wrap
    yaml.preserve_quotes = True

    indexed_rules_tags = {}
    with open(tag_file, "r") as file:
        rules_tags = yaml.load(file)
        for rule in rules_tags:
            indexed_rules_tags[rule['rule']] = rule['tags']

    stats = {
        'lists': 0,
        'macros': 0,
        'rules': 0,
        'rules_unmodified': 0,
        'rules_modified': 0,
        'rules_notfound': 0,
        'rules_notags': 0,
        'other': 0
    }
    other_items = []
    rules_not_found = []
    rules_no_tags_key = []
    required_engine_version = 0

    with open(rule_file, "r") as file:
        falco_doc = yaml.load(file)

        for item in falco_doc:
            if item.get("list") is not None:
                stats['lists'] += 1
                continue
            if item.get("macro") is not None:
                stats['macros'] += 1
                continue
            if item.get("required_engine_version") is not None:
                required_engine_version = item.get("required_engine_version")
                continue

            if item.get("rule") is None:
                # Something that is not a rule, a macro or a list
                stats['other'] += 1
                other_items.append(item)
                continue

            # A rule
            stats['rules'] += 1

            if item.get("tags") == None:
                # Rule doesn't have a 'tags' key
                stats['rules_notags'] += 1
                rules_no_tags_key.append(item.get("rule"))
                continue
            if item.get("rule") not in indexed_rules_tags.keys():
                # Tags file doesn't have a rule with same name
                rules_not_found.append(item.get("rule"))
                continue
            if len(indexed_rules_tags[item.get("rule")]) == 0:
                # Tag file doesn't have new tags for this rule
                stats['rules_unmodified'] += 1
                continue

            # Append tags that are not already present
            for newtag in indexed_rules_tags[item.get("rule")]:
                if newtag not in item['tags']:
                    item['tags'].append(newtag)

            stats['rules_modified'] += 1

        # Write output file

        with open(output_file, "w") as stream:
            stream.write('# Merged tags from ' + os.path.basename(tag_file) +
                         '\n\n')
            yaml.dump(falco_doc, stream)
            stream.close()

        # Output results

        if (len(rules_not_found) > 0):
            print("\nRules not found:")
            for rule in rules_not_found:
                print(rule)

        if (len(rules_no_tags_key) > 0):
            print("\nRules without 'tags' keyword:")
            for rule in rules_no_tags_key:
                print(rule)

        if (len(other_items) > 0):
            print("\nOther elements:")
            for item in other_items:
                print(item)

        print("\nLists: ", stats['lists'])
        print("Macros: ", stats['macros'])
        print("Rules: ", stats['rules'])
        print("  Modified rules: ", stats['rules_modified'])
        print("  Unmodified rules: ", stats['rules_unmodified'])
        print("  Rules not found: ", len(rules_not_found))
        print("  Rules no tags key: ", stats['rules_notags'])
        print("required_engine_version: ", required_engine_version)
        print("Other: ", len(other_items))

        if (len(rules_not_found) > 0):
            sys.exit(1)
        sys.exit(0)
Example #14
import sys
import subprocess
import contextlib
import os
from collections.abc import MutableMapping, MutableSequence

from ruamel.yaml import YAML

yaml = YAML(typ='jinja2')
yaml.indent(mapping=2, sequence=4, offset=2)
yaml.width = 320

DEFAULTS_REPO = "/Users/beckermr/Desktop/aggregate"


@contextlib.contextmanager
def pushd(new_dir):
    previous_dir = os.getcwd()
    os.chdir(new_dir)
    try:
        yield
    finally:
        os.chdir(previous_dir)


def _run_shell_command(cmd):
    subprocess.run(
        cmd,
        shell=True,
        check=True
    )
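
For reference, the two helpers compose like this (illustrative only):

with pushd(DEFAULTS_REPO):
    _run_shell_command('git status --short')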
Example #15
def main(
    chart_folders: List[Path] = typer.Argument(
        ..., help="Folders containing the chart to process"),
    check_branch: str = typer.Option(None,
                                     help="The branch to compare against."),
    chart_base_folder: Path = typer.Option(
        "charts", help="The base folder where the charts reside."),
    debug: bool = False,
):
    _setup_logging(debug)

    git_repository = Repo(search_parent_directories=True)

    if check_branch:
        logger.info(f"Trying to find branch {check_branch}...")
        branch = next((ref for ref in git_repository.remotes.origin.refs
                       if ref.name == check_branch), None)
    else:
        logger.info(f"Trying to determine default branch...")
        branch = next((ref for ref in git_repository.remotes.origin.refs
                       if ref.name == "origin/HEAD"), None)

    if not branch:
        logger.error(
            f"Could not find branch {check_branch} to compare against.")
        raise typer.Exit(1)

    logger.info(f"Comparing against branch {branch}")

    for chart_folder in chart_folders:
        chart_folder = chart_base_folder.joinpath(chart_folder)
        if not chart_folder.is_dir():
            logger.error(f"Could not find folder {str(chart_folder)}")
            raise typer.Exit(1)

        chart_metadata_file = chart_folder.joinpath('Chart.yaml')

        if not chart_metadata_file.is_file():
            logger.error(f"Could not find file {str(chart_metadata_file)}")
            raise typer.Exit(1)

        logger.info(f"Updating changelog annotation for chart {chart_folder}")

        yaml = YAML(typ=['rt', 'string'])
        yaml.indent(mapping=2, sequence=4, offset=2)
        yaml.explicit_start = True
        yaml.preserve_quotes = True
        yaml.width = 4096

        old_chart_metadata = yaml.load(
            git_repository.git.show(f"{branch}:{chart_metadata_file}"))
        new_chart_metadata = yaml.load(chart_metadata_file.read_text())

        try:
            old_chart_dependencies = old_chart_metadata["dependencies"]
        except KeyError:
            old_chart_dependencies = []

        try:
            new_chart_dependencies = new_chart_metadata["dependencies"]
        except KeyError:
            new_chart_dependencies = []

        annotations = []
        for dependency in new_chart_dependencies:
            old_dep = None
            if "alias" in dependency.keys():
                old_dep = next((old_dep for old_dep in old_chart_dependencies
                                if "alias" in old_dep.keys()
                                and old_dep["alias"] == dependency["alias"]),
                               None)
            else:
                old_dep = next((old_dep for old_dep in old_chart_dependencies
                                if old_dep["name"] == dependency["name"]),
                               None)

            add_annotation = False
            if old_dep:
                if dependency["version"] != old_dep["version"]:
                    add_annotation = True
            else:
                add_annotation = True

            if add_annotation:
                if "alias" in dependency.keys():
                    annotations.append({
                        "kind": "changed",
                        "description":
                        f"Upgraded `{dependency['name']}` chart dependency to version {dependency['version']} for alias '{dependency['alias']}'"
                    })
                else:
                    annotations.append({
                        "kind": "changed",
                        "description":
                        f"Upgraded `{dependency['name']}` chart dependency to version {dependency['version']}"
                    })

        if annotations:
            annotations = YAML(
                typ=['rt', 'string']).dump_to_string(annotations)

            if not "annotations" in new_chart_metadata:
                new_chart_metadata["annotations"] = CommentedMap()

            new_chart_metadata["annotations"][
                "artifacthub.io/changes"] = LiteralScalarString(annotations)
            yaml.dump(new_chart_metadata, chart_metadata_file)
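
Note that YAML(typ=['rt', 'string']) and dump_to_string come from the separate ruamel.yaml.string plugin; with core ruamel.yaml alone, the equivalent is to dump into a StringIO. A hedged stand-in:

import io
from ruamel.yaml import YAML

def dump_to_str(data):
    # core-ruamel equivalent of the string plugin's dump_to_string()
    yaml = YAML()
    buf = io.StringIO()
    yaml.dump(data, buf)
    return buf.getvalue()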
Example #16
def _is_recipe_solvable_on_platform(
        recipe_dir,
        cbc_path,
        platform,
        arch,
        build_platform_arch=None,
        additional_channels=(),
):
    # parse the channel sources from the CBC
    parser = YAML(typ="jinja2")
    parser.indent(mapping=2, sequence=4, offset=2)
    parser.width = 320

    with open(cbc_path) as fp:
        cbc_cfg = parser.load(fp.read())

    if "channel_sources" in cbc_cfg:
        channel_sources = cbc_cfg["channel_sources"][0].split(",")
    else:
        channel_sources = ["conda-forge", "defaults", "msys2"]

    if "msys2" not in channel_sources:
        channel_sources.append("msys2")

    if additional_channels:
        channel_sources = list(additional_channels) + channel_sources

    logger.debug(
        "MAMBA: using channels %s on platform-arch %s-%s",
        channel_sources,
        platform,
        arch,
    )

    # here we extract the conda build config in roughly the same way that
    # it would be used in a real build
    logger.debug("rendering recipe with conda build")

    for att in range(2):
        try:
            if att == 1:
                os.system("rm -f %s/conda_build_config.yaml" % recipe_dir)
            config = conda_build.config.get_or_merge_config(
                None,
                platform=platform,
                arch=arch,
                variant_config_files=[cbc_path],
            )
            cbc, _ = conda_build.variants.get_package_combined_spec(
                recipe_dir,
                config=config,
            )
        except Exception:
            if att == 0:
                pass
            else:
                raise
        else:
            # rendered cleanly; don't retry, since the second attempt
            # deletes the recipe's conda_build_config.yaml
            break

    # now we render the meta.yaml into an actual recipe
    metas = conda_build.api.render(
        recipe_dir,
        platform=platform,
        arch=arch,
        ignore_system_variants=True,
        variants=cbc,
        permit_undefined_jinja=True,
        finalize=False,
        bypass_env_check=True,
        channel_urls=channel_sources,
    )

    # get build info
    if build_platform_arch is not None:
        build_platform, build_arch = build_platform_arch.split("_")
    else:
        build_platform, build_arch = platform, arch

    # now we loop through each one and check if we can solve it
    # we check build, host, run and test requirements and ignore the rest
    logger.debug("getting mamba solver")
    solver = _mamba_factory(tuple(channel_sources), f"{platform}-{arch}")
    build_solver = _mamba_factory(
        tuple(channel_sources),
        f"{build_platform}-{build_arch}",
    )
    solvable = True
    errors = []
    outnames = [m.name() for m, _, _ in metas]
    for m, _, _ in metas:
        logger.debug("checking recipe %s", m.name())

        build_req = m.get_value("requirements/build", [])
        host_req = m.get_value("requirements/host", [])
        run_req = m.get_value("requirements/run", [])
        ign_runex = m.get_value("build/ignore_run_exports", [])
        ign_runex_from = m.get_value("build/ignore_run_exports_from", [])

        if build_req:
            build_req = _clean_reqs(build_req, outnames)
            _solvable, _err, build_req, build_rx = build_solver.solve(
                build_req,
                get_run_exports=True,
                ignore_run_exports_from=ign_runex_from,
                ignore_run_exports=ign_runex,
            )
            solvable = solvable and _solvable
            if _err is not None:
                errors.append(_err)

            if m.is_cross:
                host_req = list(set(host_req) | build_rx["strong"])
                if not (m.noarch or m.noarch_python):
                    run_req = list(set(run_req) | build_rx["strong"])
            else:
                if m.noarch or m.noarch_python:
                    if m.build_is_host:
                        run_req = list(set(run_req) | build_rx["noarch"])
                else:
                    run_req = list(set(run_req) | build_rx["strong"])
                    if m.build_is_host:
                        run_req = list(set(run_req) | build_rx["weak"])
                    else:
                        host_req = list(set(host_req) | build_rx["strong"])

        if host_req:
            host_req = _clean_reqs(host_req, outnames)
            _solvable, _err, host_req, host_rx = solver.solve(
                host_req,
                get_run_exports=True,
                ignore_run_exports_from=ign_runex_from,
                ignore_run_exports=ign_runex,
            )
            solvable = solvable and _solvable
            if _err is not None:
                errors.append(_err)

            if m.is_cross:
                if m.noarch or m.noarch_python:
                    run_req = list(set(run_req) | host_rx["noarch"])
                else:
                    run_req = list(set(run_req) | host_rx["weak"])

        if run_req:
            run_req = apply_pins(run_req, host_req or [], build_req or [],
                                 outnames, m)
            run_req = _clean_reqs(run_req, outnames)
            _solvable, _err, _ = solver.solve(run_req)
            solvable = solvable and _solvable
            if _err is not None:
                errors.append(_err)

        tst_req = (m.get_value("test/requires", []) +
                   m.get_value("test/requirements", []) + run_req)
        if tst_req:
            tst_req = _clean_reqs(tst_req, outnames)
            _solvable, _err, _ = solver.solve(tst_req)
            solvable = solvable and _solvable
            if _err is not None:
                errors.append(_err)

    logger.info("RUN EXPORT cache status: %s", _get_run_export.cache_info())
    logger.info(
        "MAMBA SOLVER MEM USAGE: %d MB",
        psutil.Process().memory_info().rss // 1024**2,
    )

    return solvable, errors
Example #17
import re
from abc import ABC
from pathlib import Path
from shutil import copyfile
from typing import Any, List, Optional, Union

from colorama import Fore
from ruamel.yaml import YAML, CommentToken
from ruamel.yaml.comments import CommentedMap
from ruamel.yaml.error import CommentMark

from grayskull.base.extra import get_git_current_user
from grayskull.base.recipe_item import RecipeItem
from grayskull.base.section import Section

yaml = YAML(typ="jinja2")
yaml.indent(mapping=2, sequence=4, offset=2)
yaml.width = 600


class AbstractRecipeModel(ABC):
    ALL_SECTIONS = (
        "package",
        "source",
        "build",
        "outputs",
        "requirements",
        "app",
        "test",
        "about",
        "extra",
    )
    re_jinja_var = re.compile(
Example #18
    1. check bitrise API stack info for latest XCode version
    2. compare latest with current bitrise.yml stack version in repo
    3. if same exit, if not, continue
    4. modify bitrise.yml (update stack value)
    '''

    largest_semver = largest_version()
    tmp_file = 'tmp.yml'

    with open(BITRISE_YML, 'r') as infile:

        obj_yaml = YAML()

        # prevents re-formatting of yml file
        obj_yaml.preserve_quotes = True
        obj_yaml.width = 4096

        y = obj_yaml.load(infile)

        current_semver = y['workflows'][WORKFLOW]['meta']['bitrise.io'][
            'stack']

        # remove pattern prefix from current_semver to compare with largest
        current_semver = current_semver.split(pattern)[1]

        if current_semver == largest_semver:
            print('Xcode version unchanged! aborting.')
        else:
            print('New Xcode version available: {0} ... updating bitrise.yml!'.
                  format(largest_semver))
            # add prefix pattern back to be recognizable by bitrise
Example #19
from git import Repo
import github
import os
from .utils import tmp_directory
from conda_smithy.github import configure_github_team
import textwrap
from functools import lru_cache

from ruamel.yaml import YAML

YAML_JINJA2 = YAML(typ='jinja2')
YAML_JINJA2.indent(mapping=2, sequence=4, offset=2)
YAML_JINJA2.width = 160
YAML_JINJA2.allow_duplicate_keys = True


@lru_cache(maxsize=None)
def get_filter_out_members():
    gh = github.Github(os.environ['GH_TOKEN'])
    org = gh.get_organization('conda-forge')
    teams = ['staged-recipes', 'help-r']
    gh_teams = list(team for team in org.get_teams() if team.name in teams)
    members = set()
    for team in gh_teams:
        members.update([m.login for m in team.get_members()])
    return members


def filter_members(members):
    out = get_filter_out_members()
    return [m for m in members if m not in out]
Example #20
def _register_feedstock_outputs(feedstock):
    unames = set()

    # this is a common way in which feedstocks are wrong
    if os.path.exists("recipe/meta.yaml"):
        recipe_loc = "recipe"
    elif os.path.exists("recipe/recipe/meta.yaml"):
        recipe_loc = "recipe/recipe"
    else:
        raise RuntimeError("could not find recipe location!")

    cbcs = sorted(glob.glob(os.path.join(".ci_support", "*.yaml")))
    for cbc_fname in cbcs:
        # we need to extract the platform (e.g., osx, linux) and arch (e.g., 64, aarch64)
        # conda smithy forms a string that is
        #
        #  {{ platform }} if arch == 64
        #  {{ platform }}_{{ arch }} if arch != 64
        #
        # Thus we undo that munging here.
        _parts = os.path.basename(cbc_fname).split("_")
        platform = _parts[0]
        arch = _parts[1]
        if arch not in ["32", "aarch64", "ppc64le", "armv7l"]:
            arch = "64"

        # parse the channel sources from the CBC
        parser = YAML(typ="jinja2")
        parser.indent(mapping=2, sequence=4, offset=2)
        parser.width = 320

        with open(cbc_fname, "r") as fp:
            cbc_cfg = parser.load(fp.read())

        if "channel_sources" in cbc_cfg:
            channel_sources = cbc_cfg["channel_sources"][0].split(",")
        else:
            channel_sources = ["conda-forge", "defaults", "msys2"]

        if "msys2" not in channel_sources:
            channel_sources.append("msys2")

        # here we extract the conda build config in roughly the same way that
        # it would be used in a real build
        config = conda_build.config.get_or_merge_config(
            None,
            exclusive_config_file=cbc_fname,
            platform=platform,
            arch=arch,
        )
        cbc, _ = conda_build.variants.get_package_combined_spec(recipe_loc,
                                                                config=config)

        # now we render the meta.yaml into an actual recipe
        metas = conda_build.api.render(
            recipe_loc,
            platform=platform,
            arch=arch,
            ignore_system_variants=True,
            variants=cbc,
            permit_undefined_jinja=True,
            finalize=False,
            bypass_env_check=True,
            channel_urls=channel_sources,
        )

        for m, _, _ in metas:
            unames.add(m.name())

    print("    output names:", unames)

    for name in unames:
        sharded_name = _get_sharded_path(name)
        outpth = os.path.join(
            os.environ["FEEDSTOCK_OUTPUTS_REPO"],
            sharded_name,
        )

        subprocess.run(["git", "pull", "--quiet"],
                       check=True,
                       cwd=os.environ["FEEDSTOCK_OUTPUTS_REPO"])

        if not os.path.exists(outpth):
            os.makedirs(os.path.dirname(outpth), exist_ok=True)
            with open(outpth, "w") as fp:
                json.dump({"feedstocks": [feedstock]}, fp)

            subprocess.run(["git", "add", sharded_name],
                           check=True,
                           cwd=os.environ["FEEDSTOCK_OUTPUTS_REPO"])

            subprocess.run([
                "git", "commit", "-am",
                "[ci skip] [skip ci] [cf admin skip] ***NO_CI*** "
                "added output %s for conda-forge/%s" % (name, feedstock)
            ],
                           check=True,
                           cwd=os.environ["FEEDSTOCK_OUTPUTS_REPO"])

            subprocess.run(["git", "push", "--quiet"],
                           check=True,
                           cwd=os.environ["FEEDSTOCK_OUTPUTS_REPO"])
            print("    added output:", name)
Example #21
 def __init__(self, file_name):
     """Create a schema for my tests."""
     self.__schema = Schema({
         Tags.FOLDER_TAG:
         str,
         Tags.HOMEWORKS_TAG: [{
             Tags.NAME_TAG:
             str,
             Tags.FOLDER_TAG:
             str,
             Optional(Tags.DEADLINE_TAG, default=MAX_DATE_STR):
             str,
             Tags.TASKS_TAG: [{
                 Tags.NAME_TAG:
                 str,
                 Tags.LANGUAGE_TAG:
                 Or(LangTags.CPP, LangTags.BASH),
                 Tags.FOLDER_TAG:
                 str,
                 Optional(Tags.OUTPUT_TYPE_TAG, default=OutputTags.STRING):
                 Or(OutputTags.STRING, OutputTags.NUMBER),
                 Optional(Tags.COMPILER_FLAGS_TAG, default="-Wall"):
                 str,
                 Optional(Tags.BINARY_NAME_TAG, default="main"):
                 str,
                 Optional(Tags.PIPE_TAG, default=""):
                 str,
                 Optional(Tags.BUILD_TYPE_TAG, default=BuildTags.CMAKE):
                 Or(BuildTags.CMAKE, BuildTags.SIMPLE),
                 Optional(Tags.INJECT_FOLDER_TAG): [str],
                 Optional(Tags.TESTS_TAG): [{
                     Tags.NAME_TAG:
                     str,
                     Optional(Tags.INPUT_TAG):
                     str,
                     Optional(Tags.INJECT_FOLDER_TAG): [str],
                     Optional(Tags.RUN_GTESTS_TAG, default=False):
                     bool,
                     Optional(Tags.EXPECTED_OUTPUT_TAG):
                     Or(str, float, int)
                 }]
             }]
         }]
     })
     yaml = YAML()
     yaml.width = 4096  # big enough value to prevent wrapping
     yaml.explicit_start = True
     yaml.indent(mapping=2, sequence=4, offset=2)
     with open(file_name, 'r') as stream:
         contents = SchemaManager.__to_simple_dict(yaml.load(stream))
         try:
             self.__validated_yaml = self.__schema.validate(contents)
         except SchemaError as exc:
             sys.exit(exc.code)
     # Write the schema every time we run this code while developing. We
     # don't want to run this when the package is installed as this we won't
     # have the permission. This is intended to keep the schema file up to
     # date when we add new stuff to it.
     try:
         with open(SCHEMA_FILE, 'w') as outfile:
             str_dict = SchemaManager.__sanitize_value(
                 self.__schema._schema)
             yaml.dump(str_dict, outfile)
     except OSError:
         log.debug(
             "Cannot write schema file. We only use this while developing.")
Example #22
def pretty_format_yaml(argv=None):
    # type: (typing.Optional[typing.List[typing.Text]]) -> int
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--autofix",
        action="store_true",
        dest="autofix",
        help="Automatically fixes encountered not-pretty-formatted files",
    )
    parser.add_argument(
        "--indent",
        type=int,
        default="2",
        help=("The number of indent spaces or a string to be used as delimiter" ' for indentation level e.g. 4 or "\t" (Default: 2)'),
    )
    parser.add_argument(
        "--preserve-quotes",
        action="store_true",
        dest="preserve_quotes",
        help="Keep existing string quoting",
    )

    parser.add_argument("filenames", nargs="*", help="Filenames to fix")
    args = parser.parse_args(argv)

    status = 0

    yaml = YAML()
    yaml.indent = args.indent
    yaml.preserve_quotes = args.preserve_quotes
    # Prevent ruamel.yaml to wrap yaml lines
    yaml.width = maxsize

    separator = "---\n"

    for yaml_file in set(args.filenames):
        with open(yaml_file) as input_file:
            string_content = "".join(input_file.readlines())

        # Split multi-document file into individual documents
        #
        # Not using yaml.load_all() because it reformats primitive (non-YAML) content. It removes
        # newline characters.
        separator_pattern = r"^---\s*\n"
        original_docs = re.split(separator_pattern, string_content, flags=re.MULTILINE)

        # A valid multi-document YAML file might start with the separator.
        # In this case the first document of original_docs will be empty and should not be considered
        if string_content.startswith("---"):
            original_docs = original_docs[1:]

        pretty_docs = []

        try:
            for doc in original_docs:
                content = _process_single_document(doc, yaml)
                if content is not None:
                    pretty_docs.append(content)

            # Start multi-doc file with separator
            pretty_content = "" if len(pretty_docs) == 1 else separator
            pretty_content += separator.join(pretty_docs)

            if string_content != pretty_content:
                print("File {} is not pretty-formatted".format(yaml_file))

                if args.autofix:
                    print("Fixing file {}".format(yaml_file))
                    with io.open(yaml_file, "w", encoding="UTF-8") as output_file:
                        output_file.write(text_type(pretty_content))

                status = 1
        except YAMLError:  # pragma: no cover
            print(
                "Input File {} is not a valid YAML file, consider using check-yaml".format(
                    yaml_file,
                ),
            )
            return 1

    return status
Example #23
 def __init__(self, meta_yaml):
     _yml = YAML(typ='jinja2')
     _yml.indent(mapping=2, sequence=4, offset=2)
     _yml.width = 160
     _yml.allow_duplicate_keys = True
     self.meta = _yml.load(meta_yaml)
Example #24
def main(argv):
    args = parse_args()

    ingress_cn = set_ingressclassname(args.kubernetes)
    script_path = Path(__file__).absolute()
    mysql_values_file = script_path.parent.parent / "./etc/mysql_values.yaml"
    db_pass = gen_password()
    if (args.verbose):
        print(f"mysql_values_file  is {mysql_values_file}")
        print(f"mysql password is {db_pass}")

    # check the yaml of these files manually because the ruamel python lib has issues loading them
    yaml_files_check_list = [
        'ml-operator/values.yaml', 'emailnotifier/values.yaml'
    ]

    ports_array = {
        "simapi": "3000",
        "reportapi": "3002",
        "testapi": "3003",
        "https": "80",
        "http": "80",
        "http-admin": "4001",
        "http-api": "4002",
        "mysql": "3306",
        "mongodb": "27017",
        "inboundapi": "{{ $config.config.schemeAdapter.env.INBOUND_LISTEN_PORT }}",
        "outboundapi": "{{ $config.config.schemeAdapter.env.OUTBOUND_LISTEN_PORT }}"
    }

    print(f"ingressclassname in main is {ingress_cn}")
    p = Path() / args.directory
    print(f"Processing helm charts in directory: [{args.directory}]")
    yaml = YAML()
    yaml.allow_duplicate_keys = True
    yaml.preserve_quotes = True
    yaml.width = 4096

    # walk the directory structure and process all the values.yaml files
    # replace solsson kafka with kymeric
    # replace kafa start up check with netcat test (TODO check to see if this is ok)
    # replace mysql with arm version of mysql and adjust tag on the following line (TODO: check that latest docker mysql/mysql-server latest tag is ok )
    # TODO: maybe don't do this line by line but rather read in the entire file => can match across lines and avoid the next_line_logic
    # for now disable metrics and metrics exporting
    # replace the mojaloop images with the locally built  ones

    print(
        " ==> mod_local_miniloop : Modify helm template files (.tpl) to implement networking/v1"
    )
    # modify the template files
    for vf in p.rglob('*.tpl'):
        backupfile = Path(vf.parent) / f"{vf.name}_bak"
        with FileInput(files=[str(vf)], inplace=True) as f:
            for line in f:
                line = line.rstrip()
                #replace networking v1beta1
                line = re.sub(r"networking.k8s.io/v1beta1",
                              r"networking.k8s.io/v1", line)
                line = re.sub(r"extensions/v1beta1", r"networking.k8s.io/v1",
                              line)
                print(line)

    # modify the ingress.yaml files
    print(
        " ==> mod_local_miniloop : Modify helm template ingress.yaml files to implement newer ingress"
    )
    print(
        f" ==> mod_local_miniloop : Modify helm template ingress.yaml implement correct ingressClassName [{ingress_cn}]"
    )
    for vf in p.rglob('*/ingress.yaml'):
        backupfile = Path(vf.parent) / f"{vf.name}_bak"

        with FileInput(files=[str(vf)], inplace=True) as f:
            for line in f:
                line = line.rstrip()
                if re.search("path:", line):
                    line_dup = line
                    line_dup = re.sub(r"- path:.*$",
                                      r"  pathType: ImplementationSpecific",
                                      line_dup)
                    print(line)
                    print(line_dup)
                elif re.search("serviceName:", line):
                    line_dup = line
                    line_dup = re.sub(r"serviceName:.*$", r"service:",
                                      line_dup)
                    print(line_dup)
                    line = re.sub(r"serviceName:", r"  name:", line)
                    print(line)
                elif re.search("servicePort:", line):
                    line_dup = line
                    line_dup = re.sub(r"servicePort:.*$", r"  port:", line_dup)
                    line = re.sub(r"servicePort: ", r"    number: ", line)
                    # need to replace port names with numbers
                    for pname, pnum in ports_array.items():
                        line = re.sub(f"number: {pname}$", f"number: {pnum}",
                                      line)
                    print(line_dup)
                    print(line)
                elif re.search("ingressClassName", line):
                    # skip any ingressClassname already set => we can re-run program without issue
                    continue
                elif re.search("spec:", line):
                    print(line)
                    print(f"  ingressClassName: {ingress_cn}")
                else:
                    print(line)

    # put the database password file into the mysql helm chart values file
    print(f" ==> mod_local_miniloop : generating a new database password")
    print(
        f" ==> mod_local_miniloop : insert new pw into [{mysql_values_file}]")
    with FileInput(files=[str(mysql_values_file)], inplace=True) as f:
        for line in f:
            line = line.rstrip()
            line = re.sub(r"password: .*$", r"password: '******'",
                          line)
            line = re.sub(r"mysql_native_password BY .*$",
                          r"mysql_native_password BY '" + db_pass + "';", line)
            print(line)

    print(
        " ==> mod_local_miniloop : Modify helm values to implement single mysql database"
    )
    for vf in p.glob('**/*values.yaml'):
        with open(vf) as f:
            if (args.verbose):
                print(f"===> Processing file < {vf.parent}/{vf.name} > ")
            skip = False
            for fn in yaml_files_check_list:
                if vf == Path(fn):
                    if (args.verbose):
                        print(
                            f"This yaml file needs checking skipping load/processing for now =>  {Path(fn)} "
                        )
                    skip = True
            if skip:
                continue
            data = yaml.load(f)

            for x, value in lookup("mysql", data):
                if (value.get("name") == "wait-for-mysql"):
                    value['repository'] = "mysql"
                    value['tag'] = '8.0'
                if value.get("mysqlDatabase"):
                    value['enabled'] = False

            # update the values files to use a mysql instance that has already been deployed
            # and that uses a newly generated database password
            for x, value in lookup("config", data):
                if isinstance(value, dict):
                    if (value.get('db_type')):
                        value['db_host'] = 'mldb'
                        value['db_password'] = db_pass

            ### need to set nameOverride  for mysql for ml-testing-toolkit as it appears to be missing
            # if vf == Path('mojaloop/values.yaml') :
            #     print("Updating the ml-testing-toolkit / mysql config ")
            #     for x, value in lookup("ml-testing-toolkit", data):
            #         value['mysql'] = { "nameOverride" : "ttk-mysql" }

        with open(vf, "w") as f:
            yaml.dump(data, f)

    # now that we are inserting passwords with special characters in the password it is necessary to ensure
    # that $db_password is single quoted in the values files.
    print(
        " ==> mod_local_miniloop : Modify helm values, single quote db_password field to enable secure database password"
    )
    for vf in p.glob('**/*values.yaml'):
        with FileInput(files=[str(vf)], inplace=True) as f:
            for line in f:
                line = line.rstrip()
                line = re.sub(r"\'\$db_password\'", r"$db_password",
                              line)  # makes this re-runnable.
                line = re.sub(r'\$db_password', r"'$db_password'", line)
                print(line)

    # versions of k8s -> 1.20 use containerd not docker and the percona chart
    # or at least the busybox dependency of the percona chart has an issue
    # So here update the chart dependencies to ensure correct mysql is configured
    # using the bitnami helm chart BUT as we are disabling the database in the
    # values files and relying on separately deployed database this update is not really
    # doing anything. see the mini-loop scripts dir for where and how the database deployment
    # is now done.
    print(
        " ==> mod_local_miniloop : Modify helm requirements.yaml replace deprecated percona chart with current mysql"
    )
    for rf in p.rglob('**/*requirements.yaml'):
        with open(rf) as f:
            reqs_data = yaml.load(f)
            #print(reqs_data)
        try:
            dlist = reqs_data['dependencies']
            for i in range(len(dlist)):
                if (dlist[i]['name'] in ["percona-xtradb-cluster", "mysql"]):
                    dlist[i]['name'] = "mysql"
                    dlist[i]['version'] = 8.0
                    dlist[i]['repository'] = "https://charts.bitnami.com/bitnami"
                    dlist[i]['alias'] = "mysql"
                    dlist[i]['condition'] = "mysql.enabled"

        except Exception:
            continue

        with open(rf, "w") as f:
            yaml.dump(reqs_data, f)

    print(
        f"Successfully finished processing helm charts in directory: [{args.directory}]"
    )
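
lookup is not defined in this excerpt. A plausible hypothetical sketch, inferred from the call sites (lookup("mysql", data), lookup("config", data)), is a recursive generator that yields a (path, value) pair for every occurrence of the requested key in a nested dict/list structure:

def lookup(sk, d, path=None):
    # hypothetical sketch: recursively find every value stored under key `sk`
    path = path or []
    if isinstance(d, dict):
        for k, v in d.items():
            if k == sk:
                yield path + [k], v
            yield from lookup(sk, v, path + [k])
    elif isinstance(d, list):
        for i, item in enumerate(d):
            yield from lookup(sk, item, path + [i])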
Example #25
import subprocess
import argparse
import os
import click
import ujson
from ruamel.yaml import YAML
from ruamel.yaml.scalarstring import FoldedScalarString
from pkg_resources import parse_version
import shutil

ryaml = YAML()
ryaml.preserve_quotes = True
ryaml.width = 50000  # make sure long lines will not break (relevant for code section)


def should_keep_yml_file(yml_content, new_to_version):
    if parse_version(yml_content.get('toversion', '99.99.99')) < parse_version(new_to_version) or \
            parse_version(yml_content.get('fromversion', '0.0.0')) >= parse_version(new_to_version):
        return False

    return True


def should_keep_json_file(json_content, new_to_version):
    if parse_version(json_content.get('toVersion', '99.99.99')) < parse_version(new_to_version) or \
            parse_version(json_content.get('fromVersion', '0.0.0')) >= parse_version(new_to_version):
        return False

    return True
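
FoldedScalarString is imported above but not reached in this excerpt; it is typically used to force a folded block scalar on long fields such as script sections, which the very large width setting would otherwise keep on one enormous line. An illustrative sketch:

import sys
from ruamel.yaml import YAML
from ruamel.yaml.scalarstring import FoldedScalarString

ryaml = YAML()
script = FoldedScalarString("print('hello from the code section')")
# dumps as a folded block scalar ('>-') rather than one long quoted line
ryaml.dump({'script': script}, sys.stdout)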

Example #26
import re

import ruamel.yaml
from ruamel.yaml import YAML
from ruamel.yaml.comments import CommentedMap


def main(docname):

    with open(docname, "r") as fi:
        lines = fi.readlines()
    context = {}
    rest_lines = []
    for line in lines:
        # print(line)
        if "{%" in line:
            set_expr = re.search("{%(.*)%}", line)
            set_expr = set_expr.group(1)
            set_expr = set_expr.replace("set", "", 1).strip()
            exec(set_expr, globals(), context)
        else:
            rest_lines.append(line)

    yaml = YAML(typ="rt")
    yaml.preserve_quotes = True
    yaml.default_flow_style = False
    yaml.indent(sequence=4, offset=2)
    yaml.width = 1000
    yaml.Representer = MyRepresenter
    yaml.Loader = ruamel.yaml.RoundTripLoader

    result_yaml = CommentedMap()
    result_yaml["context"] = context

    def has_selector(s):
        return s.strip().endswith("]")

    quoted_lines = []
    for line in rest_lines:
        if has_selector(line):
            selector_start = line.rfind("[")
            selector_end = line.rfind("]")
            selector_content = line[selector_start + 1 : selector_end]

            if line.strip().startswith("-"):
                line = (
                    line[: line.find("-") + 1]
                    + f" sel({selector_content}): "
                    + line[
                        line.find("-") + 1 : min(line.rfind("#"), line.rfind("["))
                    ].strip()
                    + "\n"
                )
        quoted_lines.append(line)
    rest_lines = quoted_lines

    def check_if_quoted(s):
        s = s.strip()
        return s.startswith('"') or s.startswith("'")

    quoted_lines = []
    for line in rest_lines:
        if "{{" in line:
            # make sure that jinja stuff is quoted
            if line.find(":") != -1:
                idx = line.find(":")
            elif line.strip().startswith("-"):
                idx = line.find("-")
            rest = line[idx + 1 :]

            if not check_if_quoted(rest):
                if "'" in rest:
                    rest = rest.replace("'", '"')

                line = line[: idx + 1] + f" '{rest.strip()}'\n"
        quoted_lines.append(line)
    rest_lines = quoted_lines

    skips, wo_skip_lines = [], []
    for line in rest_lines:
        if line.strip().startswith("skip"):
            parts = line.split(":")
            rhs = parts[1].strip()
            if rhs.startswith("true"):
                selector_start = line.rfind("[")
                selector_end = line.rfind("]")
                selector_content = line[selector_start + 1 : selector_end]
                skips.append(selector_content)
            else:
                print("ATTENTION skip: false not handled!")
        else:
            wo_skip_lines.append(line)

    rest_lines = wo_skip_lines
    result_yaml.update(
        ruamel.yaml.load("".join(rest_lines), ruamel.yaml.RoundTripLoader)
    )

    if len(skips) != 0:
        result_yaml["build"]["skip"] = skips

    if result_yaml.get("outputs"):
        for o in result_yaml["outputs"]:
            name = o["name"]
            package = {"name": name}
            del o["name"]
            if o.get("version"):
                package["version"] = o["version"]
                del o["version"]

            build = {}
            if o.get("script"):
                build["script"] = o["script"]
                del o["script"]

            o["package"] = package
            o["build"] = build

        for d in result_yaml["outputs"]:
            print(order_output_dict(d))
        result_yaml["outputs"] = [order_output_dict(d) for d in result_yaml["outputs"]]

    from io import StringIO

    output = StringIO()
    yaml.dump(result_yaml, output)

    # Hacky way to insert an empty line after the context-key-object
    context_output = StringIO()
    yaml.dump(context, context_output)
    context_output = context_output.getvalue()
    context_output_len = len(context_output.split("\n"))

    final_result = output.getvalue()
    final_result_lines = final_result.split("\n")
    final_result_lines.insert(context_output_len, "")

    print("\n".join(final_result_lines))
Example #27
import subprocess
import argparse
import os
from typing import Dict

import click
import ujson
from ruamel.yaml import YAML
from ruamel.yaml.scalarstring import FoldedScalarString
from pkg_resources import parse_version
import shutil

ryaml = YAML()
ryaml.preserve_quotes = True  # type: ignore
# make sure long lines will not break (relevant for code section)
ryaml.width = 50000  # type: ignore

NON_CIRCLE_TEST_PLAYBOOKS_DIRECTORY = 'TestPlaybooks'
DEPRECATED_NON_CIRCLE_TESTS_DIRECTORY = os.path.join('TestPlaybooks', 'NonCircleTests', 'Deprecated')


def should_keep_yml_file(yml_content: Dict, new_to_version: str) -> bool:
    """Check if yml file should stay in the feature branch"""
    if parse_version(yml_content.get('toversion', '99.99.99')) < parse_version(new_to_version) or \
            parse_version(yml_content.get('fromversion', '0.0.0')) >= parse_version(new_to_version):
        return False

    return True
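
# A quick sanity check of the version-window logic above; these version
# strings are made up for illustration. A file is kept only while its
# [fromversion, toversion] window still covers the new cutoff.
assert should_keep_yml_file({'fromversion': '5.0.0'}, '6.0.0') is True
assert should_keep_yml_file({'fromversion': '6.0.0'}, '6.0.0') is False  # starts at/after the cutoff
assert should_keep_yml_file({'toversion': '5.5.0'}, '6.0.0') is False    # ends before the cutoff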


def should_keep_json_file(json_content: Dict, new_to_version: str) -> bool:
Example #28
    def create_compose_file(self):
        '''
        Shape of the network definitions in the generated compose file, e.g.:

        networks:
          testnet:
            ipam:
              config:
                - subnet: 172.23.0.0/24

        networks:
          testnet:
            ipv4_address: 172.23.0.3
        '''

        ds = {
            'version': '3',
            'networks': {
                'ssonet': {
                    'ipam': {
                        'config': [{
                            'subnet': '172.23.0.0/24'
                        }]
                    }
                }
            },
            'services': {
                'kcpostgres': {
                    'container_name': 'kcpostgres',
                    'image': 'postgres:12.2',
                    'environment': {
                        'POSTGRES_DB': 'keycloak',
                        'POSTGRES_USER': '******',
                        'POSTGRES_PASSWORD': '******',
                    },
                    'networks': {
                        'ssonet': {
                            'ipv4_address': '172.23.0.2'
                        }
                    }
                },
                'sso.local.redhat.com': {
                    'container_name': 'sso.local.redhat.com',
                    'image': 'quay.io/keycloak/keycloak:11.0.0',
                    'environment': {
                        'DB_VENDOR': 'postgres',
                        'DB_ADDR': 'kcpostgres',
                        'DB_DATABASE': 'keycloak',
                        'DB_USER': '******',
                        'DB_PASSWORD': '******',
                        'PROXY_ADDRESS_FORWARDING': "true",
                        'KEYCLOAK_USER': '******',
                        'KEYCLOAK_PASSWORD': '******',
                    },
                    'ports': ['8443:8443'],
                    'depends_on': ['kcpostgres'],
                    'networks': {
                        'ssonet': {
                            'ipv4_address': '172.23.0.3'
                        }
                    }
                },
                'kcadmin': {
                    'container_name': 'kcadmin',
                    'image': 'python:3',
                    'build': {
                        'context': f"{os.path.join(self.checkouts_root, 'keycloak_admin')}",
                    },
                    'volumes': [
                        f"./{os.path.join(self.checkouts_root, 'keycloak_admin')}:/app"
                    ],
                    'depends_on': ['sso.local.redhat.com'],
                    # 'command': '/bin/bash -c "cd /app && pip install -r requirements.txt && flask run --host=0.0.0.0 --port=80"'
                    'command': '/bin/bash -c "cd /app && pip install -r requirements.txt && python -c \'from kchelper import init_realm; init_realm()\' && flask run --host=0.0.0.0 --port=80"',
                    'networks': {
                        'ssonet': {
                            'ipv4_address': '172.23.0.4'
                        }
                    }
                },
                'insights_proxy': {
                    'container_name': 'insights_proxy',
                    'image': 'redhatinsights/insights-proxy',
                    'ports': ['1337:1337'],
                    'environment': ['PLATFORM=linux', 'CUSTOM_CONF=true'],
                    'security_opt': ['label=disable'],
                    'extra_hosts': ['prod.foo.redhat.com:127.0.0.1'],
                    'volumes': [
                        f'./{os.path.join(self.checkouts_root, "www", "spandx.config.js")}:/config/spandx.config.js'
                    ]
                },
                'webroot': {
                    'container_name': 'webroot',
                    'image': 'nginx',
                    'volumes': [
                        f"./{os.path.join(self.checkouts_root, 'www')}:/usr/share/nginx/html"
                    ],
                    'command': ['nginx-debug', '-g', 'daemon off;']
                },
                'chrome': {
                    'container_name': 'chrome',
                    'image': 'nginx',
                    'volumes': [
                        f"./{os.path.join(self.checkouts_root, 'insights-chrome')}:/usr/share/nginx/html"
                    ],
                    'command': ['nginx-debug', '-g', 'daemon off;']
                },
                'chrome_beta': {
                    'container_name': 'chrome_beta',
                    'image': 'nginx',
                    'volumes': [
                        f"./{os.path.join(self.checkouts_root, 'insights-chrome')}:/usr/share/nginx/html"
                    ],
                    'command': ['nginx-debug', '-g', 'daemon off;']
                },
                'landing': {
                    'container_name': 'landing',
                    'image': 'nginx',
                    'volumes': [
                        f"./{os.path.join(self.checkouts_root, 'landing-page-frontend', 'dist')}:/usr/share/nginx/html/apps/landing"
                    ],
                    'command': ['nginx-debug', '-g', 'daemon off;']
                },
                'landing_beta': {
                    'container_name': 'landing_beta',
                    'image': 'nginx',
                    'volumes': [
                        f"./{os.path.join(self.checkouts_root, 'landing-page-frontend', 'dist')}:/usr/share/nginx/html/beta/apps/landing"
                    ],
                    'command': ['nginx-debug', '-g', 'daemon off;']
                },
                'entitlements': {
                    'container_name': 'entitlements',
                    'image': 'python:3',
                    'build': {
                        'context': f"{os.path.join(self.checkouts_root, 'entitlements')}",
                    },
                    'volumes': [
                        f"./{os.path.join(self.checkouts_root, 'entitlements')}:/app"
                    ],
                    'command': '/bin/bash -c "cd /app && pip install -r requirements.txt && python api.py"'
                },
                'rbac': {
                    'container_name': 'rbac',
                    'image': 'python:3',
                    'build': {
                        'context': f"{os.path.join(self.checkouts_root, 'rbac')}",
                    },
                    'volumes': [f"./{os.path.join(self.checkouts_root, 'rbac')}:/app"],
                    'command': '/bin/bash -c "cd /app && pip install -r requirements.txt && python api.py"'
                }
            }
        }

        # add frontend if path or hash given
        if self.args.frontend_path or self.args.frontend_hash:
            if self.args.frontend_hash:
                raise Exception('frontend hash not yet implemented!')
            elif self.args.frontend_path:
                fs = {
                    'container_name': 'aafrontend',
                    'image': 'node:10.22.0',
                    'user': '******',
                    'ports': ['8002:8002'],
                    'environment': {
                        'DEBUG': '*:*',
                    },
                    'command': '/bin/bash -c "cd /app && npm install && npm run start:container"',
                    'volumes': [
                        f"{os.path.abspath(os.path.expanduser(self.args.frontend_path))}:/app"
                    ]
                }
                # note: registered under 'frontend' here, while the default
                # branch below uses 'aafrontend'; the container name is the same
                ds['services']['frontend'] = fs

        else:
            # build the frontend?
            aa_fe_srcpath = os.path.join(self.checkouts_root,
                                         'tower-analytics-frontend')
            fs = {
                'container_name': 'aafrontend',
                'image': 'node:10.22.0',
                'user': '******',
                'ports': ['8002:8002'],
                'environment': {
                    'DEBUG': '*:*',
                },
                'command': '/bin/bash -c "cd /app && npm install && npm run start:container"',
                'volumes': [f"./{aa_fe_srcpath}:/app"]
            }
            ds['services']['aafrontend'] = fs

        # build the backend?
        if self.args.backend_mock:
            aa_be_srcpath = os.path.join(self.checkouts_root,
                                         'aa_backend_mock')
            bs = {
                'container_name': 'aabackend',
                'image': 'python:3',
                'build': {
                    'context': f"./{aa_be_srcpath}"
                },
                'environment': {
                    'API_SECURE': '1',
                },
                'volumes': [f"./{aa_be_srcpath}:/app"],
                'command': '/bin/bash -c "cd /app && pip install -r requirements.txt && python api.py"'
            }
            ds['services']['aabackend'] = bs
        else:
            raise Exception('real backend not yet implemented!')
        '''
        kctuple = '%s:%s' % ('sso.local.redhat.com', self.keycloak_ip)
        for k,v in ds['services'].items():
            #'extra_hosts': ['prod.foo.redhat.com:127.0.0.1'],
            if 'extra_hosts' not in v:
                ds['services'][k]['extra_hosts'] = []
            if kctuple not in ds['services'][k]['extra_hosts']:
                ds['services'][k]['extra_hosts'].append(kctuple)
        '''

        yaml = YAML(typ='rt', pure=True)
        yaml.preserve_quotes = True
        # configure indentation through the new-style API: assigning to
        # yaml.indent would shadow the indent() method, and the old
        # block_seq_indent keyword corresponds to the dash offset here
        yaml.indent(mapping=4, sequence=6, offset=4)
        yaml.explicit_start = True
        yaml.width = 1000
        yaml.default_flow_style = False

        #pprint(ds)

        with open('genstack.yml', 'w') as f:
            yaml.dump(ds, f)
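
As a side note, a minimal standalone sketch of how these dump settings shape the output (the data here is a made-up fragment, not the full compose structure):

from io import StringIO
from ruamel.yaml import YAML

yaml = YAML(typ='rt', pure=True)
yaml.explicit_start = True                    # emit a leading '---'
yaml.indent(mapping=4, sequence=6, offset=4)  # 4-space maps, dashes indented 4
yaml.width = 1000                             # keep long command strings on one line

buf = StringIO()
yaml.dump({'services': {'webroot': {'command': ['nginx-debug', '-g', 'daemon off;']}}}, buf)
print(buf.getvalue())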