Example #1
    def copy_dir_yml(self, dir_path, bundle):
        """
        Copy the yml files inside a directory to a bundle.

        :param dir_path: source directory
        :param bundle: destination bundle
        :return: None
        """
        scan_files, _ = get_yml_paths_in_dir(dir_path, error_msg='')
        content_files = 0
        dir_name = os.path.basename(dir_path)
        for path in scan_files:
            if len(os.path.basename(path)) >= self.file_name_max_size:
                self.long_file_names.append(path)

            ryaml = YAML()
            ryaml.allow_duplicate_keys = True
            with io.open(path, mode='r', encoding='utf-8') as file_:
                yml_info = ryaml.load(file_)
            ver = yml_info.get('fromversion', '0')
            print(f' - processing: {ver} ({path})')
            if dir_name in ['Playbooks', 'TestPlaybooks']:
                # in TestPlaybook dir we might have scripts - all should go to test_bundle
                if dir_name == 'TestPlaybooks' and os.path.basename(path).startswith('script-'):
                    self.copy_content_yml(path, os.path.join(bundle, os.path.basename(path)), yml_info)
                self.copy_playbook_yml(path, os.path.join(bundle, os.path.basename(path)))
            else:
                self.copy_content_yml(path, os.path.join(bundle, os.path.basename(path)), yml_info)
            content_files += 1
        print(f' - total files: {content_files}')
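Every example on this page flips the same ruamel.yaml switch, so a minimal sketch of what allow_duplicate_keys actually changes may help (DOC and the variable names are illustrative). With the default of False, a document that repeats a key raises DuplicateKeyError; with True, the document loads and one occurrence of the key wins:

from io import StringIO

from ruamel.yaml import YAML
from ruamel.yaml.constructor import DuplicateKeyError

DOC = "key: first\nkey: second\n"  # the same key appears twice

strict = YAML()  # allow_duplicate_keys defaults to False
try:
    strict.load(StringIO(DOC))
except DuplicateKeyError as err:
    print("strict load rejected the document:", err.problem)

lenient = YAML()
lenient.allow_duplicate_keys = True  # duplicates are tolerated
print(lenient.load(StringIO(DOC)))  # one of the two values wins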
Example #2
    async def _read_secrets(self):
        """Read secrets.yaml into memory."""
        if not self.path_secrets.exists():
            _LOGGER.debug("Home Assistant secrets file does not exist")
            return

        # Read secrets
        try:
            yaml = YAML()
            yaml.allow_duplicate_keys = True
            data = await self.sys_run_in_executor(yaml.load,
                                                  self.path_secrets) or {}

            # Filter to only get supported values
            self.secrets = {
                k: v
                for k, v in data.items()
                if isinstance(v, (bool, float, int, str))
            }

        except YAMLError as err:
            _LOGGER.error("Can't process Home Assistant secrets: %s", err)
        else:
            _LOGGER.debug("Reload Home Assistant secrets: %s",
                          len(self.secrets))
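The await self.sys_run_in_executor(...) call above pushes ruamel's blocking parse onto a worker thread so the event loop stays responsive. Outside Supervisor, plain asyncio gives the same pattern; a minimal sketch (read_secrets here is a hypothetical standalone variant, not Supervisor API):

import asyncio
from pathlib import Path

from ruamel.yaml import YAML


async def read_secrets(path: Path) -> dict:
    """Parse a secrets file in a worker thread, keeping the event loop free."""
    yaml = YAML()
    yaml.allow_duplicate_keys = True
    data = await asyncio.to_thread(yaml.load, path) or {}
    # Keep only scalar values, mirroring the Supervisor filter above
    return {k: v for k, v in data.items() if isinstance(v, (bool, float, int, str))}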
Example #3
    def yaml(self) -> YAML:
        """Create a fresh ruamel YAML instance per command, as ruamel recommends."""
        yaml = YAML()
        yaml.allow_duplicate_keys = self._allow_duplicate_keys
        yaml.preserve_quotes = self._preserve_quotes
        yaml.width = self._width
        return yaml
Example #4
    def __init__(self, yaml_fn, config_name):
        super().__init__()
        with open(yaml_fn) as fp:
            yaml = YAML(typ='unsafe')
            yaml.allow_duplicate_keys = True
            for k, v in yaml.load(fp)[config_name].items():
                self.add_hparam(k, v)
Example #5
def _watcher(osde2ectl_cmd, account_config, my_path, cluster_count, delay,
             my_uuid):
    logging.info('Watcher thread started')
    logging.info('Getting status every %d seconds' % int(delay))
    yaml = YAML(pure=True)
    yaml.default_flow_style = False
    yaml.explicit_start = False
    yaml.explicit_end = False
    yaml.allow_duplicate_keys = True
    with open(my_path + "/account_config.yaml", 'w') as account_file:
        yaml.dump(account_config, account_file)
    with open(my_path + "/account_config.yaml") as account_file:
        my_config = yaml.load(account_file)
    my_thread = threading.current_thread()
    cmd = [osde2ectl_cmd, "list", "--custom-config", "account_config.yaml"]
    # To stop the watcher we expect the run attribute to be not True
    while getattr(my_thread, "run", True):
        logging.debug(cmd)
        process = subprocess.Popen(cmd,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   cwd=my_path,
                                   universal_newlines=True)
        stdout, stderr = process.communicate()

        cluster_count = 0
        state = {}
        status = {}
        error = []
        # Count the various states/statuses and report them via logging
        for line in stdout.splitlines():
            if my_config['ocm']['userOverride'] in line:
                cluster_count += 1
                state_key = line.split()[2]
                status_key = line.split()[3]
                state[state_key] = state.get(state_key, 0) + 1
                status[status_key] = status.get(status_key, 0) + 1

                if state_key == "error":
                    error.append(line.split()[1])
                    logging.debug(line.split()[1])

        logging.info('Requested Clusters for test %s: %d' %
                     (my_uuid, cluster_count))
        if cluster_count != 0:
            logging.debug(state.items())
            logging.debug(status.items())
            state_output = f"Current clusters state: {cluster_count} clusters"
            status_output = f"Current clusters status: {cluster_count} clusters"
            for state_key, count in state.items():
                state_output += f" ({state_key}: {count})"
            for status_key, count in status.items():
                status_output += f" ({status_key}: {count})"
            logging.info(state_output)
            logging.info(status_output)
            if error:
                logging.warning('Clusters in error state: %s' % error)

        time.sleep(delay)
    logging.info('Watcher exiting')
Example #6
def get_template_base(ctx, pathname):
    '''Reads the template YAML file and returns its contents'''
    yaml = YAML()
    yaml.allow_duplicate_keys = True

    try:
        with open(pathname, 'r') as f:
            return yaml.load(f)

    except OSError as e:
        fail(ctx,
             'ERROR Unable to read YAML file {}: {}'.format(pathname, str(e)))
Example #7
def get_base_variables(ctx, base_dir, os_name, os_template, os_version):
    '''Load the base variables from the OS version's template YAML file.'''

    template_file = '{}.yaml'.format(os_template)

    yaml = YAML()
    yaml.allow_duplicate_keys = True

    with open(path.join(base_dir, os_name, os_version, template_file),
              'r') as f:
        return yaml.load(f)
Example #8
def load_yaml(fname):
    """Load a YAML file."""
    yaml = YAML(typ="safe")
    # Compat with HASS
    yaml.allow_duplicate_keys = True
    # Stub HASS constructors
    HassSafeConstructor.name = fname
    yaml.Constructor = HassSafeConstructor

    with open(fname, encoding="utf-8") as conf_file:
        # If configuration file is empty YAML returns None
        # We convert that to an empty dict
        return yaml.load(conf_file) or {}
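HassSafeConstructor exists because Home Assistant configurations use custom tags such as !secret and !include that a stock safe loader would reject. Its real implementation lives in Home Assistant tooling; the sketch below only illustrates the constructor-swapping technique (StubConstructor and its tag list are assumptions, not HASS's actual code):

from ruamel.yaml import YAML
from ruamel.yaml.constructor import SafeConstructor


class StubConstructor(SafeConstructor):
    """Resolve HASS-specific tags to inert placeholder strings."""
    name = None  # file name, set by the caller for error context


for tag in ("!secret", "!include", "!env_var"):
    StubConstructor.add_constructor(
        tag, lambda loader, node: f"<{node.tag} {node.value}>")

yaml = YAML(typ="safe")
yaml.allow_duplicate_keys = True
yaml.Constructor = StubConstructor
print(yaml.load("password: !secret db_password\n"))  # {'password': '<!secret db_password>'}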
Example #9
def run():
    logger = set_logger()

    with open('grid.yaml') as fp:
        temp = YAML()
        temp.allow_duplicate_keys = True
        settings = temp.load(fp)
        test_set = (sys.argv[1:] if len(sys.argv) > 1
                    else settings['common']['config'])
        all_args = [settings[t] for t in test_set]
        entrypoint = settings['common']['entrypoint']
    with open('default.yaml') as fp:
        temp = YAML()
        temp.allow_duplicate_keys = True
        settings_default = temp.load(fp)
        os.environ['suffix_model_id'] = settings_default['default'][
            'suffix_model_id']

    cmd = ' '.join(['python app.py', entrypoint, '%s'])

    all_jobs = []
    for all_arg in all_args:
        k, v = zip(*[(k, v) for k, v in all_arg.items()])
        all_jobs += [{kk: pp
                      for kk, pp in zip(k, p)} for p in itertools.product(*v)]
    while all_jobs:
        all_jobs = fill_gpu_jobs(
            all_jobs,
            logger,
            job_parser=lambda x: cmd % get_tmp_yaml(
                x,
                (os.environ['suffix_model_id']
                 if os.environ['suffix_model_id']
                 else '+'.join(test_set)) + '-'),
            wait_until_next=settings['common']['wait_until_next'],
            retry_delay=settings['common']['retry_delay'],
            do_shuffle=True)

    logger.info('all jobs are done!')
Example #10
    def copy_dir_yml(self, dir_path, bundle):
        """
        Copy the yml files inside a directory to a bundle.

        :param dir_path: source directory
        :param bundle: destination bundle
        :return: None
        """
        scan_files, _ = get_yml_paths_in_dir(dir_path, error_msg='')
        content_files = 0
        dir_name = os.path.basename(dir_path)
        if scan_files:
            print(f"\nStarting process for {dir_path}")
        for path in scan_files:
            if not self.should_process_file_to_bundle(path, bundle):
                continue

            new_file_path = self.add_suffix_to_file_path(
                os.path.join(bundle, os.path.basename(path)))
            if len(os.path.basename(path)) >= self.file_name_max_size:
                self.long_file_names.append(path)

            ryaml = YAML()
            ryaml.allow_duplicate_keys = True
            with io.open(path, mode='r', encoding='utf-8') as file_:
                yml_info = ryaml.load(file_)
            ver = yml_info.get('fromversion', '0')
            updated_yml_info = self.add_from_version_to_yml(
                yml_content=yml_info, save_yml=False)
            if updated_yml_info:
                yml_info = updated_yml_info

            process_message = f' - processing: {path}'
            if ver != '0' and ver != '':
                process_message += f' - current fromversion: {ver}'
            print(process_message)
            if dir_name in ['Playbooks', 'TestPlaybooks']:
                # in TestPlaybook dir we might have scripts - all should go to test_bundle
                if dir_name == 'TestPlaybooks' and os.path.basename(
                        path).startswith('script-'):
                    self.copy_content_yml(path, new_file_path, yml_info)
                self.copy_playbook_yml(path, new_file_path)
            else:
                self.copy_content_yml(path, new_file_path, yml_info)

            content_files += 1

        if content_files > 0:
            print(f'Finished process - total files: {content_files}\n')
Example #11
def load_hass_config(path):
    """Load the HASS config."""
    fname = os.path.join(path, 'configuration.yaml')

    yaml = YAML(typ='safe')
    # Compat with HASS
    yaml.allow_duplicate_keys = True
    # Stub HASS constructors
    HassSafeConstructor.name = fname
    yaml.Constructor = HassSafeConstructor

    with open(fname, encoding='utf-8') as conf_file:
        # If configuration file is empty YAML returns None
        # We convert that to an empty dict
        return yaml.load(conf_file) or {}
Example #12
def yaml_to_obj(yaml_file):
    import codecs

    yaml = YAML()
    yaml.allow_duplicate_keys = True

    with codecs.open(yaml_file, 'rb', 'utf-8') as f:
        datas_dict = yaml.load(f)
        if not datas_dict:
            raise Exception("Please check the file: {}".format(yaml_file))
        return datas_dict
Example #13
def parse_patch(in_yml, by_names=None, by_hash=None):
    with open(in_yml) as yml_patch:
        yml = YAML(typ="safe", pure=True)
        yml.allow_duplicate_keys = True
        units = dict(yml.load(yml_patch))

    funits = units

    if by_hash:
        funits = {pname: funits[pname] for ptype, pname in funits[by_hash]}

    if by_names:
        funits = {
            pname: funits[pname]
            for pname in funits if pname in by_names
        }

    return funits
Example #14
    def merge_expand(self):
        yaml = YAML()
        yaml.Constructor.flatten_mapping = ruamel.yaml.SafeConstructor.flatten_mapping
        yaml.default_flow_style = False
        yaml.allow_duplicate_keys = True
        if not self._args.allow_anchors:
            yaml.representer.ignore_aliases = lambda x: True

        if self._args.file[0] == '-':
            data = yaml.load(sys.stdin)
        else:
            with open(self._args.file[0]) as fp:
                data = yaml.load(fp)
        if self._args.file[1] == '-':
            yaml.dump(data, sys.stdout)
        else:
            with open(self._args.file[1], 'w') as fp:
                yaml.dump(data, fp)
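merge_expand works by borrowing SafeConstructor.flatten_mapping, which expands YAML merge keys (<<: *anchor) into plain key/value pairs during load, and by ignoring aliases so the dump carries no anchors. A minimal before/after sketch using the same constructor swap (DOC is illustrative):

import sys

import ruamel.yaml
from ruamel.yaml import YAML

DOC = """\
defaults: &defaults
  retries: 3
job:
  <<: *defaults
  name: build
"""

yaml = YAML()
yaml.Constructor.flatten_mapping = ruamel.yaml.SafeConstructor.flatten_mapping
yaml.representer.ignore_aliases = lambda x: True  # drop anchors/aliases on dump
data = yaml.load(DOC)
yaml.dump(data, sys.stdout)  # 'job' now lists retries: 3 directly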
Example #15
    def read_yaml(self, p_filename):
        """ Find the Yaml file and read it in.
        Save file location and source YAML

        @return: a ConfigYamlNodeInformation() filled in
        """
        l_node = self.find_config_node(p_filename)
        if l_node is None:
            LOG.info('Config file "{}" not found.'.format(p_filename))
            return None
        l_yaml = YAML(typ='rt')
        l_yaml.allow_duplicate_keys = True
        with open(l_node.YamlPath, 'r') as l_file:
            l_data = l_yaml.load(l_file)
            l_node.Yaml = l_data
        self.m_pyhouse_obj._Config.YamlTree[p_filename] = l_node
        # LOG.info('Loaded config file "{}" '.format(p_filename))
        # LOG.debug(PrettyFormatAny.form(self.m_pyhouse_obj._Config.YamlTree, 'Tree', 190))
        return l_node
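typ='rt' (round trip, also the default) is what lets this config loader hand back a Yaml node that can be edited and saved again without losing comments or layout; a minimal sketch:

import sys

from ruamel.yaml import YAML

DOC = "# scheduler config\ninterval: 5   # minutes\n"

yaml = YAML(typ='rt')
yaml.allow_duplicate_keys = True
data = yaml.load(DOC)
data['interval'] = 10        # edit the parsed tree in place
yaml.dump(data, sys.stdout)  # both comments survive the round trip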
Example #16
def parse_signature(sig_file):
    yaml = YAML()
    yaml.allow_duplicate_keys = True
    with open(sig_file) as _sig_file:
        signature = _sig_file.read()

    sigs = yaml.load(signature)
    parsed_sigs = []
    for sig in sigs:
        __sig = Signature(sig['name'])
        if 'proc_name' in sig:
            __sig.proc_name = sig['proc_name']
        for func in sig['functions']:
            __func = Function(func['func_name'])
            for key, value in func['arguments'].items():
                __func.arg_dict[key] = value
            __sig.func_list.append(__func)
        # collect every parsed signature so none are dropped
        parsed_sigs.append(__sig)

    return parsed_sigs
Example #17
def load_settings(default_settings_file, override_settings_files):
    yaml = YAML()
    yaml.allow_duplicate_keys = False
    try:
        log("Loading common default settings from: " +
            DEFAULT_COMMON_SETTINGS_FILE)
        settings = dict(yaml.load(open(DEFAULT_COMMON_SETTINGS_FILE)))
        log("Loading default settings from: " + default_settings_file)
        settings.update(yaml.load(open(default_settings_file)))

        for settings_fpath in override_settings_files:
            log("Loading settings from: " + settings_fpath)
            override_settings = yaml.load(open(settings_fpath))
            settings.update(override_settings)
        log("Loaded settings.")
    except ruamel.yaml.constructor.DuplicateKeyError as ex:
        log(red(ex))
        log(red("Aborting!"))
        exit(1)

    return settings
Example #18
File: dict.py Project: fantix/gen3
async def main(args, loop):
    if args.url.startswith("http"):
        log.critical("Downloading dictionary JSON...")
        async with aiohttp.ClientSession(loop=loop) as session:
            async with session.get(args.url) as resp:
                data = await resp.json()

    elif os.path.isfile(args.url):
        log.critical("Reading dictionary JSON...")
        with open(args.url) as f:
            data = json.load(f)

    else:
        log.critical("Reading dictionary YAML source...")
        data = {}
        yaml = YAML(typ="safe")
        yaml.allow_duplicate_keys = True
        for path in glob.glob(f"{args.url}/*.yaml"):
            with open(path) as f:
                data[os.path.basename(path)] = yaml.load(f)

    loader.load(data)
Example #19
def _ruamel_yaml_fixer(source_code: str) -> str:
    """Run Ruamel's yaml fixer.

    Args:
        source_code: Source code to be corrected.

    Returns:
        Corrected source code.
    """
    # Configure YAML formatter
    yaml = YAML()
    yaml.indent(mapping=2, sequence=4, offset=2)
    yaml.allow_duplicate_keys = True
    yaml.explicit_start = True  # Start the document with ---
    source_dict = yaml.load(source_code)

    # Return the output to a string
    string_stream = StringIO()
    yaml.dump(source_dict, string_stream)
    source_code = string_stream.getvalue()
    string_stream.close()

    return source_code.strip()
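ruamel's YAML.dump writes to a stream rather than returning text, which is why the fixer above routes through StringIO. A reusable helper for that pattern might look like this (yaml_dumps is a hypothetical name, not ruamel API):

from io import StringIO

from ruamel.yaml import YAML


def yaml_dumps(data, **yaml_attrs) -> str:
    """Serialize data to a YAML string via an in-memory stream."""
    yaml = YAML()
    for attr, value in yaml_attrs.items():
        setattr(yaml, attr, value)  # e.g. explicit_start=True
    stream = StringIO()
    yaml.dump(data, stream)
    return stream.getvalue()


print(yaml_dumps({"a": 1}, explicit_start=True))  # ---\na: 1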
Example #20
from ruamel.yaml import YAML
from ruamel.yaml.constructor import DoubleQuotedScalarString

yaml = YAML()
yaml.preserve_quotes = True
yaml.allow_duplicate_keys = True
yaml.indent(mapping=2, sequence=4, offset=2)

DoubleQuotes = DoubleQuotedScalarString
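The DoubleQuotes alias gives call sites a short way to force double-quoted output for a scalar that would otherwise be dumped plain (or reparsed as a number); a usage sketch:

import sys

from ruamel.yaml import YAML
from ruamel.yaml.scalarstring import DoubleQuotedScalarString

yaml = YAML()
yaml.dump({"version": DoubleQuotedScalarString("1.0")}, sys.stdout)
# emits: version: "1.0"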
Example #21
    def save_yaml(path, data):
        ryaml = YAML()
        ryaml.allow_duplicate_keys = True

        with open(path, 'w') as f:
            ryaml.dump(data, f)
Example #22
import sys
import os
import subprocess
import shlex
from riscv_isac.log import logger
import ruamel
from ruamel.yaml import YAML
from ruamel.yaml.representer import RoundTripRepresenter, SafeRepresenter
import yaml as pyyaml
from elftools.elf.elffile import ELFFile

yaml = YAML(typ="rt")
yaml.default_flow_style = False
yaml.explicit_start = True
yaml.allow_unicode = True
yaml.allow_duplicate_keys = False

safe_yaml = YAML(typ="safe")
safe_yaml.default_flow_style = False
safe_yaml.explicit_start = True
safe_yaml.allow_unicode = True
safe_yaml.allow_duplicate_keys = False


def collect_label_address(elf, label):
    with open(elf, 'rb') as f:
        elffile = ELFFile(f)
        # elfclass is a public attribute of ELFFile, read from its header
        symtab = elffile.get_section_by_name('.symtab')
        size = symtab.num_symbols()
        mains = symtab.get_symbol_by_name(label)
        # get_symbol_by_name returns a list of matching symbols (or None);
        # a symbol's address is its st_value entry
        return mains[0].entry['st_value'] if mains else None
Example #23
def prepare_recipe(package_dir, git_repos_dir, env_dir):
    assert os.path.exists(package_dir), package_dir
    assert not os.path.exists(git_repos_dir), git_repos_dir
    assert os.path.exists(env_dir), env_dir

    meta_path = os.path.join(package_dir, 'meta.yaml')
    with open(meta_path, 'r+') as meta_file:
        # Read 'meta.yaml' contents
        meta_contents = meta_file.read()
        original_meta = meta_contents

        # Load yaml with mostly dummy Jinja2 structures used in Conda recipes
        def _pin_compatible(package_name,
                            min_pin='x.x.x.x.x.x',
                            max_pin='x',
                            lower_bound=None,
                            upper_bound=None):
            return ''

        def _pin_subpackage(package_name,
                            min_pin='x.x.x.x.x.x',
                            max_pin='x',
                            exact=False):
            return ''

        conda_context = {
            'environ': os.environ,
            'os': os,
            'GIT_BUILD_STR': '',
            'GIT_DESCRIBE_HASH': '',
            'GIT_DESCRIBE_NUMBER': '',
            'GIT_DESCRIBE_TAG': '',
            'GIT_FULL_HASH': '',
            'compiler': lambda _: '',
            'pin_compatible': _pin_compatible,
            'pin_subpackage': _pin_subpackage,
            'resolved_packages': lambda _: [],
        }
        jinja_rendered_meta = jinja2.Template(meta_contents).render(
            conda_context)

        safe_yaml = YAML(typ='safe')
        safe_yaml.allow_duplicate_keys = True
        # Yaml loader doesn't like [OS] after quoted strings (which are OK for Conda)
        # Quotes are removed before loading as they are irrelevant at this point
        meta = safe_yaml.load(jinja_rendered_meta.replace('"', ''))

        if len(list(find("git_url", meta))) < 1:
            print(
                'No git repositories in the package recipe; tag rewriting will be skipped.'
            )
            print()
        else:
            sources = meta['source']

            # Make sources a one-element list if it's not a list
            if not isinstance(sources, list):
                sources = [sources]

            if 'git_url' not in sources[0]:
                print(
                    "First source isn't a git repository; tag rewriting will be skipped."
                )
                print()
            else:
                # Clone sources and make conda use always those
                print('Cloning git sources...')
                print()

                os.mkdir(git_repos_dir)
                first_git_repo_path = None

                for src in sources:
                    # The recipe can have some mix of git and non-git sources
                    if 'git_url' in src:
                        local_git_url = _prepare_single_source(
                            git_repos_dir, src)
                        meta_contents = meta_contents.replace(
                            f"git_url: {src['git_url']}",
                            f"git_url: {local_git_url}")
                        if first_git_repo_path is None:
                            first_git_repo_path = local_git_url

                # Set version based on modified git repo
                print('Modifying git tags to set proper package version...')

                git_rewrite_tags(first_git_repo_path)
                _add_extra_tags_if_exist(package_dir, first_git_repo_path)
                version = git_describe(first_git_repo_path).replace('-', '_')
                meta_contents = re.sub(r'(\s+version:).+',
                                       r'\1 ' + str(version), meta_contents)

                # Reset 'meta.yaml' and save metadata without GIT_* vars
                meta_file.seek(0)
                meta_file.truncate()
                meta_file.write(meta_contents)

    # Render metadata
    meta = render_metadata(package_dir, env_dir)

    # Embed script_envs in the environment
    print("Embedding 'build/script_env' variables in the environment...")

    if 'build' in meta.keys() and 'script_env' in meta['build'].keys():
        env_vars = meta['build']['script_env']
        assert isinstance(env_vars, list), env_vars

        vars_string = ''
        env_vars_set = []
        for env_var in env_vars:
            if env_var in os.environ.keys():
                vars_string += f"{env_var}={os.environ[env_var]} "
                env_vars_set.append(env_var)
            else:
                print(
                    f"{env_var} variable isn't set; won't be allowed during building."
                )
        if vars_string:
            _call_conda_cmd_in_env(f"conda env config vars set {vars_string}",
                                   env_dir)
        meta['build']['script_env'] = env_vars_set

    # Save rendered recipe as meta.yaml
    meta_path = os.path.join(package_dir, 'meta.yaml')
    with open(meta_path, 'r+') as meta_file:
        meta_lines = meta_file.readlines()

        # Restore `{{ compiler('c/cxx') }}` even though their corresponding packages have already
        # been added to the recipe. Their presence has some additional influence on `conda-build`.
        for lang, extra_specifiers in re.findall(
                r'''
                \{\{\s*
                compiler
                [\(\'"\s]+      # (' or (" with optional spaces
                ([a-zA-Z]+)     # lang (c/cxx/...)
                [\)\'"\s]+      # ') or ") with optional spaces
                \}\}
                (.*)            # extra_specifiers (e.g. 4.0 [linux])
                ''', ''.join(meta_lines), re.VERBOSE):
            # OS specifier will only work in quoted string if it's after '#' (double '#' is OK)
            extra_specifiers = extra_specifiers.replace('[', '# [')
            # lang has to be surrounded by double quotes; PyYaml dumps single ones wrong for Conda
            yaml_compiler = '{{ compiler("' + lang + '") }}' + extra_specifiers
            # In case there's no such section for the current OS ({{ compiler }} is for other OS)
            if 'build' not in meta['requirements']:
                meta['requirements']['build'] = []
            meta['requirements']['build'].append(yaml_compiler)

        meta_file.seek(0)
        meta_file.write('# Rendered by conda-build-prepare\n')
        meta_file.write(
            '# Original meta.yaml can be found at the end of this file\n')
        meta_file.write('\n')

        # Convert local git_urls with cygpath, if available
        if sys.platform in ['cygwin', 'msys', 'win32']:
            if isinstance(meta['source'], list):
                for src in meta['source']:
                    _try_cygpath_on_git_url(src)
            else:
                _try_cygpath_on_git_url(meta['source'])
        yaml.dump(meta, meta_file)
        meta_file.write('\n')

        # Save original meta.yaml contents as a comment at the end
        meta_file.write('# Original meta.yaml:\n')
        meta_file.write('#\n')
        meta_file.write('# ' + original_meta.replace('\n', '\n# ')[:-2])
Example #24
"""Tools handle YAML files for Supervisor."""
import logging
from pathlib import Path

from atomicwrites import atomic_write
from ruamel.yaml import YAML, YAMLError

from ..exceptions import YamlFileError

_YAML = YAML(typ="safe")
_YAML.allow_duplicate_keys = True

_LOGGER: logging.Logger = logging.getLogger(__name__)


def read_yaml_file(path: Path) -> dict:
    """Read YAML file from path."""
    try:
        return _YAML.load(path) or {}

    except (YAMLError, AttributeError) as err:
        raise YamlFileError(f"Can't read YAML file {path!s} - {err!s}",
                            _LOGGER.error) from err


def write_yaml_file(path: Path, data: dict) -> None:
    """Write a YAML file."""
    try:
        with atomic_write(path, overwrite=True) as fp:
            _YAML.dump(data, fp)
        path.chmod(0o600)
    except (YAMLError, OSError) as err:
        raise YamlFileError(f"Can't write YAML file {path!s} - {err!s}",
                            _LOGGER.error) from err
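Together the two helpers give Supervisor an atomic read-modify-write cycle for YAML state; a usage sketch (the path is illustrative):

from pathlib import Path

config_path = Path("/data/addon_config.yaml")
config = read_yaml_file(config_path)  # {} if the file is empty
config["watchdog"] = True
write_yaml_file(config_path, config)  # atomic replace, then chmod 0o600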
Example #25
from logging import getLogger

logger = getLogger(__name__)

from knack.util import CLIError
from knack.help_files import helps
from azure.cli.core.mock import DummyCli
from azure.cli.core.util import get_installed_cli_distributions
from azure.cli.core._help import CliCommandHelpFile, CliGroupHelpFile
from azure.cli.core.file_util import _store_parsers, _is_group

try:
    from ruamel.yaml import YAML
    yaml = YAML()
    yaml.width = 1000  # prevents wrapping around in dumper.
    yaml.allow_duplicate_keys = True  # TODO: allow duplicate keys within help entries. see az container create. Remove this.
except ImportError as e:
    msg = "{}\npip install ruamel.yaml to use this script.".format(e)
    exit(msg)

PACKAGE_PREFIX = "azure.cli.command_modules"
CLI_PACKAGE_NAME = 'azure-cli'
COMPONENT_PREFIX = 'azure-cli-'

failed = 0

loaded_helps = {}


def get_all_help(cli_ctx):
    invoker = cli_ctx.invocation
Example #26
from git import Repo
import github
import os
from .utils import tmp_directory
from conda_smithy.github import configure_github_team
import textwrap
from functools import lru_cache

from ruamel.yaml import YAML

YAML_JINJA2 = YAML(typ='jinja2')
YAML_JINJA2.indent(mapping=2, sequence=4, offset=2)
YAML_JINJA2.width = 160
YAML_JINJA2.allow_duplicate_keys = True


@lru_cache(maxsize=None)
def get_filter_out_members():
    gh = github.Github(os.environ['GH_TOKEN'])
    org = gh.get_organization('conda-forge')
    teams = ['staged-recipes', 'help-r']
    gh_teams = list(team for team in org.get_teams() if team.name in teams)
    members = set()
    for team in gh_teams:
        members.update([m.login for m in team.get_members()])
    return members


def filter_members(members):
    out = get_filter_out_members()
    return [m for m in members if m not in out]
Example #27
    def __init__(self, meta_yaml):
        _yml = YAML(typ='jinja2')
        _yml.indent(mapping=2, sequence=4, offset=2)
        _yml.width = 160
        _yml.allow_duplicate_keys = True
        self.meta = _yml.load(meta_yaml)
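typ='jinja2' comes from the separate ruamel.yaml.jinja2 plugin package; it round-trips templated YAML such as Conda's meta.yaml without rendering the template expressions. A minimal sketch (assuming the plugin is installed):

import sys

from ruamel.yaml import YAML

yaml = YAML(typ='jinja2')  # requires the ruamel.yaml.jinja2 plugin package
data = yaml.load("name: {{ package.name }}\nversion: '1.0'\n")
yaml.dump(data, sys.stdout)  # the Jinja2 expression survives unchanged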
Example #28
def main(argv):
    args = parse_args()

    ## check the YAML of these files separately because the ruamel Python lib
    ## has issues loading them
    yaml_files_check_list = [
        'ml-operator/values.yaml', 'emailnotifier/values.yaml'
    ]

    ports_array = {
        "simapi": "3000",
        "reportapi": "3002",
        "testapi": "3003",
        "https": "80",
        "http": "80",
        "http-admin": "4001",
        "http-api": "4002",
        "mysql": "3306",
        "mongodb": "27017",
        "inboundapi": "{{ $config.config.schemeAdapter.env.INBOUND_LISTEN_PORT }}",
        "outboundapi": "{{ $config.config.schemeAdapter.env.OUTBOUND_LISTEN_PORT }}"
    }

    p = Path() / args.directory
    print(f"Processing helm charts in directory: [{args.directory}]")
    yaml = YAML()
    yaml.allow_duplicate_keys = True
    yaml.preserve_quotes = True
    yaml.width = 4096

    # walk the directory structure and process all the values.yaml files
    # replace solsson kafka with kymeric
    # replace kafka start-up check with netcat test (TODO: check that this is ok)
    # replace mysql with the arm version of mysql and adjust the tag on the
    #   following line (TODO: check that the latest docker mysql/mysql-server tag is ok)
    # TODO: maybe don't do this line by line but rather read in the entire file
    #   => can match across lines and avoid the next-line logic
    # for now disable metrics and metrics exporting
    # replace the mojaloop images with the locally built ones

    if (args.all or args.values):
        print(
            "\n\n============================================================="
        )
        print("Processing values.yaml files.. ")
        print("=============================================================")

        for vf in p.rglob('*/values.yaml'):
            backupfile = Path(vf.parent) / f"{vf.name}_bak"
            print(f"{vf} : {backupfile}")
            copyfile(vf, backupfile)
            with FileInput(files=[vf], inplace=True) as f:
                next_line_is_mojaloop_tag = False
                for line in f:
                    line = line.rstrip()

                    # now update the mojaloop images
                    if (next_line_is_mojaloop_tag):
                        line = re.sub("tag:.*$", "tag: latest", line)
                        next_line_is_mojaloop_tag = False
                    # TODO: check that there is no mojaloop image with > 3 parts to its name, i.e. > 3 hyphens
                    if re.match(r"(\s+)repository:\s*mojaloop", line):
                        line = re.sub(
                            r"(\s+)repository:\s*mojaloop/(\w+)-(\w+)-(\w+)-(\w+)",
                            r"\1repository: \2_\3_\4_\5_local", line)
                        line = re.sub(
                            r"(\s+)repository:\s*mojaloop/(\w+)-(\w+)-(\w+)",
                            r"\1repository: \2_\3_\4_local", line)
                        line = re.sub(
                            r"(\s+)repository:\s*mojaloop/(\w+)-(\w+)",
                            r"\1repository: \2_\3_local", line)
                        line = re.sub(r"(\s+)repository:\s*mojaloop/(\w+)",
                                      r"\1repository: \2_local", line)
                        next_line_is_mojaloop_tag = True

                    print(line)

    ## TODO: need to modify the kafka requirements.yaml to update the zookeeper image
    ##       if I am fully automating this
    # walk the directory structure and process all the requirements.yaml files
    # kafka => local kafka chart
    # mysql/percona => local mysql chart with later arm64 based image
    # zookeeper => local zookeeper (this is in the requirements.yaml of the kafka local chart)

    if (args.all or args.requirements):
        print(
            "\n\n============================================================="
        )
        print("Processing requirements.yaml files ")
        print("=============================================================")
        for rf in p.rglob('*/requirements.yaml'):
            backupfile = Path(rf.parent) / f"{rf.name}_bak"
            print(f"{rf} : {backupfile}")
            copyfile(rf, backupfile)
            with open(rf) as f:
                reqs_data = yaml.load(f)
                #print(reqs_data)
            try:
                dlist = reqs_data['dependencies']
                for i in range(len(dlist)):
                    if (dlist[i]['name'] == "percona-xtradb-cluster"):
                        print(f"old was: {dlist[i]}")
                        dlist[i]['name'] = "mysql"
                        dlist[i]['version'] = "1.0.0"
                        dlist[i]['repository'] = "file://../mysql"
                        dlist[i]['alias'] = "mysql"
                        dlist[i]['condition'] = "enabled"
                        print(f"new is: {dlist[i]}")

                    if (dlist[i]['name'] == "kafka"):
                        print(f"old was: {dlist[i]}")
                        dlist[i]['repository'] = "file://../kafka"
                        dlist[i]['version'] = "1.0.0"
                        print(f"new is: {dlist[i]}")

                    if (dlist[i]['name'] == "zookeeper"):
                        print(f"old was: {dlist[i]}")
                        dlist[i]['version'] = "1.0.0"
                        dlist[i]['repository'] = "file://../zookeeper"
                        print(f"new is: {dlist[i]}")

                    if (dlist[i]['name'] == "mongodb"):
                        print(f"old was: {dlist[i]}")
                        dlist[i]['version'] = "1.0.0"
                        dlist[i]['repository'] = "file://../mongodb"
                        print(f"new is: {dlist[i]}")
            except Exception:
                continue
            #print(yaml.dump(reqs_data))
            with open(rf, "w") as f:
                yaml.dump(reqs_data, f)

    if (args.testonly):
        print(
            "\n\n==============================================================="
        )
        print("running toms code tests")
        print(
            "===============================================================")

        for vf in p.rglob('*/values.yaml'):
            backupfile = Path(vf.parent) / f"{vf.name}_bak"
            # print(f"{vf} : {backupfile}")
            copyfile(vf, backupfile)

            with open(vf) as f:
                skip = False
                for fn in yaml_files_check_list:
                    if vf == Path(fn):
                        print(
                            f"This yaml file needs checking; skipping load/processing for now => {Path(fn)}"
                        )
                        skip = True
                if not skip:
                    print(f"      Loading yaml for ==> {vf.parent}/{vf.name}",
                          end="")
                    data = yaml.load(f)
                    print("  :[ok]")

            # update kafka settings
            count = 0
            for x, value in lookup("kafka", data):
                #print_debug(x,value)
                list(
                    update_key(
                        'command',
                        'until nc -vz -w 1 $kafka_host $kafka_port; do echo waiting for Kafka; sleep 2; done;',
                        value))
                list(update_key('repository', 'kymeric/cp-kafka', value))
                list(update_key('image', 'kymeric/cp-kafka', value))
                list(update_key('imageTag', 'latest', value))

            # turn off prometheus jmx and kafka exporter
            for x, value in lookup("prometheus", data):
                #print_debug(x,value , 2)
                if isinstance(value, dict):
                    if value.get("jmx"):
                        value['jmx']['enabled'] = False
                    if value.get("kafka"):
                        value['kafka']['enabled'] = False

            # update mysql settings
            for x, value in lookup("mysql", data):
                list(update_key('repository', 'mysql/mysql-server', value))
                list(update_key('tag', '8.0.28-1.2.7-server', value))
                if value.get("image"):
                    del value['image']
                    value['image'] = "mysql/mysql-server"
                    value['imageTag'] = "8.0.28-1.2.7-server"
                    value['pullPolicy'] = "ifNotPresent"

            # turn the side car off for the moment
            for x, value in lookup("sidecar", data):
                list(update_key('enabled', False, value))

            # turn metrics off
            # The simulator has metrics clause with no enabled setting  => hence need to test
            for x, value in lookup("metrics", data):
                try:
                    if value.get("enabled"):
                        value['enabled'] = False
                except Exception:
                    continue

            with open(vf, "w") as f:
                yaml.dump(data, f)

    if (args.ingress):
        print(
            "\n\n======================================================================================"
        )
        print(" Modify charts to implement networking/v1 ")
        print(
            " and to use bitnami mysql rather than percona (percona / busybox is broken on containerd) "
        )
        print(
            "==========================================================================================="
        )

        # modify the template files
        for vf in p.rglob('*.tpl'):
            backupfile = Path(vf.parent) / f"{vf.name}_bak"
            #print(f"{vf} : {backupfile}")
            #copyfile(vf, backupfile)
            with FileInput(files=[vf], inplace=True) as f:
                #with fileinput.input(files=([vf]), inplace=True)  as f:
                for line in f:
                    line = line.rstrip()
                    #replace networking v1beta1
                    line = re.sub(r"networking.k8s.io/v1beta1",
                                  r"networking.k8s.io/v1", line)
                    line = re.sub(r"extensions/v1beta1",
                                  r"networking.k8s.io/v1", line)
                    print(line)

        # modify the ingress.yaml files
        for vf in p.rglob('*/ingress.yaml'):
            backupfile = Path(vf.parent) / f"{vf.name}_bak"
            #print(f"{vf} : {backupfile}")
            #copyfile(vf, backupfile)

            with FileInput(files=[vf], inplace=True) as f:
                for line in f:
                    line = line.rstrip()
                    if re.search("path:", line):
                        line_dup = line
                        line_dup = re.sub(
                            r"- path:.*$",
                            r"  pathType: ImplementationSpecific", line_dup)
                        print(line)
                        print(line_dup)
                    elif re.search("serviceName:", line):
                        line_dup = line
                        line_dup = re.sub(r"serviceName:.*$", r"service:",
                                          line_dup)
                        print(line_dup)
                        line = re.sub(r"serviceName:", r"  name:", line)
                        print(line)
                    elif re.search("servicePort:", line):
                        line_dup = line
                        line_dup = re.sub(r"servicePort:.*$", r"  port:",
                                          line_dup)
                        line = re.sub(r"servicePort: ", r"    number: ", line)
                        # need to replace port names with numbers
                        for pname, pnum in ports_array.items():
                            line = re.sub(f"number: {pname}$",
                                          f"number: {pnum}", line)
                        print(line_dup)
                        print(line)
                        #servicePort {{ .Values.containers.api.service.ports.api.externalPort }}
                    elif re.search("spec:", line):
                        print(line)
                        print(
                            "  ingressClassName: public"
                        )  # at least it is "public" for microk8s v1.22 => TODO: fully figure out the changes and settings here and simplify!
                    else:
                        print(line)

        for vf in p.rglob('*/values.yaml'):
            with open(vf) as f:

                #print(f"{vf.parent}/{vf.name}")
                skip = False
                for fn in yaml_files_check_list:
                    if vf == Path(fn):
                        print(
                            f"This yaml file needs checking; skipping load/processing for now => {Path(fn)}"
                        )
                        skip = True
                if not skip:
                    #print(f"      Loading yaml for ==> {vf.parent}/{vf.name}", end="")
                    data = yaml.load(f)
                    #print("  :[ok]")

                for x, value in lookup("mysql", data):
                    list(update_key('enabled', 'true', value))
                # => use these for now
                # TODO: update to later DB and get rid of default passwords
                for x, value in lookup("mysql", data):
                    list(update_key('repository', 'mysql/mysql-server', value))
                    list(update_key('tag', '5.6', value))
                    if value.get("image"):
                        del value['image']
                        value['image'] = "mysql"
                        value['imageTag'] = '8.0'
                        value['pullPolicy'] = "ifNotPresent"

                ### need to set nameOverride for mysql for ml-testing-toolkit as it appears to be missing
                if vf == Path('mojaloop/values.yaml'):
                    print("Updating the ml-testing-toolkit / mysql config ")
                    for x, value in lookup("ml-testing-toolkit", data):
                        value['mysql'] = {"nameOverride": "ttk-mysql"}

            with open(vf, "w") as f:
                yaml.dump(data, f)

        # versions of k8s -> 1.20 use containerd not docker and the percona chart
        # or at least the busybox dependency of the percona chart has an issue
        # so just replace the percona chart with the mysql charts
        #  for now using the old one because it deploys => TODO fix this and update
        for rf in p.rglob('*/requirements.yaml'):
            with open(rf) as f:
                reqs_data = yaml.load(f)
                #print(reqs_data)
            try:
                dlist = reqs_data['dependencies']
                for i in range(len(dlist)):
                    if (dlist[i]['name'] == "percona-xtradb-cluster"):
                        print(f"old was: {dlist[i]}")
                        dlist[i]['name'] = "mysql"
                        #dlist[i]['version'] = "8.8.8"
                        #dlist[i]['repository'] = "https://charts.bitnami.com/bitnami"
                        dlist[i]['version'] = 8.0
                        dlist[i]['repository'] = "https://charts.bitnami.com/bitnami"
                        dlist[i]['alias'] = "mysql"
                        dlist[i]['condition'] = "enabled"
                        print(f"new is: {dlist[i]}")

                    # if (dlist[i]['name'] == "mongodb"):
                    #     print(f"old was: {dlist[i]}")
                    #     dlist[i]['version'] = "11.1.7"
                    #     dlist[i]['repository'] = "file://../mongodb"
                    #     print(f"new is: {dlist[i]}")
            except Exception:
                continue

            with open(rf, "w") as f:
                yaml.dump(reqs_data, f)
Example #29
def parseRuleFile(filename):
    with open(filename) as file:
        yaml = YAML()
        yaml.allow_duplicate_keys = True
        data = list(yaml.load_all(file))
        return data
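load_all is the multi-document counterpart of load: it yields one object per --- separated document in the stream, which is why parseRuleFile wraps it in list(). A minimal sketch:

from ruamel.yaml import YAML

STREAM = "---\nrule: first\n---\nrule: second\n"

yaml = YAML()
yaml.allow_duplicate_keys = True
for doc in yaml.load_all(STREAM):
    print(doc["rule"])  # prints "first", then "second"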
Example #30
import re
from typing import Optional, List, Any, Dict
import os
from collections import namedtuple
import hashlib
from io import StringIO
import subprocess
import pathlib

from ruamel.yaml import YAML, comments
import json
import requests
from jinja2 import Template

yaml = YAML()
yaml.allow_duplicate_keys = True  # https://github.com/istio/istio/issues/2330

from .jsonnet import Jsonnet

CONFIG_NAME = 'Kasanefile'
LOCKFILE_NAME = 'Kasanefile.lock'

RuntimeConfig = namedtuple('RuntimeConfig',
                           ['check_hashes', 'jsonnet', 'kubeconfig'])


class RemoteNotVendoredError(RuntimeError):
    def __init__(self, layer):
        super().__init__()
        self.layer = layer