Code example #1
def load_from_folder(config_folder_location: str) -> Configuration:
    """Return a configuration based on a folder location.

    :param config_folder_location:
        String of folder path to be read.
    :returns:
        Configuration object.
    """
    logger = logging.getLogger(__name__)
    logger.info(
        f'Attempting to load config from folder {config_folder_location}')

    if os.path.isdir(config_folder_location) is False:
        logger.critical(f'{config_folder_location} is not a directory')
        raise ValueError('Invalid configuration folder specified.')

    os.chdir(config_folder_location)
    files = glob.glob('*.yaml')
    files.extend(glob.glob('*.yml'))
    files = set(files) - set(glob.glob('*example*'))
    for file in files:
        logger.info(f'Found file {file}')

    # Forcing INFO level logging here, debug will print file contents which might leak secrets
    conf = hiyapyco.load(list(files),
                         method=hiyapyco.METHOD_MERGE,
                         mergelists=False,
                         loglevel='INFO')
    configuration = _build_configuration(yaml.safe_load(hiyapyco.dump(conf)))
    return configuration
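The layering pattern above relies on hiyapyco's merge semantics. As a minimal sketch (keys and values invented for illustration), merging two YAML documents with METHOD_MERGE deep-merges mappings, while mergelists=False lets the later document's lists replace the earlier ones:

import hiyapyco

base = """
db:
  host: localhost
  port: 5432
tags:
  - dev
"""
override = """
db:
  host: prod.example.com
tags:
  - prod
"""

# Later documents win: db.host is overridden, db.port is kept from base,
# and with mergelists=False the 'tags' list is replaced rather than appended.
merged = hiyapyco.load(base, override,
                       method=hiyapyco.METHOD_MERGE,
                       mergelists=False)
print(hiyapyco.dump(merged))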
Code example #2
File: __init__.py Project: apogrebnyak/infinilint
def read_configs():
    script_path = os.path.dirname(os.path.realpath(__file__))
    base_config_file = os.path.join(script_path, 'base.yml')
    return hiyapyco.load(base_config_file,
                         '.infinilint.yml',
                         method=hiyapyco.METHOD_MERGE,
                         failonmissingfiles=False)
Code example #3
    def __read_config_files(self):
        """Private method to read all existing config YAML files an create a new ZAP Configuration object"""

        if self.config_dir is not None and len(self.config_dir) > 0:
            logging.debug("ZAP YAML config dir: '%s'", self.config_dir)
            config_files = glob.glob(self.config_dir_glob)
        else:
            logging.warning(
                "YAML config dir not found! This is no problem but possibly not intended here."
            )
            config_files = []

        logging.info("Importing YAML files for ZAP configuration at dir: '%s'",
                     config_files)
        if (len(config_files) > 0):
            config_files.sort()
            self.__config = hiyapyco.load(*config_files,
                                          method=hiyapyco.METHOD_MERGE,
                                          interpolate=True,
                                          mergelists=True,
                                          failonmissingfiles=False)
            logging.debug("Finished importing YAML: %s", self.__config)

            self.__parse_configurations()
        else:
            logging.warning(
                "No ZAP YAML Configuration files found :-/ This is no problem but possibly not intended here."
            )
            self.__config = collections.OrderedDict()
Code example #4
def load_yaml(filepath, defaults={"do_permutation": True}):
    """ load placer settings

    Args:
        filepath: a yaml file containing the does and placer information

    Returns:
        a dictionnary of DOEs with:
        {
            doe_name1: {...}
            doe_name2: {...}
            ...
        }

    """

    does = {}
    data = hiyapyco.load(str(filepath))
    mask = data.pop("mask")

    if "layer_doe_label" not in mask:
        mask["layer_doe_label"] = (102, 6)

    for doe_name, doe in data.items():
        # do_permutation = defaults["do_permutation"]
        # if "do_permutation" in doe:
        # do_permutation = doe.pop("do_permutation")
        _doe = {}
        _doe.update(doe)
        does[doe_name] = _doe
    return does, mask
Code example #5
File: config.py Project: joamatab/gcli
def read_config(path=local_config_path):
    """ reads CONFIG dict from disk """
    if os.path.exists(path):
        return hiyapyco.load(path,
                             failonmissingfiles=False,
                             loglevelmissingfiles=logging.DEBUG)
    else:
        create_local_config(path)
        return read_config(path)
Code example #6
File: io.py Project: vacany/lidar-intensity
def load_multi_yml(filename, merge=True):
    basename = osp.dirname(filename)
    with open(filename, 'r', encoding='utf-8') as f:
        data = yaml.safe_load(f)
    files = [osp.join(basename, d) for d in data]
    return hiyapyco.load(
        *files,
        method=hiyapyco.METHOD_MERGE if merge else hiyapyco.METHOD_SIMPLE,
        mergelists=False,
        usedefaultyamlloader=False)
Code example #7
def main():
    confs = sys.argv[1:]
    conf = hiyapyco.load(confs,
                         method=hiyapyco.METHOD_MERGE,
                         mergelists=False,
                         failonmissingfiles=False)
    try:
        print(conf)
    except (KeyboardInterrupt, SystemExit):
        pass
Code example #8
File: main.py Project: longchangvn/riskprofileapp
def merge_profile(ndis_id):
    filename = f"{ndis_id}.yaml"
    local_filepath = os.path.join(WORKING_DIR, "profiles", filename)
    remote_filepath = os.path.join(
        WORKING_DIR, "remote",
        filename)  # TODO(divv) This path needs to be configurable

    with open(local_filepath, 'r') as f:
        local_yaml = f.read()
    logger.debug(f"**** LOCAL YAML ****:\n{local_yaml}")

    if os.path.isfile(remote_filepath):
        logger.info("Synchronising profiles")

        with open(remote_filepath, 'r') as f:
            remote_yaml = f.read()
        logger.debug(f"**** REMOTE YAML ****:\n{remote_yaml}")

        merged_hypc = hiyapyco.load([remote_yaml, local_yaml],
                                    method=hiyapyco.METHOD_MERGE,
                                    loglevel=logging.INFO,
                                    mergelists=False)
        merged_yaml = hiyapyco.dump(merged_hypc)
        logger.debug(f"**** MERGED YAML ****:\n{merged_yaml}")

        merged_dict = yaml.load(merged_yaml)
        for survey, risks in merged_dict["surveys"].items():
            logger.debug(f"Survey: {survey}")
            logger.debug(f"Risks: {risks}")
            for risk, observations in risks.items():
                logger.debug(f"Risk: {risk}")
                logger.debug(f"observations: {observations}")
                deduped_observations = dedupe_list_of_dicts(observations)
                logger.debug(f"Deduped observations: {deduped_observations}")
                merged_dict["surveys"][survey][risk] = deduped_observations

        logger.debug(f"Overwriting local and remote profiles for {ndis_id}")
        with open(local_filepath, 'w') as f:
            f.write(yaml.dump(merged_dict))
        with open(remote_filepath, 'w') as f:
            f.write(yaml.dump(merged_dict))

        # cleanup
        del (remote_yaml)
        del (merged_dict)
        del (merged_hypc)
        del (merged_yaml)
        logger.debug("deleted remote, merged dictionaries")
    else:  # This is a new profile that does not yet exist on the remote server
        with open(remote_filepath, 'w') as f:
            f.write(local_yaml)

    # cleanup
    del (local_yaml)
    logger.debug("deleted local dictionary")
Code example #9
File: utils.py Project: compbiocore/refchef
def merge_yaml(master, new):
    """Merge yaml files"""
    conf = hiyapyco.load(master,
                         new,
                         method=hiyapyco.METHOD_MERGE,
                         interpolate=True,
                         failonmissingfiles=True,
                         mergelists=False,
                         loglevel='ERROR')

    save_yaml(conf, master)
Code example #10
File: __main__.py Project: fluffy-cat/lull
def main():
    logging.config.fileConfig('/config/logging.ini')
    confs = sys.argv[1:]
    conf = hiyapyco.load(confs, method=hiyapyco.METHOD_MERGE, mergelists=False, failonmissingfiles=False)
    switch = create_standby_switch(conf['home_assistant_switch'])
    monitors = create_monitors(conf['monitors'])
    clock = time
    monitor_store = create_monitor_storage(conf, clock, monitors)
    controller = create_control_loop(conf, monitor_store, switch, clock)
    try:
        controller.start()
    except (KeyboardInterrupt, SystemExit):
        logging.warning('Control loop interrupted. Shutting down lull monitor')
        pass
Code example #11
File: setup.py Project: adragolov/qset-core
def setup_logging(config_file_paths: [str] = None,
                  default_level=logging.NOTSET):

    if config_file_paths is not None:
        for file_path in config_file_paths:
            if not os.path.exists(file_path):
                raise IOError(
                    f"Logging file '{file_path}' could not be found.")

    if config_file_paths is not None and len(config_file_paths) > 0:
        logging_config = hiyapyco.load(*config_file_paths)
        logging.config.dictConfig(logging_config)
    else:
        logging.basicConfig(level=default_level)
Code example #12
def assemble_subdies_from_yaml(filepath,
                               subdies_directory,
                               mask_directory=None):
    data = hiyapyco.load(str(filepath))
    mask = data.pop("mask")
    mask_name = mask["name"]

    # Remaining entries are subdies
    dict_subdies = {
        k: (v["x"], v["y"], v["R"] if "R" in v else 0)
        for k, v in data.items()
    }

    return assemble_subdies(mask_name, dict_subdies, subdies_directory,
                            mask_directory)
Code example #13
    def _load_config(self):
        """ Loads the local configuration """
        # load the global config
        self._load_global_config()

        merge_yaml = [self.config_global_path]

        # Load the local config
        if self.config_global_local_path:
            config = yaml.load(open(self.config_global_local_path, 'r'))
            self.config_global_local = config
            if config:
                merge_yaml.append(self.config_global_local_path)

        self.config = hiyapyco.load(*merge_yaml, method=hiyapyco.METHOD_MERGE)
Code example #14
File: config.py Project: cdcarter/CumulusCI
    def _load_config(self):
        """ Loads the local configuration """
        # load the global config
        self._load_global_config()

        merge_yaml = [self.config_global_path]

        # Load the local config
        if self.config_global_local_path:
            config = yaml.load(open(self.config_global_local_path, 'r'))
            self.config_global_local = config
            if config:
                merge_yaml.append(self.config_global_local_path)

        self.config = hiyapyco.load(*merge_yaml, method=hiyapyco.METHOD_MERGE)
Code example #15
File: __main__.py Project: fluffy-cat/plexpost
def main():
    confs = [sys.argv[1], sys.argv[2]]
    conf = hiyapyco.load(confs, method=hiyapyco.METHOD_MERGE, mergelists=False, failonmissingfiles=False)
    transmission = create_transmission(conf['transmission'])
    sftp = sftp_factory.SFTPFactory(conf['sftp'])
    switch = create_htpc_switch(conf['home_assistant'])
    scheduler = BlockingScheduler()
    if 'default_flow' in conf:
        create_schedule(scheduler, transmission, switch, sftp, default_flow.DefaultPostProcessor(conf['default_flow']))
    if 'movies_flow' in conf:
        create_schedule(scheduler, transmission, switch, sftp, movies_flow.MoviePostProcessor(conf['movies_flow']))
    if 'tv_flow' in conf:
        create_schedule(scheduler, transmission, switch, sftp, show_flow.ShowPostProcessor(conf['tv_flow']))
    try:
        scheduler.start()
    except (KeyboardInterrupt, SystemExit):
        pass
Code example #16
    def _load_config(self):
        """ Loads the configuration for the project """
        # Verify that we're in a project
        repo_root = self.repo_root
        if not repo_root:
            raise NotInProject(
                "No repository found in current path.  You must be inside a repository to initialize the project configuration"
            )

        # Verify that the project's root has a config file
        if not self.config_project_path:
            raise ProjectConfigNotFound(
                "The file {} was not found in the repo root: {}".format(
                    self.config_filename, repo_root))

        # Start the merged yaml config from the global and global local configs
        merge_yaml = [self.global_config_obj.config_global_path]
        if self.global_config_obj.config_global_local_path:
            merge_yaml.append(self.global_config_obj.config_global_local_path)

        # Load the project's yaml config file
        with open(self.config_project_path, "r") as f_config:
            project_config = yaml.load(f_config)
        if project_config:
            self.config_project.update(project_config)
            merge_yaml.append(self.config_project_path)

        # Load the local project yaml config file if it exists
        if self.config_project_local_path:
            with open(self.config_project_local_path, "r") as f_local_config:
                local_config = yaml.load(f_local_config)
            if local_config:
                self.config_project_local.update(local_config)
                merge_yaml.append(self.config_project_local_path)

        # merge in any additional yaml that was passed along
        if self.additional_yaml:
            additional_yaml_config = yaml.load(self.additional_yaml)
            if additional_yaml_config:
                self.config_additional_yaml.update(additional_yaml_config)
                merge_yaml.append(self.additional_yaml)

        self.config = hiyapyco.load(*merge_yaml,
                                    method=hiyapyco.METHOD_MERGE,
                                    loglevel="INFO")
Code example #17
def load_config(config_file=None, extra_vars=[]):
    if config_file is not None:
        config_locations.append(config_file.name)

    # transform extra_vars to yaml and append at the end (highest prio)
    extra_vars_dict = dict(stmt.split('=', 1) for stmt in extra_vars)
    config_locations.append(yaml.safe_dump(extra_vars_dict))
    config = hiyapyco.load(config_locations,
                           method=hiyapyco.METHOD_MERGE,
                           interpolate=True,
                           failonmissingfiles=False)

    # add default rule if mappings are missing
    if config.get('mappings', None) is None:
        config['mappings'] = [{'directory': '.', 'include': '**'}]

    logger.debug(config)
    return config
Code example #18
File: config.py Project: cdcarter/CumulusCI
    def _load_config(self):
        """ Loads the configuration for the project """

        # Initialize the dictionaries for the individual configs
        self.config_project = {}
        self.config_project_local = {}

        # Verify that we're in a project
        repo_root = self.repo_root
        if not repo_root:
            raise NotInProject('No repository found in current path.  You must be inside a repository to initialize the project configuration')

        # Verify that the project's root has a config file
        if not self.config_project_path:
            raise ProjectConfigNotFound(
                'The file {} was not found in the repo root: {}'.format(
                    self.config_filename,
                    repo_root
                )
            )

        # Start the merged yaml config from the global and global local configs
        merge_yaml = [self.global_config_obj.config_global_path]
        if self.global_config_obj.config_global_local_path:
            merge_yaml.append(self.global_config_obj.config_global_local_path)

        # Load the project's yaml config file
        f_config = open(self.config_project_path, 'r')
        project_config = yaml.load(f_config)
        if project_config:
            self.config_project.update(project_config)
            merge_yaml.append(self.config_project_path)

        # Load the local project yaml config file if it exists
        if self.config_project_local_path:
            f_local_config = open(self.config_project_local_path, 'r')
            local_config = yaml.load(f_local_config)
            if local_config:
                self.config_project_local.update(local_config)
                merge_yaml.append(self.config_project_local_path)

        self.config = hiyapyco.load(*merge_yaml, method=hiyapyco.METHOD_MERGE)
Code example #19
    def _load_config(self):
        """ Loads the configuration for the project """

        # Initialize the dictionaries for the individual configs
        self.config_project = {}
        self.config_project_local = {}

        # Verify that we're in a project
        repo_root = self.repo_root
        if not repo_root:
            raise NotInProject('No repository found in current path.  You must be inside a repository to initialize the project configuration')

        # Verify that the project's root has a config file
        if not self.config_project_path:
            raise ProjectConfigNotFound(
                'The file {} was not found in the repo root: {}'.format(
                    self.config_filename,
                    repo_root
                )
            )

        # Start the merged yaml config from the global and global local configs
        merge_yaml = [self.global_config_obj.config_global_path]
        if self.global_config_obj.config_global_local_path:
            merge_yaml.append(self.global_config_obj.config_global_local_path)

        # Load the project's yaml config file
        with open(self.config_project_path, 'r') as f_config:
            project_config = yaml.load(f_config)
        if project_config:
            self.config_project.update(project_config)
            merge_yaml.append(self.config_project_path)

        # Load the local project yaml config file if it exists
        if self.config_project_local_path:
            with open(self.config_project_local_path, 'r') as f_local_config:
                local_config = yaml.load(f_local_config)
            if local_config:
                self.config_project_local.update(local_config)
                merge_yaml.append(self.config_project_local_path)

        self.config = hiyapyco.load(*merge_yaml, method=hiyapyco.METHOD_MERGE)
Code example #20
File: bot_lang.py Project: vsraptor/bot_lang
    def merge_yaml(self,
                   out_file,
                   generated_yaml,
                   yaml_path,
                   yaml_mask='*.yml'):
        print ">> Merge yaml files : gen:%s + inp:%s/%s => %s" % (
            generated_yaml, yaml_path, yaml_mask, out_file)
        yaml_list = []

        for fname in glob.glob(os.path.join(yaml_path, yaml_mask)):
            with open(fname) as fp:
                yaml_file = fp.read()
                yaml_list.append(yaml_file)

            yaml_list.append(generated_yaml)
            merged_yaml = hiyapyco.load(yaml_list,
                                        method=hiyapyco.METHOD_MERGE)
            #print(hiyapyco.dump(merged_yaml))
            domain = open(out_file, "w+")
            domain.writelines(hiyapyco.dump(merged_yaml))
Code example #21
File: app.py Project: silfa718/hassio-addons
def load_config(config_file):
    with open(config_file, 'r') as stream:
        try:
            config_files = ['config/default.yaml']
            for interface in INTERFACES:
                config_files.append(
                    f"bridge/interfaces/{interface}/config.yaml")
            config_files.append(config_file)

            log.debug("Loading configuration from: %s", config_files)
            config = hiyapyco.load(config_files,
                                   method=hiyapyco.METHOD_MERGE,
                                   interpolate=True,
                                   failonmissingfiles=True)

            #config = yaml.safe_load(stream)
            return config
        except yaml.YAMLError as exc:
            sys.stderr.write(f"FATAL! {exc}")
            sys.exit(1)
Code example #22
def load_config_files(*files):
    """Load and merge YAML config files.

    Files that come earlier in the list take precedence over files
    that come later in the list.

    Args:
        *files (list) : Variable number of file paths.

    Example::

        load_config_files(file1, file2, file3, ...):
    """

    files = [file for file in files if file is not None and file != '']

    for file in files:
        sys.path.insert(0, os.path.dirname(file))

    LOG.debug('Loading config files: {}'.format(files))

    # hiyapyco merges in order least important to most important
    files.reverse()

    expanded_files = process_templates(files)

    hiyapyco.jinja2env = NativeEnvironment(variable_start_string='(',
                                           variable_end_string=')',
                                           undefined=LogUndefined)

    cfg_dict = hiyapyco.load(expanded_files,
                             method=hiyapyco.METHOD_MERGE,
                             interpolate=True,
                             failonmissingfiles=True)

    if LOG.getEffectiveLevel() == logLevelFromName("DEBUG"):
        LOG.debug("Merged YAML config:\n\n%s\n",
                  hiyapyco.dump(cfg_dict, default_flow_style=False))

    return cfg_dict
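The docstring above promises that earlier files take precedence, while hiyapyco itself gives the last document the highest priority; reversing the list reconciles the two. A minimal sketch of that ordering (inline YAML strings, keys invented for illustration):

import hiyapyco

low = "color: red\nsize: small\n"   # lowest priority, passed first
high = "color: blue\n"              # highest priority, passed last

merged = hiyapyco.load(low, high, method=hiyapyco.METHOD_MERGE)
assert merged['color'] == 'blue'    # the last document overrides
assert merged['size'] == 'small'    # keys missing from later documents survive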
Code example #23
    def __init__(self, logger, environment):

        # Load configuration
        pkg_base = os.path.dirname(__file__)
        config_file = ['{}/../config/config.yml'.format(pkg_base)]
        config_file_env = '{}/../config/{}/config.yml'.format(
            pkg_base, environment)
        if os.path.exists(config_file_env):
            config_file.append(config_file_env)
        local_config_file = '{}/../config/local_config.yml'.format(pkg_base)
        if os.path.exists(local_config_file):
            config_file.append(local_config_file)
        config = hiyapyco.load(config_file, method=hiyapyco.METHOD_MERGE)

        self.limited_mode = config['limited_mode']
        self.post_to_slack = config['post_to_slack']
        self.slack_channel = config['slack_channel']
        self.admin_slack_channel = config['admin_slack_channel']
        self.users_slack_channel = config['users_slack_channel']
        aws_secret_name = config['aws_secret_name']
        aws_secret_region = config['aws_secret_region']

        # Load sensitive data from AWS Secret Manager
        try:
            logger.info("Load AWS secret")
            secret = get_secret(aws_secret_name, aws_secret_region)
            self.slack_event_token = secret['slack_event_token']
            self.admin_users = secret['admin_users']
            self.slack_bot_token = secret['slack_bot_token']
            self.slack_app_token = secret['slack_app_token']
            logger.info("AWS secret loaded with success")
        except:
            logger.info("AWS secret failed, fallback with config file")
            self.slack_event_token = config['slack_event_token']
            self.admin_users = config['admin_users']
            self.slack_bot_token = config['slack_bot_token']
            self.slack_app_token = config['slack_app_token']
            logger.info("Configuration file loaded with success")
Code example #24
File: cli.py Project: eerorika/conan-settings
def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description='Modify Conan settings.yml configuration')
    parser.add_argument('--merge-file',
                        nargs='*',
                        default=[],
                        help='YAML config file to merge')
    parser.add_argument('--method', default='METHOD_MERGE')
    bool_arg(parser, 'mergelists', True)
    bool_arg(parser, 'interpolate', False)
    bool_arg(parser, 'castinterpolated', False)
    bool_arg(parser, 'usedefaultyamlloader', False)
    bool_arg(parser, 'failonmissingfiles', True)
    args = parser.parse_args()

    in_data = get_stdin() or ''
    in_data += "\n"  # newline is used to distinguish yaml from filename

    output = ConanOutput(sys.stdout, sys.stderr, True)
    conan_cache = cache.ClientCache(
        os.path.join(get_conan_user_home(), '.conan'), output)
    path = conan_cache.settings_path

    existing = cache.load(path) \
        if os.path.exists(path) \
        else default_settings()
    method = hiyapyco.METHODS[args.method]
    settings = hiyapyco.load([existing, in_data],
                             *args.merge_file,
                             mergelists=args.mergelists,
                             method=method,
                             interpolate=args.interpolate,
                             castinterpolated=args.castinterpolated,
                             usedefaultyamlloader=args.usedefaultyamlloader,
                             failonmissingfiles=args.failonmissingfiles)
    settings_yml = hiyapyco.dump(settings)
    cache.save(path, normalize(settings_yml), only_if_modified=True)
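The newline appended to in_data above exploits how hiyapyco tells inline YAML apart from file names: an argument containing a newline is parsed as a YAML document, while anything else is treated as a path on disk. A small sketch (key and value invented for illustration):

import hiyapyco

doc = "compiler: gcc"        # no newline: would be looked up as a file name
doc_as_yaml = doc + "\n"     # a newline forces parsing as an inline YAML document

conf = hiyapyco.load(doc_as_yaml)
assert conf['compiler'] == 'gcc'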
Code example #25
    def __init__(self):

        # Load configuration
        pkg_base = os.path.dirname(__file__)
        config_file = ['{}/../config/config.yml'.format(pkg_base)]
        local_config_file = '{}/../config/local_config.yml'.format(pkg_base)
        if os.path.exists(local_config_file):
            config_file.append(local_config_file)
        config = hiyapyco.load(config_file, method=hiyapyco.METHOD_MERGE)

        aws_secret_name = config['aws_secret_name']
        aws_secret_region = config['aws_secret_region']

        # Load API token from AWS Secret Manager
        try:
            print("--- INFO: Load AWS secrets")
            secret = get_secret(aws_secret_name, aws_secret_region)
            self.slack_token = secret['slack_bot_token']
            print("--- INFO: AWS secrets loaded with success")
        except:
            print("--- INFO: AWS secret failed, fallback with config file")
            self.slack_token = config['slack_bot_token']
            print("--- INFO: Configuration file loaded with success")
Code example #26
File: bulkdata.py Project: rfrankus1/CumulusCI
 def _init_mapping(self):
     self.mapping = hiyapyco.load(self.options['mapping'])
Code example #27
File: test_odict.py Project: adragomir/hiyapyco
logger = testsetup.setup(sys.argv[1:])

basepath = os.path.dirname(os.path.realpath(__file__))

print('start test %s for hiyapyco %s using python %s (loglevel:%s)' % (
            __file__,
            hiyapyco.__version__,
            platform.python_version(),
            logging.getLevelName(logger.getEffectiveLevel())
        )
    )

logger.info('test odict interpolation using ODYLDo ...')
conf = hiyapyco.load(
        os.path.join(basepath, 'odict.yaml'),
        method=hiyapyco.METHOD_SIMPLE,
        failonmissingfiles=True,
        interpolate=True
        )

source_domain = conf['source']['domain']
source_domainDN = conf['source']['domainDN']
logger.info('test interpolation source_domainDN and source_domain ... %s : %s' % (source_domainDN, source_domain))
assert source_domainDN == 'dc=%s' % ',dc='.join(source_domain.split('.'))

source_baseDN = conf['source']['baseDN']
logger.info('test interpolation source_baseDN ... %s' % source_baseDN)
assert source_baseDN == 'ou=Users,%s' % source_domainDN

source_bindDN = conf['source']['bindDN']
logger.info('test interpolation source_bindDN ... %s' % source_bindDN)
assert source_bindDN == 'cn=Administrator,%s' % source_baseDN
Code example #28
basepath = os.path.dirname(os.path.realpath(__file__))

print('start test %s for hiyapyco %s using python %s (loglevel:%s)' % (
            __file__,
            hiyapyco.__version__,
            platform.python_version(),
            logging.getLevelName(logger.getEffectiveLevel())
        )
    )

yamlfile = os.path.join(basepath, 'castinterpolated.yaml')

logger.info('test uncasted ...')
conf = hiyapyco.load(
        yamlfile,
        method=hiyapyco.METHOD_SIMPLE,
        failonmissingfiles=True,
        interpolate=True,
        )
v = conf['three']
logger.info('test uncasted: %s (type: %s) ... ' % (v, type(v),))
assert v == '3'
v = conf['c']
logger.info('test uncasted: %s (type: %s) ... ' % (v, type(v),))
assert v == '5.9'
v = conf['bool']
logger.info('test uncasted: %s (type: %s) ... ' % (v, type(v),))
assert v == 'True'

conf = hiyapyco.load(
        yamlfile,
        method=hiyapyco.METHOD_SIMPLE,
Code example #29
        level=logging.INFO,
        format='%(levelname)s\t[%(name)s] %(funcName)s: %(message)s'
        )

print('*'*70)
print('  hiyapyco %s' % hiyapyco.version.VERSION)
print('*'*70)

print('-'*70)
print('  loglevelmissingfiles=logging.INFO + failonmissingfiles=True ...')
print('-'*70)
try:
    conf = hiyapyco.load(
        os.path.join(basepath, 'olist1.yaml'),
        os.path.join(basepath, 'NoSuchFile.yaml'),
        method=hiyapyco.METHOD_SIMPLE,
        failonmissingfiles=True,
        loglevelmissingfiles=logging.INFO
        )
except hiyapyco.HiYaPyCoInvocationException:
    pass


print('-'*70)
print('  loglevelmissingfiles=logging.INFO + failonmissingfiles=False ...')
print('-'*70)
conf = hiyapyco.load(
        os.path.join(basepath, 'olist1.yaml'),
        os.path.join(basepath, 'NoSuchFile.yaml'),
        method=hiyapyco.METHOD_SIMPLE,
        failonmissingfiles=False,
Code example #30
File: test_invocation.py Project: adragomir/hiyapyco
except hiyapyco.HiYaPyCoInvocationException as e:
    assert '%s' % e == 'yaml file not found: \'nosuchfile.yaml\''

logger.info('test missing file w/ failonmissingfiles=False ...')
conf = hiyapyco.HiYaPyCo(
    os.path.join(basepath, 'base.yaml'),
    'nosuchfile.yaml',
    failonmissingfiles=False
    )
assert conf.yamlfiles() == [os.path.join(basepath, 'base.yaml')]
assert '%s' % conf == 'hiyapyco [%s]' % os.path.join(basepath, 'base.yaml')

logger.info('test normal file list ...')
conf = hiyapyco.load(
        os.path.join(basepath, 'base.yaml'),
        os.path.join(basepath, 'baseext.yaml'),
        method=hiyapyco.METHOD_SIMPLE,
        failonmissingfiles=True
        )

logger.info('test files as list ...')
conf = hiyapyco.load(
        [
            os.path.join(basepath, 'base.yaml'),
            os.path.join(basepath, 'baseext.yaml')
        ],
        method=hiyapyco.METHOD_SIMPLE,
        failonmissingfiles=True
        )

logger.info('test yaml doc as str ...')
y1 = """
Code example #31
File: bulkdata.py Project: tet3/CumulusCI
 def _init_mapping(self):
     self.mapping = hiyapyco.load(self.options['mapping'])
Code example #32
logger = testsetup.setup(sys.argv[1:])

basepath = os.path.dirname(os.path.realpath(__file__))

print('start test %s for hiyapyco %s using python %s (loglevel:%s)' % (
            __file__,
            hiyapyco.__version__,
            platform.python_version(),
            logging.getLevelName(logger.getEffectiveLevel())
        )
    )

logger.info('test simple vals ...')
conf = hiyapyco.load(
        os.path.join(basepath, 'interpolate.yaml'),
        method=hiyapyco.METHOD_SIMPLE,
        failonmissingfiles=True,
        interpolate=True
        )
t1 = conf.get('first')
t2 = conf.get('firstcopy')
t3 = conf['second']
logger.info('test first interpolation ... %s : %s' % (t1, t2,))
assert t1 == t2
logger.info('test second interpolation ... %s : %s' % (t1, t3,))
assert t3 == 'again %s' % t1

t = conf.get('list')
logger.info('test interpolation in list ... %s' % t)
assert t == ['abc', 'bcd', 'cde', 'abc']

t = conf['elist']
Code example #33
File: test_multiple.py Project: zerwes/hiyapyco
logger = testsetup.setup(sys.argv[1:])

basepath = os.path.dirname(os.path.realpath(__file__))

print('start test %s for hiyapyco %s using python %s (loglevel:%s)' % (
            __file__,
            hiyapyco.__version__,
            platform.python_version(),
            logging.getLevelName(logger.getEffectiveLevel())
        )
    )

logger.info('test multiple yaml in one file w/ original yaml loader ...')
conf = hiyapyco.load(
        os.path.join(basepath, 'multiple.yaml'),
        method=hiyapyco.METHOD_SIMPLE,
        failonmissingfiles=True,
        usedefaultyamlloader=True
        )

t = conf['a']
logger.info('test single val ... %s' % t)
assert t == 'xxx'

t = conf['b']
logger.info('test single val ... %s' % t)
assert t == 'bbb'

t = conf['d']
logger.info('test dict val ... %s' % t)
assert t == {'a' : 'x', 'z' : 'z'}
Code example #34
    def process_github_dependency(self, dependency, indent=None):
        if not indent:
            indent = ''

        self.logger.info(
            '{}Processing dependencies from Github repo {}'.format(
                indent,
                dependency['github'],
            ))

        skip = dependency.get('skip')
        if not isinstance(skip, list):
            skip = [
                skip,
            ]

        # Initialize github3.py API against repo
        gh = self.get_github_api()
        repo_owner, repo_name = dependency['github'].split('/')[3:5]
        if repo_name.endswith('.git'):
            repo_name = repo_name[:-4]
        repo = gh.repository(repo_owner, repo_name)

        # Get the cumulusci.yml file
        contents = repo.contents('cumulusci.yml')
        cumulusci_yml = hiyapyco.load(contents.decoded)

        # Get the namespace from the cumulusci.yml if set
        namespace = cumulusci_yml.get('project', {}).get('package',
                                                         {}).get('namespace')

        # Check for unmanaged flag on a namespaced package
        unmanaged = namespace and dependency.get('unmanaged') is True

        # Look for subfolders under unpackaged/pre
        unpackaged_pre = []
        contents = repo.contents('unpackaged/pre')
        if contents:
            for dirname in list(contents.keys()):
                if 'unpackaged/pre/{}'.format(dirname) in skip:
                    continue
                subfolder = "{}-{}/unpackaged/pre/{}".format(
                    repo.name, repo.default_branch, dirname)
                zip_url = "{}/archive/{}.zip".format(repo.html_url,
                                                     repo.default_branch)

                unpackaged_pre.append({
                    'zip_url':
                    zip_url,
                    'subfolder':
                    subfolder,
                    'unmanaged':
                    dependency.get('unmanaged'),
                    'namespace_tokenize':
                    dependency.get('namespace_tokenize'),
                    'namespace_inject':
                    dependency.get('namespace_inject'),
                    'namespace_strip':
                    dependency.get('namespace_strip'),
                })

        # Look for metadata under src (deployed if no namespace)
        unmanaged_src = None
        if unmanaged or not namespace:
            contents = repo.contents('src')
            if contents:
                zip_url = "{}/archive/{}.zip".format(repo.html_url,
                                                     repo.default_branch)
                subfolder = "{}-{}/src".format(repo.name, repo.default_branch)

                unmanaged_src = {
                    'zip_url': zip_url,
                    'subfolder': subfolder,
                    'unmanaged': dependency.get('unmanaged'),
                    'namespace_tokenize': dependency.get('namespace_tokenize'),
                    'namespace_inject': dependency.get('namespace_inject'),
                    'namespace_strip': dependency.get('namespace_strip'),
                }

        # Look for subfolders under unpackaged/post
        unpackaged_post = []
        contents = repo.contents('unpackaged/post')
        if contents:
            for dirname in list(contents.keys()):
                if 'unpackaged/post/{}'.format(dirname) in skip:
                    continue
                zip_url = "{}/archive/{}.zip".format(repo.html_url,
                                                     repo.default_branch)
                subfolder = "{}-{}/unpackaged/post/{}".format(
                    repo.name, repo.default_branch, dirname)

                dependency = {
                    'zip_url': zip_url,
                    'subfolder': subfolder,
                    'unmanaged': dependency.get('unmanaged'),
                    'namespace_tokenize': dependency.get('namespace_tokenize'),
                    'namespace_inject': dependency.get('namespace_inject'),
                    'namespace_strip': dependency.get('namespace_strip'),
                }
                # By default, we always inject the project's namespace into
                # unpackaged/post metadata
                if namespace and not dependency.get('namespace_inject'):
                    dependency['namespace_inject'] = namespace
                    dependency['unmanaged'] = unmanaged
                unpackaged_post.append(dependency)

        project = cumulusci_yml.get('project', {})
        dependencies = project.get('dependencies')
        if dependencies:
            dependencies = self.get_static_dependencies(dependencies)
        version = None

        if namespace and not unmanaged:
            # Get version
            version = dependency.get('version')
            if 'version' not in dependency or dependency['version'] == 'latest':
                # github3.py doesn't support the latest release API so we hack
                # it together here
                url = repo._build_url('releases/latest', base_url=repo._api)
                try:
                    version = repo._get(url).json()['name']
                except Exception as e:
                    self.logger.warn('{}{}: {}'.format(indent,
                                                       e.__class__.__name__,
                                                       e.message))

            if not version:
                self.logger.warn(
                    '{}Could not find latest release for {}'.format(
                        indent, namespace))

        # Create the final ordered list of all parsed dependencies
        repo_dependencies = []

        # unpackaged/pre/*
        if unpackaged_pre:
            repo_dependencies.extend(unpackaged_pre)

        # Latest managed release (if referenced repo has a namespace)
        if namespace and not unmanaged:
            if version:
                # If a latest prod version was found, make the dependencies a
                # child of that install
                dependency = {
                    'namespace': namespace,
                    'version': version,
                }
                if dependencies:
                    dependency['dependencies'] = dependencies

                repo_dependencies.append(dependency)
            elif dependencies:
                repo_dependencies.extend(dependencies)

        # Unmanaged metadata from src (if referenced repo doesn't have a
        # namespace)
        else:
            if dependencies:
                repo_dependencies.extend(dependencies)
            if unmanaged_src:
                repo_dependencies.append(unmanaged_src)

        # unpackaged/post/*
        if unpackaged_post:
            repo_dependencies.extend(unpackaged_post)

        return repo_dependencies
Code example #35
def merge_all_metadata(gdspath, device_manifest_path):
    """ from a gds mask combines test_protocols and labels positions for each DOE
    Do a map cell: does
    Usually each cell will have only one DOE. But in general it should be allowed for a cell to belong to multiple DOEs
    """
    mask_json_path = gdspath.replace(".gds", ".json")
    csv_labels_path = gdspath.replace(".gds", ".csv")
    output_tm_path = gdspath.replace(".gds", ".tp.json")
    tm_dict = {}

    device_manifest_data = parse_device_manifest(device_manifest_path)

    mask_directory = os.path.split(gdspath)[0]
    mask_build_directory = os.path.split(mask_directory)[0]
    mask_root_directory = os.path.split(mask_build_directory)[0]
    test_protocols_path = os.path.join(mask_root_directory, "test_protocols.yml")
    analysis_protocols_path = os.path.join(
        mask_root_directory, "data_analysis_protocols.yml"
    )

    assert os.path.isfile(mask_json_path), "missing {}".format(mask_json_path)
    assert os.path.isfile(csv_labels_path), "missing {}".format(csv_labels_path)

    mask_data = parse_mask_json(mask_json_path)

    if os.path.isfile(test_protocols_path):
        test_protocols = hiyapyco.load(test_protocols_path)
        tm_dict["test_protocols"] = test_protocols
    if os.path.isfile(analysis_protocols_path):
        analysis_protocols = hiyapyco.load(analysis_protocols_path)
        tm_dict["analysis_protocols"] = analysis_protocols

    data = parse_csv_data(csv_labels_path)
    # cell_x_y = [(get_cell_from_label(l), x, y) for l, x, y in data]

    does = mask_data["does"]
    cells = mask_data["cells"]
    doe_devices = set()
    for doe in does.values():
        doe_devices.update(doe["cells"])

    # from pprint import pprint
    # pprint(list(cells.keys()))
    # pprint(list(device_manifest_data.keys()))

    ## Inject device manifest data into cells
    for cell_name in doe_devices:

        if cell_name not in cells:
            print(
                "skip reconcile data for cell {} - cell not in cells".format(cell_name)
            )
            continue

        if cell_name not in device_manifest_data:
            print(
                "skip reconcile data for cell {} - cell not in device manifest".format(
                    cell_name
                )
            )
            continue
        cells[cell_name].update(device_manifest_data[cell_name])

    ## Map cell to DOEs - generic case where a cell could belong to multiple experiments
    cell_to_does = {}
    for doe_name, doe in does.items():
        for c in doe["cells"]:
            if c not in cell_to_does:
                cell_to_does[c] = set()
            cell_to_does[c].update([doe_name])

    tm_dict["does"] = {}
    doe_tm = tm_dict["does"]
    doe_tm.update(does)
    for doe_name, doe in doe_tm.items():
        doe.pop("cells")
        doe["instances"] = {}

    ## Cell instances which need to be measured MUST have a unique cell name
    for label, x, y in data:
        cell_name = get_cell_from_label(label)
        if cell_name not in cell_to_does:
            continue
        cell_does = cell_to_does[cell_name]
        for doe_name in cell_does:
            _doe = doe_tm[doe_name]

            if cell_name not in _doe["instances"]:
                # Unique Cell instance to labels and coordinates
                _doe["instances"][cell_name] = []
            _doe["instances"][cell_name].append({"label": label, "x": x, "y": y})

    # Adding the cells settings
    tm_dict["cells"] = cells

    with open(output_tm_path, "w") as json_out:
        json.dump(tm_dict, json_out, indent=2)
Code example #36
File: list.py Project: zelitomas/hiyapyco
#! /usr/bin/env python

import sys
import os
import logging

basepath = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.dirname(basepath))

import hiyapyco

print('='*10, 'method=hiyapyco.METHOD_SIMPLE', '='*10)

conf = hiyapyco.load(
        os.path.join(basepath, 'list1.yaml'),
        os.path.join(basepath, 'list2.yaml'),
        method=hiyapyco.METHOD_SIMPLE,
        failonmissingfiles=True
        )
print(conf)
print('-'*10, 'YAML', '-'*10)
print(hiyapyco.dump(conf))

print('='*10, 'method=hiyapyco.METHOD_MERGE', '='*10)

conf = hiyapyco.load(
        os.path.join(basepath, 'list1.yaml'),
        os.path.join(basepath, 'list2.yaml'),
        method=hiyapyco.METHOD_MERGE,
        failonmissingfiles=True
        )
print(conf)
Code example #37
def main():
    parser = ArgumentParser(description="Stratumus Layered Config Parser")
    parser.add_argument(
        "-c",
        "--config",
        type=str,
        default=None,
        required=False,
        help="Stratumus hierarchy config (default: $root/stratumus.yaml")
    parser.add_argument("-r",
                        "--root",
                        type=str,
                        default=None,
                        required=False,
                        help="Directory with config data (default: .)")
    parser.add_argument("-i",
                        "--hierarchy",
                        nargs='+',
                        action='append',
                        type=str,
                        required=False)
    parser.add_argument("-o",
                        "--out",
                        type=str,
                        default=None,
                        help="Output directory",
                        required=False)
    parser.add_argument("-j",
                        "--with-json",
                        action='store_true',
                        help="Dumps json in addition to yaml",
                        required=False)
    parser.add_argument("-d",
                        "--debug",
                        action='store_true',
                        help="Enable Debugging",
                        required=False)
    parser.add_argument("-m",
                        "--deep-merge",
                        action='store_true',
                        help="Attempt to use hiyapyco METHOD_MERGE. This"
                        "will perform a deep merge, recursively merging"
                        "dictionaries nested in lists.",
                        required=False)

    args, unknown = parser.parse_known_args()

    if args.debug:
        hiyapyco.jinja2env = Environment(undefined=DebugUndefined)
        logger.setLevel(logging.DEBUG)
        hilogger.setLevel(logging.DEBUG)

    if args.config and args.root is None:
        args.root = os.path.dirname(args.config)

    if args.root is None:
        args.root = os.path.abspath('.')

    if args.config is None:
        args.config = os.path.sep.join([args.root, 'stratumus.yaml'])

    stratum_config = hiyapyco.load(args.config, failonmissingfiles=False) or {}

    stratum_config['root'] = args.root

    # Always use the user-specified hierarchy over the config file
    stratum_config['hierarchy'] = args.hierarchy or stratum_config.get(
        'hierarchy') or [[]]

    stratum_config['out'] = args.out

    stratum_config['with_json'] = args.with_json

    stratum_config['attempt_deep_merge'] = args.deep_merge

    stratum_config['debug'] = args.debug

    filters = dict(
        zip([u[2:] for u in unknown[:-1:2] if u.startswith('--')],
            unknown[1::2]))

    stratum_config['filters'] = filters or stratum_config.get('filters') or {}

    logger.debug(json.dumps(stratum_config))

    try:
        stratum = Stratum(
            root_dir=stratum_config.get('root'),
            hierarchies=stratum_config.get('hierarchy'),
            filters=filters,
            attempt_deep_merge=stratum_config.get('attempt_deep_merge'))
        stratum.dump_configs(stratum_config.get('out'),
                             stratum_config['with_json'])
    except Exception as e:
        logger.exception(e)
        sys.exit(1)
Code example #38
    def process_github_dependency(self, dependency, indent=None):
        if not indent:
            indent = ''

        self.logger.info(
            '{}Processing dependencies from Github repo {}'.format(
                indent,
                dependency['github'],
            )
        )

        skip = dependency.get('skip')
        if not isinstance(skip, list):
            skip = [skip, ]

        # Initialize github3.py API against repo
        gh = self.get_github_api()
        repo_owner, repo_name = dependency['github'].split('/')[3:5]
        if repo_name.endswith('.git'):
            repo_name = repo_name[:-4]
        repo = gh.repository(repo_owner, repo_name)

        # Prepare HTTP auth header for requests calls to Github
        github = self.keychain.get_service('github')
        headers = {'Authorization': 'token {}'.format(github.password)}

        # Determine the ref if specified
        kwargs = {}
        if 'tag' in dependency:
            tag = dependency['tag']
            kwargs['ref'] = tag
        else:
            tag = None

        # Get the cumulusci.yml file
        contents = repo.contents('cumulusci.yml', **kwargs)
        cumulusci_yml = hiyapyco.load(contents.decoded, loglevel='INFO')

        # Get the namespace from the cumulusci.yml if set
        namespace = cumulusci_yml.get('project', {}).get(
            'package', {}).get('namespace')

        # Check for unmanaged flag on a namespaced package
        unmanaged = namespace and dependency.get('unmanaged') is True

        # Look for subfolders under unpackaged/pre
        unpackaged_pre = []
        contents = repo.contents('unpackaged/pre', **kwargs)
        if contents:
            for dirname in list(contents.keys()):
                if 'unpackaged/pre/{}'.format(dirname) in skip:
                    continue
                subfolder = "{}-{}/unpackaged/pre/{}".format(
                    repo.name, repo.default_branch, dirname)
                zip_url = "{}/archive/{}.zip".format(
                    repo.html_url, repo.default_branch)

                unpackaged_pre.append({
                    'zip_url': zip_url,
                    'subfolder': subfolder,
                    'headers': headers,
                    'unmanaged': dependency.get('unmanaged'),
                    'namespace_tokenize': dependency.get('namespace_tokenize'),
                    'namespace_inject': dependency.get('namespace_inject'),
                    'namespace_strip': dependency.get('namespace_strip'),
                })

        # Look for metadata under src (deployed if no namespace)
        unmanaged_src = None
        if unmanaged or not namespace:
            contents = repo.contents('src', **kwargs)
            if contents:
                zip_url = "{}/archive/{}.zip".format(
                    repo.html_url, repo.default_branch)
                subfolder = "{}-{}/src".format(repo.name, repo.default_branch)

                unmanaged_src = {
                    'zip_url': zip_url,
                    'subfolder': subfolder,
                    'headers': headers,
                    'unmanaged': dependency.get('unmanaged'),
                    'namespace_tokenize': dependency.get('namespace_tokenize'),
                    'namespace_inject': dependency.get('namespace_inject'),
                    'namespace_strip': dependency.get('namespace_strip'),
                }

        # Look for subfolders under unpackaged/post
        unpackaged_post = []
        contents = repo.contents('unpackaged/post', **kwargs)
        if contents:
            for dirname in list(contents.keys()):
                if 'unpackaged/post/{}'.format(dirname) in skip:
                    continue
                zip_url = "{}/archive/{}.zip".format(
                    repo.html_url, repo.default_branch)
                subfolder = "{}-{}/unpackaged/post/{}".format(
                    repo.name, repo.default_branch, dirname)

                dependency = {
                    'zip_url': zip_url,
                    'subfolder': subfolder,
                    'headers': headers,
                    'unmanaged': dependency.get('unmanaged'),
                    'namespace_tokenize': dependency.get('namespace_tokenize'),
                    'namespace_inject': dependency.get('namespace_inject'),
                    'namespace_strip': dependency.get('namespace_strip'),
                }
                # By default, we always inject the project's namespace into
                # unpackaged/post metadata
                if namespace and not dependency.get('namespace_inject'):
                    dependency['namespace_inject'] = namespace
                    dependency['unmanaged'] = unmanaged
                unpackaged_post.append(dependency)

        # Parse values from the repo's cumulusci.yml
        project = cumulusci_yml.get('project', {})
        prefix_beta = project.get('git', {}).get('prefix_beta', 'beta/')
        prefix_release = project.get('git', {}).get('prefix_release', 'release/')
        dependencies = project.get('dependencies')
        if dependencies:
            dependencies = self.get_static_dependencies(dependencies)

        # Create the final ordered list of all parsed dependencies
        repo_dependencies = []

        # unpackaged/pre/*
        if unpackaged_pre:
            repo_dependencies.extend(unpackaged_pre)

        if namespace and not unmanaged:
            version = None
            if tag:
                version = self.get_version_for_tag(tag, prefix_beta, prefix_release)
            else:
                # github3.py doesn't support the latest release API so we hack
                # it together here
                url = repo._build_url('releases/latest', base_url=repo._api)
                try:
                    version = repo._get(url).json()['name']
                except Exception as e:
                    self.logger.warn('{}{}: {}'.format(
                        indent, e.__class__.__name__, e.message))

            if not version:
                raise DependencyResolutionError(
                    '{}Could not find latest release for {}'.format(indent, namespace)
                )
            # If a latest prod version was found, make the dependencies a
            # child of that install
            dependency = {
                'namespace': namespace,
                'version': version,
            }
            if dependencies:
                dependency['dependencies'] = dependencies
            repo_dependencies.append(dependency)

        # Unmanaged metadata from src (if referenced repo doesn't have a
        # namespace)
        else:
            if dependencies:
                repo_dependencies.extend(dependencies)
            if unmanaged_src:
                repo_dependencies.append(unmanaged_src)

        # unpackaged/post/*
        if unpackaged_post:
            repo_dependencies.extend(unpackaged_post)

        return repo_dependencies
Code example #39
File: hiyapyco_example.py Project: mmariani/hiyapyco
        '-y', '--usedefaultyamlloader', dest='usedefaultyamlloader',
        action='store_true', default=False, help='use the default yaml loader'
    )
parser.add_argument('-f', '--file', type=str, nargs='+', help='yaml file(s) to parse')
args = parser.parse_args()

if args.loglevel is None:
    logging.disable(logging.CRITICAL)

# FIXME: in fact this should be the job of argparse
if args.file is None or len(args.file) == 0:
    raise Exception('please provide at least one yaml file!')

for mergemethod in hiyapyco.METHODS.keys():
    print('='*10, 'method=', mergemethod, '='*10)
    conf = hiyapyco.load(
        *args.file,
        method=hiyapyco.METHODS[mergemethod],
        interpolate=True,
        failonmissingfiles=True,
        usedefaultyamlloader=args.usedefaultyamlloader
        )
    print(conf)
    print('-'*10, 'YAML', '-'*10)
    print(hiyapyco.dump(conf))
    if len(args.file) < 2:
        break

# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 smartindent nu

Code example #40
File: listl3.py Project: mmariani/hiyapyco
import pprint

basepath = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.dirname(basepath))
import hiyapyco

logger = logging.getLogger()
logging.basicConfig(
        level=logging.DEBUG,
        format='%(levelname)s\t[%(name)s] %(funcName)s: %(message)s'
        )

print('='*10, 'method=hiyapyco.METHOD_SIMPLE', '='*10)
conf = hiyapyco.load(
        os.path.join(basepath, 'listl31.yaml'),
        os.path.join(basepath, 'listl32.yaml'),
        failonmissingfiles=True
        )
print('-'*10, 'DATA', '-'*10)
print(conf)
print('-'*10, 'PPRINT', '-'*10)
pprint.PrettyPrinter(indent=4).pprint(conf)
print('-'*10, 'YAML', '-'*10)
print(hiyapyco.dump(conf))
print('-'*10, 'EOF', '-'*10)
assert conf['l3'] == [{'a': 'aaa', 'c': 'c'}, {'b': 'BBB', 'c': 'CCC'}, {'a': 'x', 'b': 'y', 'c': 'z'}]

print('='*10, 'method=hiyapyco.METHOD_MERGE', '='*10)
conf = hiyapyco.load(
        os.path.join(basepath, 'listl31.yaml'),
        os.path.join(basepath, 'listl32.yaml'),
Code example #41
File: test_merge.py Project: mmariani/hiyapyco
logger = testsetup.setup(sys.argv[1:])

basepath = os.path.dirname(os.path.realpath(__file__))

print('start test %s for hiyapyco %s using python %s (loglevel:%s)' % (
            __file__,
            hiyapyco.__version__,
            platform.python_version(),
            logging.getLevelName(logger.getEffectiveLevel())
        )
    )

logger.info('test simple vals ...')
conf = hiyapyco.load(
        os.path.join(basepath, 'base.yaml'),
        os.path.join(basepath, 'baseext.yaml'),
        method=hiyapyco.METHOD_SIMPLE,
        failonmissingfiles=True
        )

t = conf['singel']
logger.info('test single val ... %s' % t)
assert t == 'ext'

t = conf['int']
logger.info('test int val ... %s' % t)
assert t == 10

t = conf['array']
logger.info('test list val ... %s' % t)
assert t == ['baseext1', 'baseext2']
Code example #42
    def walk_configs(self):
        for hierarchy in self.hierarchies:
            glob_pattern_to_join = [self.config_dir]
            hierarchy_strings_to_alias = OrderedDict()
            for i, h in enumerate(hierarchy):
                if isinstance(h, str):
                    hierarchy_string = h
                    alias = h
                elif isinstance(h, OrderedDict) and len(h) == 1:
                    hierarchy_string = list(h.keys())[0]
                    alias = h[hierarchy_string]
                else:
                    raise Exception(
                        'Hierarchy elements must be either strings or OrderedDicts of length 1. Received {}'
                        .format(h))
                hierarchy_strings_to_alias[hierarchy_string] = alias
                default_filter = '**'
                extension = ''
                if i == len(hierarchy) - 1:  # last level of the hierarchy gets a file glob
                    default_filter = '*'
                    extension = '.yaml'
                glob_pattern_to_join.append(
                    self.filters.get(hierarchy_string, default_filter) +
                    extension)
            glob_pattern = os.path.sep.join(glob_pattern_to_join)
            logger.debug("Glob pattern: {}".format(glob_pattern))
            leaves = [
                path for path in glob(glob_pattern)
                if INCLUSIVE_VALUE not in path
            ]
            for leaf in leaves:
                logger.debug("Config file: {}".format(leaf))
                _leaf = os.path.splitext(
                    leaf)[0][len(self.config_dir):].lstrip('/')
                path_components = _leaf.split(os.path.sep)
                hierarchy_dict = OrderedDict(
                    zip(list(hierarchy_strings_to_alias.keys()),
                        path_components))
                logger.debug("Hierarchy: {}".format(
                    json.dumps(hierarchy_dict)))
                yaml_hierarchy_defaults = odyldo.safe_dump(
                    hierarchy_dict, default_flow_style=False)
                # FIRST APPEND HIERARCHICAL VALUES
                yaml_files_to_be_loaded = [yaml_hierarchy_defaults]

                # NOW APPEND DEFAULT FILES
                for k, v in hierarchy_dict.items():
                    default_fp = os.path.sep.join(
                        [self.default_dir, k, v + '.yaml'])
                    if os.path.isfile(default_fp):
                        yaml_files_to_be_loaded.append(default_fp)

                # NOW APPEND CONFIG FILES
                hierarchy_values = hierarchy_dict.values()
                possible_paths = itertools.product(
                    *[[INCLUSIVE_VALUE, val] for val in hierarchy_values])

                def _gen_config_paths():
                    for possible_path_components in possible_paths:
                        # cut config/dev/foo/api/@/@.yaml to config/dev/foo/api.yaml
                        stripped = tuple(
                            _rstrip_list(possible_path_components))
                        if stripped:
                            config_filename = stripped[-1] + '.yaml'
                            config_filepath = os.path.sep.join(
                                (self.config_dir, ) + stripped[:-1] +
                                (config_filename, ))
                            if os.path.isfile(config_filepath):
                                yield config_filepath

                sorted_config_files = sorted(
                    _gen_config_paths(),
                    key=lambda value: (len(value.split(os.path.sep)), value))

                yaml_files_to_be_loaded.extend(sorted_config_files)

                logger.debug("YAML files to be loaded: {}".format(
                    yaml_files_to_be_loaded[1:]))
                config = {}
                if self.attempt_deep_merge:
                    try:
                        config = hiyapyco.load(yaml_files_to_be_loaded,
                                               failonmissingfiles=True,
                                               interpolate=True,
                                               method=METHOD_MERGE)
                    except Exception:
                        logger.debug(
                            'Unable to load with method=merge, will attempt method=simple'
                        )
                if not config:
                    config = hiyapyco.load(yaml_files_to_be_loaded,
                                           failonmissingfiles=True,
                                           interpolate=True,
                                           method=METHOD_SIMPLE)
                for (hierarchy_string,
                     hierarchy_alias) in hierarchy_strings_to_alias.items():
                    if hierarchy_alias != hierarchy_string:
                        if hierarchy_alias:
                            config[hierarchy_alias] = config[hierarchy_string]
                        config.pop(hierarchy_string)
                output_name = leaf[len(self.config_dir):].lstrip('/')
                self.config[output_name] = config
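
To make the load order concrete, here is a hypothetical walk-through (hierarchy names, directory names and the '@' inclusive value are illustrative, not taken from the original project):

# hierarchy = ['environment', 'team', 'service'], leaf = config/dev/foo/api.yaml
#
# yaml_files_to_be_loaded is built in this order:
#   1. an in-memory YAML doc with the hierarchy values:
#        environment: dev / team: foo / service: api
#   2. per-level defaults, if the files exist (assuming default_dir='defaults'):
#        defaults/environment/dev.yaml, defaults/team/foo.yaml,
#        defaults/service/api.yaml
#   3. every existing combination of '@' and the real path components,
#      shallowest paths first, e.g. config/dev.yaml, config/@/foo.yaml,
#      config/dev/@/api.yaml, and finally config/dev/foo/api.yaml itself
#
# hiyapyco then merges the list front to back, so the most specific file wins.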
コード例 #43
0
                    default= os.path.splitext(sys.argv[0])[0]+'.log')
listpaths = []
for c in argfile_parser.parse_known_args()[0].config_paths:
    print("config folder:", c)
    if c[0] != '/':
        c = os.path.abspath(os.path.join(root_dir, c))
    default_file = os.path.join(c, 'defaults.yaml')
    if os.path.exists(default_file):
        listpaths.append(default_file)

argsfile = argfile_parser.parse_known_args()[0].args_file
if not os.path.exists(argsfile):
    argsfile = os.path.join(root_dir, 'config', 'args.yaml')
if os.path.exists(argsfile):
    listpaths.append(argsfile)
print("conf files -->", listpaths)
conf = hiyapyco.load(
#        *[os.path.join(argfile_parser.parse_known_args()[0].config_paths,'defaults.yaml'),
#        argfile_parser.parse_known_args()[0].args_file],
        *listpaths,
        interpolate=True,
        method=hiyapyco.METHOD_MERGE,
        failonmissingfiles=False
        )
defaults = conf['defaults']
for d in defaults:
    print("default:", d, defaults[d])

configurations = conf['configurations']
for d in configurations:
    print("configuration:", d, type(configurations[d]).__name__, configurations[d])

conf_args = conf['args']
for a in conf_args:
    # print(a, conf_args[a])
    arguments = dict()
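
For orientation, the merged document is evidently expected to expose three top-level keys; a rough, purely illustrative shape (the real files are not shown in this excerpt):

# defaults:                       <- typically from the per-folder defaults.yaml
#   loglevel: INFO
# configurations:                 <- from args.yaml
#   prod: {replicas: 3}
# args:                           <- from args.yaml, one entry per CLI argument
#   timeout: {type: int, default: 30}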
コード例 #44
0
ファイル: issue33.py プロジェクト: zerwes/hiyapyco
import logging
import os

import hiyapyco

# basepath is referenced below but was not defined in the excerpt; it is
# assumed to point at the directory containing this script
basepath = os.path.dirname(os.path.realpath(__file__))

logger = logging.getLogger()
logging.basicConfig(
        level=logging.INFO,
        format='%(levelname)s\t[%(name)s] %(funcName)s: %(message)s'
        )

ymlstr = """
k2: 222
k3:
  - done: a2
 - dtwo: b2
dthree
"""


conf = hiyapyco.load(
        ymlstr,
        os.path.join(basepath, 'olist1.yaml'),
        os.path.join(basepath, 'err.yaml'),
        method=hiyapyco.METHOD_SIMPLE,
        failonmissingfiles=False
        )

print (hiyapyco.dump(conf))
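
Worth noting for this example (behaviour as documented by hiyapyco, not shown in the excerpt):

# With failonmissingfiles=False, a non-existent olist1.yaml or err.yaml is
# skipped with a warning instead of raising, so the dump may reflect the
# in-memory ymlstr document alone.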

# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 smartindent nu

コード例 #45
0
ファイル: issue25.py プロジェクト: zerwes/hiyapyco
base = """
key_one: 1
key_two: 2
array_of_dicts:
- dict_key_one: a
  dict_key_two: b
  dict_key_three: c
- dict_key_one: a1
  dict_key_two: b1
  dict_key_three: c1
"""
layer = """
key_two: 2222
array_of_dicts:
- dict_key_one: a2
  dict_key_two: b2
  dict_key_three: c2
"""


CONF = hiyapyco.load([base, layer], method=hiyapyco.METHOD_MERGE)
print (hiyapyco.dump(CONF))

print ("... using mergelists=False ...")
CONF = hiyapyco.load([base, layer], method=hiyapyco.METHOD_MERGE, mergelists=False)
print (hiyapyco.dump(CONF))
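
The two dumps differ in how the list is combined (expected behaviour of METHOD_MERGE; the actual output is not included in the excerpt):

# mergelists=True (default): the entries of layer's 'array_of_dicts' are appended
#   to the base list, giving three dicts in total.
# mergelists=False: the single-entry list from layer replaces the base list.
# In both cases key_two becomes 2222 and key_one stays 1.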

# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 smartindent nu