Example #1
def load_config():
    YamlIncludeConstructor.add_to_loader_class(loader_class=yaml.FullLoader)

    with open("config.yaml", "r") as file:
        yaml_as_dictionary = yaml.load(file, Loader=yaml.FullLoader)

    return from_dict(data_class=Config, data=yaml_as_dictionary)
Example #2
def main(prog_args):
    YamlIncludeConstructor.add_to_loader_class(loader_class=yaml.FullLoader)

    print('read_config...')
    dtp_config = read_config(prog_args)
    print(dtp_config)

    print('load_driver...')
    dtp_driver = load_driver(dtp_config)
    print(dtp_driver)

    print('load_data_source...')
    data_source = load_data_source(dtp_config, dtp_driver)
    print(list(data_source))

    print('transform_data_source...')
    pygm_source = transform_data_source(dtp_config, data_source)
    print(pygm_source)

    print('translate_into_yaml...')
    pygm_yaml = translate_into_yaml(dtp_config, pygm_source)
    print(pygm_yaml)

    print('process_info_schema...')
    metadata_formatted = process_info_schema(dtp_config, pygm_yaml)
    print(metadata_formatted)

    print('Exiting...')
    pass
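
These first examples register the `!include` constructor on `yaml.FullLoader` and then load a `config.yaml`. A minimal, self-contained sketch of what that tag does once registered (file names and contents below are invented for illustration):

import tempfile
from pathlib import Path

import yaml
from yamlinclude import YamlIncludeConstructor

# Split a tiny config across two files in a temporary directory.
base = Path(tempfile.mkdtemp())
(base / "database.yaml").write_text("host: localhost\nport: 5432\n")
(base / "config.yaml").write_text("database: !include database.yaml\n")

# Resolve !include relative to base_dir whenever FullLoader is used.
YamlIncludeConstructor.add_to_loader_class(loader_class=yaml.FullLoader,
                                           base_dir=str(base))

with open(base / "config.yaml") as f:
    print(yaml.load(f, Loader=yaml.FullLoader))
# -> {'database': {'host': 'localhost', 'port': 5432}}
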
Example #3
def load_yaml_data(conf_path: Path, quiet: bool = False) -> AnyByStrDict:
    """Load the `copier.yml` file.

    This is like a simple YAML load, but applying all specific quirks needed
    for [the `copier.yml` file][the-copieryml-file].

    For example, it supports the `!include` tag with glob includes, and
    merges multiple sections.

    Params:
        conf_path: The path to the `copier.yml` file.
        quiet: Used to configure the exception.

    Raises:
        InvalidConfigFileError: When the file is formatted badly.
    """
    YamlIncludeConstructor.add_to_loader_class(loader_class=yaml.FullLoader,
                                               base_dir=conf_path.parent)

    try:
        with open(conf_path) as f:
            flattened_result = deepflatten(
                yaml.load_all(f, Loader=yaml.FullLoader),
                depth=2,
                types=(list, ),
            )
            # HACK https://bugs.python.org/issue32792#msg311822
            # I'd use ChainMap, but it doesn't respect order in Python 3.6
            result = {}
            for part in flattened_result:
                result.update(part)
            return result
    except yaml.parser.ParserError as e:
        raise InvalidConfigFileError(conf_path, quiet) from e
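
The "merges multiple sections" quirk described in the docstring amounts to folding every YAML document in the stream into a single dict; `deepflatten` additionally unwraps the lists produced by glob-style `!include`s. A minimal sketch of just the merging step, using an inline string and made-up keys:

import yaml

# Two YAML documents in one stream, as a sectioned copier.yml might contain.
MULTI_DOC = """\
_min_copier_version: "6.0"
project_name:
  type: str
---
use_docker:
  type: bool
  default: false
"""

merged = {}
for section in yaml.load_all(MULTI_DOC, Loader=yaml.FullLoader):
    merged.update(section)  # later documents override earlier keys
print(merged)
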
Example #4
def load_manifest_from_file(filename: str, type: Type[T]) -> T:
    _logger.debug("Loading manifest file (%s)", filename)
    filepath = os.path.abspath(filename)
    _logger.debug("filepath: %s", filepath)
    filedir: str = os.path.dirname(filepath)
    utils.print_dir(dir=filedir)
    YamlIncludeConstructor.add_to_loader_class(
        loader_class=yaml.SafeLoader, base_dir=filedir)
    _add_ssm_param_injector()
    _add_env_var_injector()
    with open(filepath, "r") as f:
        raw: Dict[str, Any] = cast(Dict[str, Any], yaml.safe_load(f))
    _logger.debug("raw: %s", raw)
    if type is Manifest:
        raw["SsmParameterName"] = f"/orbit/{raw['Name']}/manifest"
        manifest: T = cast(
            T,
            Manifest.Schema().load(data=raw,
                                   many=False,
                                   partial=False,
                                   unknown=EXCLUDE))
    elif type is FoundationManifest:
        raw["SsmParameterName"] = f"/orbit-f/{raw['Name']}/manifest"
        manifest = cast(
            T,
            FoundationManifest.Schema().load(data=raw,
                                             many=False,
                                             partial=False,
                                             unknown=EXCLUDE))
    else:
        raise ValueError("Unknown 'manifest' Type")
    ManifestSerDe.dump_manifest_to_ssm(manifest=manifest)
    return manifest
Example #5
def _load_yaml(self, file_yaml):
    """ Load a yaml file """
    yaml_dir = os.path.dirname(file_yaml)
    self._debug(f"Loading YAML file '{file_yaml}', base dir '{yaml_dir}'")
    YamlIncludeConstructor.add_to_loader_class(
        loader_class=yaml.FullLoader, base_dir=yaml_dir)
    with open(file_yaml) as yaml_file:
        return yaml.load(yaml_file, Loader=yaml.FullLoader)
Example #6
def open_config(config_path: str,
                is_yaml: bool = False) -> Tuple[Union[Dict, dict], bool]:
    """Open yaml config

    Args:
        config_path (str): yaml config path
        is_yaml (bool): If True, return yaml dict rather than addict.Dict

    Returns: Tuple of (config, is_py)
    """
    # * Check for existence
    if not osp.isfile(config_path):
        raise RuntimeError(f"No config file found at {config_path}")

    # * Get extension
    name, ext = osp.splitext(config_path)

    is_py = False
    # * Read yml/yaml config
    if ext == '.yml' or ext == '.yaml':
        # * Read and set config
        config_dir, _ = osp.split(config_path)
        YamlIncludeConstructor.add_to_loader_class(
            loader_class=yaml.FullLoader, base_dir=config_dir)
        with open(config_path, 'r') as f:
            yaml_config = yaml.load(f, Loader=yaml.FullLoader)

        if is_yaml:
            config = yaml_config
        else:
            config = Dict(yaml_config)
    elif ext == '.py':
        is_py = True

        # # * Get import name
        # def get_import_name(name):
        #     name = ntpath.normpath(name)
        #     return '.'.join(name.split(ntpath.sep))

        # * Return config
        spec = spec_from_file_location("foo", config_path)
        module = module_from_spec(spec)
        spec.loader.exec_module(module)
        config: Dict = module.config
        # config: Dict = import_module(get_import_name(name)).config
    elif ext == '.json':
        is_py = True
        with open(config_path, 'r') as json_f:
            config = Dict(json.load(json_f))
    elif ext == ".pkl":
        is_py = True
        with open(config_path, 'rb') as pkl_f:
            config = Dict(pickle.load(pkl_f))
    else:
        raise ValueError(
            f"config_path = {config_path} must be python, json, pkl, yml or yaml file."
        )
    return config, is_py
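
The `is_yaml` flag above only controls whether the caller gets the raw dict or an `addict.Dict`, which adds attribute-style access on top of normal key access. A small sketch of the difference (the keys are invented):

from addict import Dict

yaml_config = {"model": {"name": "resnet18", "lr": 0.001}}

plain = yaml_config          # what is_yaml=True returns
wrapped = Dict(yaml_config)  # what the default path returns

print(plain["model"]["lr"])  # 0.001 -- key access only
print(wrapped.model.lr)      # 0.001 -- attribute access also works
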
Example #7
    def setUp(self):
        if yaml.__version__ >= '5.0':
            self.LOADER = yaml.FullLoader
        else:
            self.LOADER = yaml.Loader

        YamlIncludeConstructor.add_to_loader_class()
        self.assertIn(YamlIncludeConstructor.DEFAULT_TAG_NAME,
                      self.LOADER.yaml_constructors)
Example #8
def main(in_, template_path, root, set_, output_file, output_dir):
    YamlIncludeConstructor.add_to_loader_class(loader_class=FullLoader)

    with open(in_, "r") as stream:
        yaml = load(stream, Loader=FullLoader)

    template_args = get_template_args(yaml, root, set_)
    output_path = make_output_path(output_file, output_dir)

    secrender(template_path, template_args, output_path)
Example #9
def load_yaml_data(conf_path: Path,
                   quiet: bool = False,
                   _warning: bool = True) -> AnyByStrDict:

    YamlIncludeConstructor.add_to_loader_class(loader_class=yaml.FullLoader,
                                               base_dir=conf_path.parent)

    try:
        with open(conf_path) as f:
            return yaml.load(f, Loader=yaml.FullLoader)
    except yaml.parser.ParserError as e:
        raise InvalidConfigFileError(conf_path, quiet) from e
Example #10
    def __init__(self, config_path: str = "config/"):
        self.configured = False

        YamlIncludeConstructor.add_to_loader_class(
            loader_class=yaml.FullLoader, base_dir=config_path)

        with open(config_path + 'config.yaml', 'r') as config_yaml:
            config = yaml.load(config_yaml, Loader=yaml.FullLoader)
            self.sensors = [
                QSensor(s["name"], s["units"]) for s in config["sensors"]
            ]
            self.relays = [QRelay(r["name"], False) for r in config["relays"]]

            self.configured = True
Example #11
    def __analyze_file_yaml(self):
        self.__logger.debug(f"CONFIG_YAML:\n{json.dumps(self.yaml_file_name, indent=2)}")
        YamlIncludeConstructor.add_to_loader_class(loader_class=yaml.FullLoader, base_dir='.')
        with open(self.yaml_file_name) as yml:
            conf_yaml = yaml.load(yml, Loader=yaml.FullLoader)
        self.__logger.debug(f"CONFIG_YAML:\n{json.dumps(conf_yaml, indent=2)}")

        # Analyze images info
        for key, val in conf_yaml["vm"].items():
            for nm, inf in val.items():
                init_img = inf.get('init_img') or ""
                name_intrf = inf.get('name_intrf') or ""
                plat = inf.get('platform') or ""
                # if 'init_img' in inf:
                #     init_img = inf['init_img']
                self.imgs_info[nm] = ImageInfo(
                    name=nm, 
                    flavor_id=inf.get('flavor'), 
                    image=inf.get('image'), 
                    dev_type=key, 
                    init_img=inf.get('init_img'), 
                    name_intrf=inf.get('name_intrf'), 
                    plat=inf.get('platform'),
                    vol_en = inf.get('volume_enable'),
                    vol_img = inf.get('volume_image'),
                    vol_dev = inf.get('volume_device')
                    )

        # Analyze network links
        for key, val in conf_yaml["networks"].items():
            # print(f'{key}')
            name = "link_" + str(key)
            self.links_info[key] = LinkInfo(name=name, num=key, val=val)
            self.__logger.debug(f"LinkInfo: {self.links_info[key]}")

        used_net_links = list()
        # Analyze routers
        if 'routers' in conf_yaml:
            for key, val in conf_yaml["routers"].items():
                # self.__logger.info(f"net: {str(self.links_info)}")
                self.rtrs_info[key] = RouterInfo(name=key, val=val, net=self.links_info)
                self.__logger.debug(f'{self.rtrs_info[key]}')

        # Analyze servers
        if 'servers' in conf_yaml:
            for key, val in conf_yaml["servers"].items():
                self.srvs_info[key] = ServerInfo(name=key, val=val, net=self.links_info)
                self.__logger.debug(f'{self.srvs_info[key]}')

        self.__logger.debug(str(self.__str__()))
Example #12
def read_config_file(fpath):
    """
    Read configuration from a YAML file at path `fpath`.  The file
    can include other files using the `!include` directive.
    """

    basedir = fpath.parent
    YamlIncludeConstructor.add_to_loader_class(
        loader_class=SafeLoader, base_dir=basedir)

    with open(fpath, 'r') as class_conf:
        config = yaml.load(class_conf, Loader=SafeLoader)

    return config
Example #13
def main():
    args = argument_parser()

    print('base_dir: {}'.format(args.base_dir))
    print('field_template_file: {}'.format(args.field_template_file))
    print('output_file: {}'.format(args.output_file))

    YamlIncludeConstructor.add_to_loader_class(loader_class=yaml.FullLoader,
                                               base_dir=args.base_dir)

    with open(args.field_template_file) as f:
        data = yaml.load(f, Loader=yaml.FullLoader)

    with open(args.output_file, 'w') as file:
        yaml.dump(data, file, default_flow_style=False, sort_keys=False)
Example #14
    def __init__(self, *args, **kwargs):
        super(cls, self).__init__(*args, **kwargs)

        def yaml_join(loader, node):
            seq = loader.construct_sequence(node)
            return ' '.join([str(i) for i in seq])

        def yaml_remove_arg(loader, node):
            orig, remove = node.value
            args = loader.construct_scalar(orig)
            return " ".join(a for a in args.split() if a != remove.value)

        def folder_constructor(loader, node):
            return Folder(node.value)

        def union_constructor(loader, node):
            return Union(node.value)

        self.add_constructor(u'tag:yaml.org,2002:map', from_yaml)
        self.add_constructor(u'tag:yaml.org,2002:omap', from_yaml)
        self.add_constructor('!join', yaml_join)
        self.add_constructor('!remove_arg', yaml_remove_arg)
        self.add_constructor("!folder", folder_constructor)
        self.add_constructor("!union", union_constructor)
        self.add_constructor('!include',
                             YamlIncludeConstructor(base_dir=base_dir))
Example #15
def setUpClass(cls) -> None:
    reader_table = [
        (re.compile(r'^.+\.(([yY][mM][lL])|([Yy][aA][mM][lL]))$'), YamlReader),
        (re.compile(r'^.+\.([j][2])$'), YamlReader),
    ]
    constructor = YamlIncludeConstructor(reader_map=reader_table)
    yaml.FullLoader.add_constructor('!inc', constructor)
Example #16
    def setUp(self):
        from yaml import SafeLoader, Loader
        self.LOADER_CLASSES = [SafeLoader, Loader]
        try:
            from yaml import CSafeLoader
        except ImportError as err:
            print(err, file=stderr)
        else:
            self.LOADER_CLASSES.append(CSafeLoader)
        try:
            from yaml import CLoader
        except ImportError as err:
            print(err, file=stderr)
        else:
            self.LOADER_CLASSES.append(CLoader)

        for loader_cls in self.LOADER_CLASSES:
            YamlIncludeConstructor.add_to_loader_class(loader_cls)
Example #17
def _add_constructors(self, base_path: str) -> None:
    _ = self  # Fake usage
    yaml.SafeLoader.add_constructor(  # type: ignore
        "!engine", partial(_custom_yaml_constructor, clstype=RealEngine))
    yaml.SafeLoader.add_constructor(  # type: ignore
        "!retry", partial(_custom_yaml_constructor, clstype=RetryHandler))
    yaml.SafeLoader.add_constructor(  # type: ignore
        "!include", YamlIncludeConstructor(base_dir=base_path))
    yaml.SafeLoader.add_constructor(  # type: ignore
        "!env", _custom_env_tag)
Example #18
def run_config(config_path: Path) -> None:
    """Parse a YAML config file, returning the validated ILLIXR system config."""
    YamlIncludeConstructor.add_to_loader_class(
        loader_class=yaml.FullLoader, base_dir=config_path.parent,
    )

    with config_path.open() as f:
        config = yaml.full_load(f)

    with (root_dir / "runner/config_schema.yaml").open() as f:
        config_schema = yaml.safe_load(f)

    jsonschema.validate(instance=config, schema=config_schema)
    fill_defaults(config, config_schema)

    loader = config["loader"]["name"]

    if loader not in loaders:
        raise RuntimeError(f"No such loader: {loader}")
    loaders[loader](config)
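
The validation step above is plain `jsonschema`: the config loaded with `yaml.full_load` is checked against a schema that is itself stored as YAML. A minimal sketch of that check, with an invented schema standing in for `runner/config_schema.yaml`:

import jsonschema
import yaml

SCHEMA = yaml.safe_load("""
type: object
required: [loader]
properties:
  loader:
    type: object
    properties:
      name: {type: string}
""")

config = {"loader": {"name": "native"}}
jsonschema.validate(instance=config, schema=SCHEMA)  # raises ValidationError on mismatch
print("config passed schema validation")
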
Example #19
    def __init__(self, config_path: Path):
        self.batch_index = None
        self.yaml_path = None
        self.db_path = None

        self.config_path = config_path.absolute()

        YamlIncludeConstructor.add_to_loader_class(
            loader_class=yaml.FullLoader,
            base_dir=config_path.absolute().parent)

        try:
            self.data = self.apply_schema(self.config_path)
        except SchemaError as exc:
            sys.exit(exc.code)

        try:
            self.do_checks(self.data)
        except Error as exc:
            sys.exit(exc)

        self.batch_mode = 'batch_simulations' in self.data
        if self.batch_mode:
            self.batch_simulations = self.data['batch_simulations']
Example #20
class MultiLoaderTestCase(unittest.TestCase):

    constructor = YamlIncludeConstructor()

    def setUp(self):
        yaml.add_constructor('!include', self.constructor)

    def test_full_load_all_yaml(self):
        txt = dedent('''
        ---
        file1: !include tests/data/include.d/1.yaml

        ---
        file2: !include tests/data/include.d/2.yaml
        ''').strip()
        iterable = yaml.full_load_all(txt)
        for i, data in enumerate(iterable):
            if i == 0:
                self.assertEqual(data, {'file1': YAML1})
            elif i == 1:
                self.assertEqual(data, {'file2': YAML2})
            else:
                raise RuntimeError()
Example #21
    def containers(self, yaml_config, render_context=None, base_dir=None):
        if isfile(u"{}".format(yaml_config)):
            if base_dir:
                YamlIncludeConstructor.add_to_loader_class(base_dir=base_dir)
            else:
                base_dir = u"{}".format(dirname(yaml_config))
                YamlIncludeConstructor.add_to_loader_class(base_dir=base_dir)
            handle = open(yaml_config)
            config = handle.read()
            handle.close()
        else:
            YamlIncludeConstructor.add_to_loader_class(base_dir=base_dir)
            config = yaml_config

        config = Environment(loader=FileSystemLoader(base_dir or '.')).\
                                from_string(config).render(render_context or {})

        for doc in yaml.load_all(config, Loader=yaml.FullLoader):
            try:
                yield SparcConfigContainer(doc)
            except BrokenImplementation:
                raise ValueError(
                    "expected yaml_config to contain valid yaml file path or string: {}"
                    .format(yaml_config))
Example #22
import requests
from sp_api.tests.base_api import BaseApi
import json
from yamlinclude import YamlIncludeConstructor
from sp_api.tests.base_test import BaseTest
from yaml import load_all

class SmsCampaign(BaseApi):

    def __init__(self):
        super().__init__()
        self.url = '{}/{}'.format(self.api_settings['url_api'], 'sms/send')
        # self.url = '{}/{}'.format(self.api_settings['url_api'], 'sms/campaigns')

    def add_campaign(self, requests_info):
        self.headers.update({'Content-Type': 'application/json'})
        response = requests.post(self.url, headers=self.headers, json=requests_info)
        return response.json()


if __name__ == '__main__':
    # add_campaign
    resource_dir = BaseTest().conf['resourses']
    YamlIncludeConstructor.add_to_loader_class(base_dir=resource_dir + '/requests/sms/campaign')
    with open('{}/{}'.format(resource_dir, "requests/sms/campaign/add_campaigns.yaml")) as f:
        for request_data in load_all(f):
            print(json.dumps(request_data, indent=4, ensure_ascii=False))
            added_campaign = SmsCampaign().add_campaign(request_data)
            print(json.dumps(added_campaign, indent=4, ensure_ascii=False))

Example #23
def setUp(self):
    for loader_cls in YAML_LOADERS:
        YamlIncludeConstructor.add_to_loader_class(loader_cls)
Example #24
def read_vars(vars_file):
    YamlIncludeConstructor.add_to_loader_class(
        loader_class=BuildLoader, base_dir=os.path.dirname(vars_file))
    with open(vars_file, 'r') as file_open:
        used = yaml.load(file_open.read(), BuildLoader)

    used.setdefault('environment', [])
    used.setdefault('runtime_environment', [])
    used.setdefault('runtime_interpreted', {})
    used.setdefault('runtime_postprocess', [])

    current_vars = {}
    if 'extends' in used:
        current_vars, config = read_vars(used['extends'])

        no_interpreted = set()
        no_interpreted.update(config.get('no_interpreted', []))
        no_interpreted.update(used.get('no_interpreted', []))
        used['no_interpreted'] = list(no_interpreted)

        environment = config['environment']
        runtime_environment = config['runtime_environment']
        for e in used['runtime_environment']:
            if e in environment:
                environment.remove(e)
            runtime_environment.append(e)
        for name, interpreted in config.get('runtime_interpreted', {}).items():
            if name in used['runtime_interpreted']:
                if interpreted is list and used['runtime_interpreted'][
                        name] is list:
                    used['runtime_interpreted'][name] += interpreted
                else:
                    value = {'vars': []}
                    if interpreted is list:
                        value['vars'] += interpreted
                    else:
                        clone = copy.copy(interpreted)
                        if 'vars' in clone:
                            value['vars'] += clone['vars']
                            del clone['vars']
                        value.update(clone)
                    if used['runtime_interpreted'][name] is list:
                        value['vars'] += used['runtime_interpreted'][name]
                    else:
                        clone = copy.copy(used['runtime_interpreted'][name])
                        if 'vars' in clone:
                            value['vars'] += clone['vars']
                            del clone['vars']
                        value.update(clone)
            else:
                used['runtime_interpreted'][name] = interpreted
        used['runtime_postprocess'] += config.get('runtime_postprocess', [])
        for e in used['environment']:
            if e in runtime_environment:
                runtime_environment.remove(e)
            environment.append(e)
        used['environment'] = environment
        used['runtime_environment'] = runtime_environment

    new_vars = do_process(used, used.get('vars', {}))

    update_paths = []
    for update_path in used.get('update_paths', []):
        split_path = update_path.split('.')
        for i in range(len(split_path)):
            update_paths.append('.'.join(split_path[:i + 1]))
    update_vars(current_vars, new_vars, set(update_paths))
    return current_vars, used
Example #25
# Ravestate class which represents a cluster of module configurations
# for a single context instance.

from ravestate import module
from collections import defaultdict
from typing import List, Any

import yaml
from yamlinclude import YamlIncludeConstructor

YamlIncludeConstructor.add_to_loader_class(yaml.SafeLoader)

from reggol import get_logger

logger = get_logger(__name__)


class Configuration:
    """
    The Configuration class maintains a dictionary of key-value stores, which
    represent configuration entries for specific named modules. The key-value
    stores may be successively updated with consecutive yaml files, where each
    yaml document has the following content:

      ---
      module: module-name
      config:
        key-a: value-a
        key-b: value-b
        # etc
      ---
Example #26
import os
import logging
import logging.config
import argparse
from datetime import datetime
from zipfile import ZipFile, ZIP_DEFLATED
from dotenv import load_dotenv
import yaml
from yamlinclude import YamlIncludeConstructor
from plugins import plugin_loader

VERSION = '1.0'
BASE_DIR = os.path.dirname(__file__)
CONF_DIR = os.path.join(BASE_DIR, 'config')
OUTPUT_DIR = os.path.join(BASE_DIR, 'output')
YamlIncludeConstructor.add_to_loader_class(loader_class = yaml.SafeLoader, base_dir = CONF_DIR)
load_dotenv()

os.makedirs(OUTPUT_DIR, exist_ok = True)
logging.config.fileConfig(os.path.join(CONF_DIR, 'logging.conf'))
logger = logging.getLogger('report')

def read_config(filename: str):
    """
    Read the configuration file (YAML)
    """
    logger.info(f"reading configuration file {filename}")
    with open(filename, 'r') as config:
        try:
            return yaml.safe_load(config)
        except yaml.YAMLError as ex:
            # assumed completion (the snippet is cut off here): log and re-raise
            logger.error(f"error parsing {filename}: {ex}")
            raise
Example #27
    def __new__(cls):
        if cls._instance is None:
            parser = argparse.ArgumentParser()
            parser.add_argument('-i',
                                '--id',
                                type=str,
                                help='Unique client ID.')
            parser.add_argument('-p',
                                '--port',
                                type=str,
                                help='The port number for running a server.')
            parser.add_argument('-c',
                                '--config',
                                type=str,
                                default='./config.yml',
                                help='Federated learning configuration file.')
            parser.add_argument('-s',
                                '--server',
                                type=str,
                                default=None,
                                help='The server hostname and port number.')
            parser.add_argument(
                '-d',
                '--download',
                action='store_true',
                help='Download the dataset to prepare for a training session.')
            parser.add_argument('-l',
                                '--log',
                                type=str,
                                default='info',
                                help='Log messages level.')

            args = parser.parse_args()
            Config.args = args

            if Config.args.id is not None:
                Config.args.id = int(args.id)
            if Config.args.port is not None:
                Config.args.port = int(args.port)

            numeric_level = getattr(logging, args.log.upper(), None)

            if not isinstance(numeric_level, int):
                raise ValueError(f'Invalid log level: {args.log}')

            logging.basicConfig(
                format='[%(levelname)s][%(asctime)s]: %(message)s',
                datefmt='%H:%M:%S')
            root_logger = logging.getLogger()
            root_logger.setLevel(numeric_level)

            cls._instance = super(Config, cls).__new__(cls)

            if 'config_file' in os.environ:
                filename = os.environ['config_file']
            else:
                filename = args.config

            YamlIncludeConstructor.add_to_loader_class(
                loader_class=yaml.SafeLoader, base_dir='./configs')

            if os.path.isfile(filename):
                with open(filename, 'r', encoding="utf8") as config_file:
                    config = yaml.load(config_file, Loader=yaml.SafeLoader)
            else:
                # if the configuration file does not exist, use a default one
                config = Config.default_config()

            Config.clients = Config.namedtuple_from_dict(config['clients'])
            Config.server = Config.namedtuple_from_dict(config['server'])
            Config.data = Config.namedtuple_from_dict(config['data'])
            Config.trainer = Config.namedtuple_from_dict(config['trainer'])
            Config.algorithm = Config.namedtuple_from_dict(config['algorithm'])

            if Config.args.server is not None:
                Config.server = Config.server._replace(
                    address=args.server.split(':')[0])
                Config.server = Config.server._replace(
                    port=args.server.split(':')[1])

            if Config.args.download:
                Config.clients = Config.clients._replace(total_clients=1)
                Config.clients = Config.clients._replace(per_round=1)

            if 'results' in config:
                Config.results = Config.namedtuple_from_dict(config['results'])
                if hasattr(Config().results, 'results_dir'):
                    Config.result_dir = Config.results.results_dir
                else:
                    datasource = Config.data.datasource
                    model = Config.trainer.model_name
                    server_type = Config.algorithm.type
                    Config.result_dir = f'./results/{datasource}/{model}/{server_type}/'

            if 'model' in config:
                Config.model = Config.namedtuple_from_dict(config['model'])

            if hasattr(Config().trainer, 'max_concurrency'):
                # Using a temporary SQLite database to limit the maximum number of concurrent
                # trainers
                Config.sql_connection = sqlite3.connect(
                    "/tmp/running_trainers.sqlitedb")
                Config().cursor = Config.sql_connection.cursor()

            # Customizable dictionary of global parameters
            Config.params: dict = {}

            # A run ID is unique to each client in an experiment
            Config.params['run_id'] = os.getpid()

            # Pretrained models
            Config.params['model_dir'] = "./models/pretrained/"
            Config.params['pretrained_model_dir'] = "./models/pretrained/"

        return cls._instance
Example #28
def setUp(self):
    YamlIncludeConstructor.add_to_loader_class()
Example #29
#!/usr/bin/env python
import logging
try:
    from mpi4py import MPI
    H5FLOW_MPI = True
except Exception as e:
    logging.warning(f'Running without mpi4py because {e}')
    H5FLOW_MPI = False
from .core import H5FlowManager, resources
import argparse
import yaml
import sys
from yamlinclude import YamlIncludeConstructor
YamlIncludeConstructor.add_to_loader_class(loader_class=yaml.FullLoader,
                                           base_dir='./')


def run(configs,
        output_filename,
        input_filename=None,
        start_position=None,
        end_position=None,
        verbose=0):
    '''
        Execute a workflow specified by ``config`` writing to ``output_filename``.

        :param configs: ``list``, paths to configuration yamls to run in sequence

        :param output_filename: ``str``, path to output hdf5 file

        :param input_filename: ``str``, path to optional input file (default: ``None``)
Example #30
def __init__(self):
    DottedDict.__init__(self)
    YamlIncludeConstructor.add_to_loader_class(
        loader_class=yaml.FullLoader)  # , base_dir='/your/conf/dir')