Example No. 1
def provider(cls):
    """ Prepares a class to be an :class:`InfoProvider`.

    This decorator collects all methods of the class marked with
    :func:`@provides <provides>` and stores them in the decorated class's
    ``providers`` dictionary.

    A YAML constructor will also be registered for the decorated class, so it
    can be instantiated automatically by :func:`yaml.load`.

    Although this is a decorator, no wrapper class is involved: the input
    class itself is returned, with only its attributes updated.

    .. todo:: Use a metaclass if possible to avoid information duplication
        (inheriting from InfoProvider *and* declaring ``@provider``).

    .. todo:: Inheritance and overriding. Need to think through what happens
        when a ``@provides`` method is overridden in a derived class.
        Explicitly specify the key again? Or implicitly inherit the declared
        key? (This would be possible if methods were registered by name and
        the metaclass transformed the table for each *object* upon its
        instantiation, but that may not be desirable.)

    """
    def yaml_constructor(loader, node):
        return cls() if isinstance(node, yaml.ScalarNode) \
            else cls(**loader.construct_mapping(node, deep=True))

    yaml.add_constructor('!{0}'.format(cls.__name__), yaml_constructor)

    cls.providers = dict((i[1].provided_key, i[1]) for i in getmembers(cls)
                         if hasattr(i[1], 'provided_key'))

    # There is no wrapper class, the input class is returned.
    return cls
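
A minimal usage sketch of the decorator above (class and attribute names are made up; it assumes the snippet's implied imports, yaml and inspect.getmembers, are in scope):

@provider
class TemperatureInfo:
    def __init__(self, unit='C'):
        self.unit = unit

# The registered '!TemperatureInfo' constructor lets yaml.load build instances:
obj = yaml.load('!TemperatureInfo {unit: F}', Loader=yaml.Loader)
assert isinstance(obj, TemperatureInfo) and obj.unit == 'F'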
Example No. 2
def write_yaml():
    add_constructor(
        resolver.BaseResolver.DEFAULT_MAPPING_TAG,
        lambda loader, node: OrderedDict(loader.construct_pairs(node)))
    yaml = YAML()
    yaml.default_flow_style = False
    i = 1

    if os.path.isfile("fruits.yaml"):
        os.remove("fruits.yaml")

    loaded_array = np.load(
        '/home/oza/pre-experiment/speeding/distiller/distiller/apputils/simple_gene.npz'
    )
    with open("fruits.yaml", "a") as yf:
        yaml.dump(
            {
                "version": i,
                "pruners": {
                    "conv1_pruner": {
                        "class": 'AutomatedGradualPruner',
                        'initial_sparsity': float(loaded_array['array_1']),
                        'final_sparsity': 0.3,
                        'weights': '[module.first.conv.weight]'
                    }
                },
                "fruit_2": {
                    "name": "orange",
                    "price": "200"
                }
            }, yf)
Example No. 3
def build_index():
    """Create the index of all (YAML) sheets available."""
    from mathmaker import settings
    from ruamel import yaml
    # Below snippet from https://stackoverflow.com/a/21048064/3926735
    # to load roadmap.yaml using OrderedDict instead of dict
    _mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG

    def dict_representer(dumper, data):
        return dumper.represent_dict(data.items())

    def dict_constructor(loader, node):
        return OrderedDict(loader.construct_pairs(node))

    yaml.add_representer(OrderedDict, dict_representer)
    yaml.add_constructor(_mapping_tag, dict_constructor)
    index = dict()
    themes_dirs = [x
                   for x in os.listdir(settings.frameworksdir)
                   if os.path.isdir(settings.frameworksdir + x)]
    for theme in themes_dirs:
        folder_path = os.path.join(settings.frameworksdir, theme)
        folder_files = glob(folder_path + '/*.yaml')
        for folder_path in folder_files:
            subtheme = os.path.splitext(os.path.basename(folder_path))[0]
            with open(folder_path) as f:
                loaded_data = yaml.safe_load(f)
                if loaded_data is None:
                    # skip empty sheet files
                    continue
                folder = OrderedDict(loaded_data)
                for sheet_name in folder:
                    directive = '_'.join([subtheme, sheet_name])
                    index[directive] = (theme, subtheme, sheet_name)
                    # Automatic add possibly missing sheet integration test
                    sheet_test_dir = Path(os.path.join(settings.testsdir,
                                                       'integration',
                                                       theme,
                                                       subtheme))
                    file_name = subtheme + '_' + sheet_name
                    sheet_file = Path(os.path.join(sheet_test_dir,
                                                   'test_{}.py'
                                                   .format(file_name)))
                    if not sheet_file.is_file():
                        sheet_test_dir.mkdir(parents=True, exist_ok=True)
                        template = TESTFILE_TEMPLATE
                        if (theme == 'mental_calculation'
                            and not sheet_name.startswith('W')):
                            template += \
                                MENTAL_CALCULATION_TESTFILE_TEMPLATE_ADDENDUM
                        with open(sheet_file, 'w') as f:
                            f.write(template.format(theme=theme,
                                                    subtheme=subtheme,
                                                    sheet_name=sheet_name))

    with open(settings.index_path, 'w') as f:
        json.dump(index, f, indent=4)
        f.write('\n')
Example No. 4
    def __call__(self, cls):
        if not hasattr(self.target, 'backends'):
            target = self.target
            target.backends = dict()
            constructor_name = '!{0}'.format(target.__name__)
            log.debug("Adding YAML constructor for %r as %r", target.__name__,
                      constructor_name)
            yaml.add_constructor(constructor_name, YAMLConstructor(target))

        self.target.backends[self.id_] = cls
        return cls
    def load_calibration_file(self, filename):
        # Open and read in the calibration
        with open(filename, 'r') as f:
            yaml.add_constructor(u"tag:yaml.org,2002:opencv-matrix",
                                 opencv_matrix)
            data = yaml.load(
                f,
                Loader=yaml.Loader,
            )

        return data
Example No. 6
    def __load_yaml(self, file: str) -> OrderedDict:
        add_constructor(
            resolver.BaseResolver.DEFAULT_MAPPING_TAG,
            lambda loader, node: OrderedDict(loader.construct_pairs(node)),
        )

        _yaml = YAML()
        _yaml.default_flow_style = False

        with open(file, "r", encoding="utf-8") as f:
            data = _yaml.load(f)
        return data
Example No. 7
def register_xxx(**kw):
    from ruamel import yaml

    class XXX(yaml.comments.CommentedMap):
        @staticmethod
        def yaml_dump(dumper, data):
            return dumper.represent_mapping(u'!xxx', data)

        @classmethod
        def yaml_load(cls, constructor, node):
            data = cls()
            yield data
            constructor.construct_mapping(node, data)

    yaml.add_constructor(u'!xxx', XXX.yaml_load, constructor=yaml.RoundTripConstructor)
    yaml.add_representer(XXX, XXX.yaml_dump, representer=yaml.RoundTripRepresenter)
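
A hedged round-trip sketch once register_xxx() has run (it relies on ruamel's legacy round-trip API, which the two registrations above target):

from ruamel import yaml

register_xxx()
doc = yaml.load('!xxx {a: 1, b: 2}', Loader=yaml.RoundTripLoader)   # an XXX (CommentedMap subclass)
print(yaml.dump(doc, Dumper=yaml.RoundTripDumper))                   # re-emits the !xxx tag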
Example No. 8
def write_yaml(yaml_list):
    add_constructor(
        resolver.BaseResolver.DEFAULT_MAPPING_TAG,
        lambda loader, node: OrderedDict(loader.construct_pairs(node)))
    yaml = YAML()
    yaml.default_flow_style = False
     
    with open("simplenet_cifar.schedule_agp.yaml", "r") as yf: #yaml基礎ファイル
        data = yaml.load(yf)    # safe_load()を使う
    print(data)
    for i in range(2):
        data['pruners']['conv' + str(i+1) + '_pruner']['final_sparsity'] = yaml_list[i]
    
    #data['version'] = k
    if os.path.isfile("fruits.yaml"):
        os.remove("fruits.yaml")
    
    with open("fruits.yaml", "a") as yf:
        yaml.dump(data, yf)
Example No. 9
def main():
    """Main application entry point."""
    if len(sys.argv) != 3:
        print("Usage: yc-calc <input-file> <output-file>")
        sys.exit(1)

    infile = sys.argv[1]
    outfile = sys.argv[2]

    mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG
    sequence_tag = yaml.resolver.BaseResolver.DEFAULT_SEQUENCE_TAG

    yaml.add_constructor(mapping_tag, dict_constructor,
                         Loader=RoundTripConstructor)
    yaml.add_constructor(sequence_tag, list_constructor,
                         Loader=RoundTripConstructor)

    yaml.add_representer(CalcDict, dict_representer,
                         Dumper=RoundTripRepresenter)
    yaml.add_representer(CalcList, list_representer,
                         Dumper=RoundTripRepresenter)

    try:
        with open(infile) as infp:
            top = YAML().load(infp)

            if not isinstance(top, CalcDict):
                type_name = type(top).__name__
                err("Top level element should be dict not {0}".format(type_name))

            defs = {}
            defs_str = top.get("DEFS", "")

            try:
                exec(defs_str, defs)
            except Exception as exc:
                err("Error executing DEFS: {0}".format(exc))

            CalcContainer.set_top(defs, top)
            write(top, outfile)
    except IOError as exc:
        err("Error opening file: {0}".format(exc))
    except yaml.YAMLError as exc:
        err("Error parsing input: {0}".format(exc))
Example No. 10
def get_cfg(ruta_base=""):
    """Parse the YAML config"""
    pattern = re.compile(r"\$\{(.*)\}(.*)$")
    yaml.add_implicit_resolver("!env", pattern)

    def env_constructor(loader, node):
        """Constructor for environment variables"""
        value = loader.construct_scalar(node)
        env_var, remaining_path = pattern.match(value).groups()
        return os.environ[env_var] + remaining_path

    yaml.add_constructor('!env', env_constructor)
    ruta_cfg = ruta_base + "git_creds.yml"
    with open(ruta_cfg) as config:
        try:
            cfg = yaml.load(config, Loader=yaml.Loader)
        except yaml.YAMLError:
            logging.error("Error while loading config file.")
            raise
    return cfg
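
A hedged illustration of the !env tag registered above (the variable name is made up; it assumes the resolver and constructor registered inside get_cfg are already in place, since add_implicit_resolver and add_constructor act globally):

import os
import yaml

os.environ['GIT_TOKEN_DIR'] = '/tmp/creds'        # hypothetical variable
print(yaml.load('token_path: ${GIT_TOKEN_DIR}/token', Loader=yaml.Loader))
# {'token_path': '/tmp/creds/token'}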
def main():
    sponsors = []

    def timestamp_constructor(loader, node):
        return dateutil.parser.parse(node.value)

    yaml.add_constructor("tag:yaml.org,2002:timestamp", timestamp_constructor)

    zoom_df = pd.read_excel(PATH_ZOOM_ACCOUNTS_WITH_PASSWORDS,
                            sheet_name="Sponsors").fillna("")

    zooms = []
    zoom_names = []
    for _, row in zoom_df.iterrows():
        zooms.append(row["Personal Meeting LINK"])
        zoom_names.append(row["uniqueid"])

    for sponsor_yaml in sorted(root.iterdir()):
        with sponsor_yaml.open() as f:
            raw_sponsor = yaml.load(f, Loader=ruamel.yaml.Loader)
            sponsor = raw_sponsor
        sponsors.append(sponsor)

    for i, sponsor in enumerate(sponsors):
        sponsor_name = sponsor["name"].lower().replace(" ",
                                                       "-").replace(",", "")
        if "hitachi" in sponsor_name.lower():
            sponsor_name = "hitachi"
        sponsor["rocketchat_channel"] = f"sponsor-{sponsor_name}"
        if "zoom_link" not in sponsor:
            assert sponsor["name"] in zoom_names[i], (sponsor["name"],
                                                      zoom_names[i])
            sponsor["zoom_link"] = zooms[i]
        else:
            print(sponsor_name, sponsor["zoom_link"])

    yaml.scalarstring.walk_tree(sponsors)

    with open(PATH_YAMLS / "sponsors.yml", "w") as f:
        yaml.dump(sponsors, f, Dumper=ruamel.yaml.RoundTripDumper)
Example No. 12
def write_yaml(yaml_list):
    add_constructor(
        resolver.BaseResolver.DEFAULT_MAPPING_TAG,
        lambda loader, node: OrderedDict(loader.construct_pairs(node)))
    yaml = YAML()
    yaml.default_flow_style = False

    with open("fbneta_cifar.schedule_agp.yaml", "r") as yf:  #yaml基礎ファイル
        data = yaml.load(yf)  # safe_load()を使う
    for i in range(len(yaml_list)):

        if i == len(yaml_list) - 1:
            if yaml_list[i] <= 0.3:
                yaml_list[i] = 0.3
            data['pruners']['fc_pruner']['final_sparsity'] = yaml_list[i]

        else:
            data['pruners']['conv' + str(i + 1) +
                            '_pruner']['final_sparsity'] = yaml_list[i]
    if os.path.isfile("FB_gene1.yaml"):
        os.remove("FB_gene1.yaml")

    with open("FB_gene1.yaml", "a") as yf:
        yaml.dump(data, yf)
Example No. 13
def setup_yaml_customobjects():
    yaml.add_representer(YAMLFile, representer_yamlfile)
    yaml.add_constructor(u'!yaml', constructor_yamlfile)
Example No. 14
    # self.anchors = {}    # <<<< commented out
    return node


yaml.SafeLoader.compose_document = my_compose_document


# adapted from http://code.activestate.com/recipes/577613-yaml-include-support/
def yaml_include(loader, node):
    with open(node.value) as inputfile:
        return list(my_safe_load(inputfile, master=loader).values())[0]


#              leave out the [0] if your include file drops the key ^^^

yaml.add_constructor("!include", yaml_include, Loader=yaml.SafeLoader)


def my_safe_load(stream, Loader=yaml.SafeLoader, master=None):
    loader = Loader(stream)
    if master is not None:
        loader.anchors = master.anchors

    try:
        return loader.get_single_data()

    finally:
        loader.dispose()
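
A hedged usage sketch of the !include tag above (file names are hypothetical; the included file is expected to hold a single top-level key whose value gets spliced in, hence the [0]):

# part.yaml (hypothetical):
#   fruits:
#     - apple
#     - pear
# main.yaml (hypothetical):
#   menu: !include part.yaml
with open('main.yaml') as f:
    data = my_safe_load(f)      # -> {'menu': ['apple', 'pear']}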


class Config(BaseConfig):
Example No. 15
    "stretch",
    "buster",
    "bullseye",
    "bookworm",
    "trixie",
)


# improve datetime parsing
def timestamp_constructor(loader, node):
    return dateparser.parse(node.value)


yaml.add_constructor(
    'tag:yaml.org,2002:timestamp',
    timestamp_constructor,
    Loader=yaml.SafeLoader,
)
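
A hedged sketch of the constructor's effect: with it registered on SafeLoader, timestamp scalars come back as whatever dateparser.parse returns:

yaml.safe_load('released: 2021-01-11')
# -> {'released': dateparser.parse('2021-01-11')}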


def _platform(dist):
    if dist in RPM_DISTS:
        return "rpm"
    if dist in DEBIAN_DISTS:
        return "deb"
    return dist


def _dist_and_platform(dist):
    plat = _platform(dist)
    if dist == plat:
Example No. 16
# -*- coding:utf-8 -*-
'''
@Time       : 2021/1/11 08:00
@Author     : Test Engineer Jane
@FileName   : __init__.py.py
@Description:
'''
from ruamel import yaml
print(yaml.__file__)


## Define a custom YAML tag handler
def join(loader, node):
    seq = loader.construct_sequence(node)
    return ''.join([str(i) for i in seq])


## Register this YAML tag handler; it is used to join strings inside the YAML
yaml.add_constructor('!join', join)
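
A hedged usage sketch of the !join tag registered above:

print(yaml.load('greeting: !join [Hello, " ", world]', Loader=yaml.Loader))
# {'greeting': 'Hello world'}
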
def ordered_load(stream, *args, **kwargs):
    yaml.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                         construct_yaml_map)

    return yaml.load(stream=stream, *args, **kwargs)
Example No. 18
            yaml.resolver.BaseResolver.DEFAULT_SCALAR_TAG, data, style='|')
    else:
        return dumper.represent_scalar(
            yaml.resolver.BaseResolver.DEFAULT_SCALAR_TAG, data)


def carry_over_compose_document(self):
    self.get_event()
    node = self.compose_node(None, None)
    self.get_event()
    # this prevents cleaning of anchors between documents in **one stream**
    # self.anchors = {}
    return node


yaml.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                     dict_constructor)
yaml.add_representer(collections.OrderedDict, dict_representer)
yaml.add_representer(str, literal_str_representer)
yaml.composer.Composer.compose_document = carry_over_compose_document
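
A hedged illustration of why compose_document is patched above: an anchor defined in one document of a stream stays resolvable in the following documents.

stream = "defaults: &d {retries: 3}\n---\njob: *d\n"
docs = list(yaml.load_all(stream, Loader=yaml.Loader))
# docs[1] == {'job': {'retries': 3}}; without the patch the alias 'd' would be undefined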


@app.context_processor
def inject_sysinfo():
    return dict(sysinfo=dict(build=__version__))


@app.context_processor
def inject_user():
    return dict(user=session['me']) if 'me' in session else dict(user=None)

Example No. 19
            import occo.exceptions
            raise occo.exceptions.ConfigurationError(
                '\nNo authentication file has been found on these locations:\n{0}\n'.format(
                '\n'.join(' - {0!r}'.format(p) for p in possible_auth_data_locations)))
        else:
            sys.stderr.write(
                'Using default authentication file: {0!r}\n'.format(cfg.auth_data_path))
    else:
        if not os.path.exists(cfg.auth_data_path):
            import occo.exceptions
            raise occo.exceptions.ConfigurationError('Specified authentication file does not exist: \'{0}\''.format(cfg.auth_data_path))
    #
    ## Setup logging
    #
    import logging
    import logging.config
    logging.config.dictConfig(
        cfg.configuration.get('logging', DEFAULT_LOGGING_CFG))

    log = logging.getLogger('occo')
    log.info('Starting up; PID = %d', os.getpid())

    return cfg

#
# Register YAML constructors
#
yaml.add_constructor('!python_import', PythonImport())
yaml.add_constructor('!yaml_import', YAMLImport(YAMLLoad_Parsed))
yaml.add_constructor('!text_import', YAMLImport(YAMLLoad_Raw))
Example No. 20
# -*- coding: utf-8 -*-
#  Copyright (c) 2020. Distributed under the terms of the MIT License.
from collections import OrderedDict
from dataclasses import dataclass, asdict
from pathlib import Path
from typing import List

import numpy as np
from monty.json import MSONable
from monty.serialization import loadfn
from ruamel.yaml import add_constructor, resolver, YAML


# courtesy of https://qiita.com/konomochi/items/f5f53ba8efa07ec5089b
add_constructor(resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                lambda loader, node: OrderedDict(loader.construct_pairs(node)))

yaml = YAML()
yaml.default_flow_style = False


@dataclass
class Unitcell(MSONable):
    system: str
    vbm: float
    cbm: float
    ele_dielectric_const: List[List[float]]
    ion_dielectric_const: List[List[float]]

    @property
    def dielectric_constant(self):
Example No. 21
        if hasattr(data, "__setstate__"):
            data.__setstate__(state)
        yield data

    def fetch_comment(self, comment):
        raise NotImplementedError


def _construct_include(loader: CustomYamlLoader, node: yaml.Node) -> t.Any:
    """Include file referenced at node."""
    filepath = os.path.abspath(
        os.path.join(loader.root, loader.construct_scalar(node)))
    return load_yaml_from_filepath(filepath, master=loader)


yaml.add_constructor("!include", _construct_include, CustomYamlLoader)


def load_yaml(stream: t.Union[str, t.IO],
              master: CustomYamlLoader = None,
              version: str = None) -> t.Any:
    """
    Own YAML-deserialization based on:
        * ruamel.yaml (some additional bugfixes vs regular PyYaml module)
        * unsafe loading (be sure to use it only for own datafiles)
        * YAML inclusion feature
    """
    loader = CustomYamlLoader(stream, version=version)
    if master is not None:
        loader.anchors = master.anchors
    try:
Example No. 22
        data = cls.__new__(cls, **state)
        if hasattr(data, '__setstate__'):
            data.__setstate__(state)
        yield data

    def fetch_comment(self, comment):
        raise NotImplementedError


def construct_include(loader: CustomLoader, node: yaml.Node) -> t.Any:
    """Include file referenced at node."""
    filepath = os.path.abspath(os.path.join(loader.root, loader.construct_scalar(node)))
    return load_from_filepath(filepath, master=loader)


yaml.add_constructor('!include', construct_include, CustomLoader)


def load(
        stream: t.Union[str, t.IO],
        master: CustomLoader = None,
        version: str = None
) -> t.Any:
    """
    Own YAML-deserialization based on:
        * ruamel.yaml (some additional bugfixes vs regular PyYaml module)
        * unsafe loading (be sure to use it only for own datafiles)
        * YAML inclusion feature
    """
    loader = CustomLoader(stream, version=version)
    if master is not None:
Example No. 23
import progressbar
from munch import Munch, unmunchify

_mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG


def dict_representer(dumper, data):
    return dumper.represent_dict(iter(list(data.items())))


def dict_constructor(loader, node):
    return OrderedDict(loader.construct_pairs(node))


yaml.add_representer(OrderedDict, dict_representer)
yaml.add_constructor(_mapping_tag, dict_constructor)
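
A hedged illustration of the two registrations above: mappings load as OrderedDict, and because dict_representer hands the dumper an items() iterator rather than the dict itself, they dump back in insertion order instead of being sorted.

data = yaml.load('b: 2\na: 1\n', Loader=yaml.Loader)
# data == OrderedDict([('b', 2), ('a', 1)])
print(yaml.dump(data, default_flow_style=False))
# emits "b: 2" before "a: 1"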


class Framework(Munch):
    def __init__(self,
                 directory='test',
                 master_lattice=None,
                 overwrite=None,
                 runname='CLARA_240',
                 clean=False,
                 verbose=True,
                 sddsindex=0):
        super(Framework, self).__init__()
        # global master_lattice_location
        self.global_parameters = {
            'beam': rbf.beam(sddsindex=sddsindex),
Example No. 24
logger = make_logger(__name__)

_yaml_mapping = resolver.BaseResolver.DEFAULT_MAPPING_TAG


def odict_represent(dumper, data):
    return dumper.represent_dict(data.items())


def odict_construct(loader, node):
    return OrderedDict(loader.construct_pairs(node))


add_representer(OrderedDict, odict_represent)
add_constructor(_yaml_mapping, odict_construct)


class Meta(AbstractMeta):
    @property
    @abc.abstractmethod
    def meta_header(self):
        """"""

    @property
    @abc.abstractmethod
    def meta_version(self):
        """"""

    @abc.abstractmethod
    def meta_version_upgrade(self, from_version):
def ordered_load(stream, *args, **kwargs):
    yaml.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                         construct_yaml_map)

    return yaml.safe_load(stream=stream, *args, **kwargs)
Example No. 26
    # Otherwise, resolve that segment and recurse.
    return walk_dict(d[path[0]], path[1:])

def quantity_constructor(loader, node):
    """
    Constructs a `pq.Quantity` instance from a PyYAML
    node tagged as ``!Q``.
    """
    # Follows the example of http://stackoverflow.com/a/43081967/267841.
    value = loader.construct_scalar(node)
    return pq.Quantity(*split_unit_str(value))

# We avoid having to register !Q every time by doing as soon as the
# relevant constructor is defined.
yaml.add_constructor(u'!Q', quantity_constructor)
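
A hedged usage sketch of the !Q tag registered above (it assumes split_unit_str splits "10 ns" into the magnitude and the unit string):

cfg = yaml.load('delay: !Q 10 ns', Loader=yaml.Loader)
# -> {'delay': pq.Quantity(10.0, 'ns')}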

def load_instruments(conf_file_name, conf_path="/"):
    """
    Given the path to a YAML-formatted configuration file and a path within
    that file, loads the instruments described in that configuration file.
    The subsection of the configuration file is expected to look like a map from
    names to YAML nodes giving the class and instrument URI for each instrument.
    For example::

        ddg:
            class: !!python/name:instruments.srs.SRSDG645
            uri: gpib+usb://COM7/15

    Loading instruments from this configuration will result in a dictionary of
    the form
Example No. 27
        if hasattr(data, '__setstate__'):
            data.__setstate__(state)
        yield data

    def fetch_comment(self, comment):
        raise NotImplementedError


def construct_include(loader: CustomLoader, node: yaml.Node) -> t.Any:
    """Include file referenced at node."""
    filepath = os.path.abspath(
        os.path.join(loader.root, loader.construct_scalar(node)))
    return load_from_filepath(filepath, master=loader)


yaml.add_constructor('!include', construct_include, CustomLoader)


def load(stream: t.Union[str, t.IO],
         master: CustomLoader = None,
         version: str = None) -> t.Any:
    """
    Own YAML-deserialization based on:
        * ruamel.yaml (some additional bugfixes vs regular PyYaml module)
        * unsafe loading (be sure to use it only for own datafiles)
        * YAML inclusion feature
    """
    loader = CustomLoader(stream, version=version)
    if master is not None:
        loader.anchors = master.anchors
    try:
Example No. 28
# -*- coding: utf-8 -*-
from pkgutil import get_data
from collections import OrderedDict

import six
import ruamel.yaml as yaml

from .language import Language


# support `!include` directive
def yaml_include(loader, node):
    return yaml.load(get_data('data', node.value))


yaml.add_constructor("!include", yaml_include)


class LanguageDataLoader(object):
    _data = None

    def __init__(self, file=None):
        if isinstance(file, six.string_types):
            file = open(file)
        self.file = file

    def get_language_map(self):
        if self._data is None:
            self._load_data()
        return self._data
Example No. 29
def represent_student(dumper, data):
    # type: (yaml.Dumper, Student) -> yaml.Node
    return dumper.represent_mapping(u'!Student', {
        u'id': data.id,
        u'name': data.name,
        u'email': data.email
    })


def construct_student(loader, node):
    # type: (yaml.Loader, yaml.Node) -> Student
    return Student(**loader.construct_mapping(node))


yaml.add_representer(Student, represent_student)
yaml.add_constructor(u'!Student', construct_student)
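
A hedged round-trip sketch for the Student handlers above (it assumes Student accepts id, name and email keyword arguments, which construct_student already relies on):

s = Student(id=1, name='Ada Lovelace', email='ada@example.org')
text = yaml.dump(s)                          # emitted as a !Student mapping
again = yaml.load(text, Loader=yaml.Loader)  # rebuilt via construct_student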


def represent_assignment(dumper, data):
    # type: (yaml.Dumper, Assignment) -> yaml.Node
    return dumper.represent_mapping(u'!Assignment', {
        u'id': data.id,
        u'name': data.name,
        u'full_credit': data.full_credit
    })


def construct_assignment(loader, node):
    # type: (yaml.Loader, yaml.Node) -> Assignment
    return Assignment(**loader.construct_mapping(node))
Example No. 30
# -*- coding: utf-8 -*-
from pkgutil import get_data
from collections import OrderedDict

import six
import ruamel.yaml as yaml

from .language import Language


# support `!include` directive
def yaml_include(loader, node):
    return yaml.load(get_data('data', node.value))

yaml.add_constructor("!include", yaml_include)


class LanguageDataLoader(object):
    _data = None

    def __init__(self, file=None):
        if isinstance(file, six.string_types):
            file = open(file)
        self.file = file

    def get_language_map(self):
        if self._data is None:
            self._load_data()
        return self._data

    def get_languages(self):
Example No. 31
    return walk_dict(d[path[0]], path[1:])


def quantity_constructor(loader, node):
    """
    Constructs a `pq.Quantity` instance from a PyYAML
    node tagged as ``!Q``.
    """
    # Follows the example of http://stackoverflow.com/a/43081967/267841.
    value = loader.construct_scalar(node)
    return pq.Quantity(*split_unit_str(value))


# We avoid having to register !Q every time by doing as soon as the
# relevant constructor is defined.
yaml.add_constructor(u'!Q', quantity_constructor)


def load_instruments(conf_file_name, conf_path="/"):
    """
    Given the path to a YAML-formatted configuration file and a path within
    that file, loads the instruments described in that configuration file.
    The subsection of the configuration file is expected to look like a map from
    names to YAML nodes giving the class and instrument URI for each instrument.
    For example::

        ddg:
            class: !!python/name:instruments.srs.SRSDG645
            uri: gpib+usb://COM7/15

    Loading instruments from this configuration will result in a dictionary of