Beispiel #1
0
    def __init__(self, *args, **kwargs):
        """Initialize the base Loader, then route both plain mappings and
        ordered mappings (omap) through construct_yaml_map."""
        Loader.__init__(self, *args, **kwargs)
        for tag in ('tag:yaml.org,2002:map', 'tag:yaml.org,2002:omap'):
            self.add_constructor(tag, type(self).construct_yaml_map)
Beispiel #2
0
    def load(cls,
             stream,
             constructors=None,
             multi_constructors=None,
             implicit_resolvers=None):
        """Parse a single YAML document from *stream*.

        The class-level registries (``_constructors``, ``_multi_constructors``,
        ``_implicit_resolvers``) are merged with the caller-supplied mappings,
        caller entries winning, and installed on a fresh per-call Loader so no
        global Loader state is mutated.

        Returns the constructed document; the loader is always disposed.
        """
        loader = Loader(stream)

        # Merge defaults with caller overrides (caller wins on key clash).
        cs = dict(cls._constructors)
        cs.update(constructors or {})

        mcs = dict(cls._multi_constructors)
        mcs.update(multi_constructors or {})

        ir = dict(cls._implicit_resolvers)
        ir.update(implicit_resolvers or {})

        # Iterating an empty dict is a no-op, so no truthiness guards needed.
        for name, constructor in cs.items():
            loader.add_constructor(name, constructor)

        for name, constructor in mcs.items():
            loader.add_multi_constructor(name, constructor)

        for name, pattern in ir.items():
            loader.add_implicit_resolver(name, pattern, None)

        try:
            return loader.get_single_data()
        finally:
            loader.dispose()
Beispiel #3
0
    def register(cls):
        """Closure registering the passed class.

        Validates that *cls* provides callable to_yaml/from_yaml hooks, then
        wires it into the module's Dumper/Loader under the enclosing `tag`
        (and optional `pattern` for implicit resolution).
        """

        # Test the presence and usability of the functions
        try:
            tested = cls.to_yaml, cls.from_yaml
        except AttributeError:
            raise TypeError('Missing YAML serialization method')

        if not all(isinstance(f, Callable) for f in tested):
            raise TypeError('YAML serialization method(s) are not callable')

        # Make conversion handlers.
        # NOTE(review): `type`, `tag` and `pattern` are free variables from the
        # enclosing scope (`type` shadows the builtin) — confirm their meaning
        # in the outer decorator factory.
        def dump(dumper: Dumper, value: Any) -> yaml.Node:
            return type.represent(dumper)(tag, cls.to_yaml(value))

        def load(loader: Loader, node: yaml.Node) -> Any:
            return cls.from_yaml(type.construct(loader)(node))

        # Register conversions (global side effect on Dumper/Loader classes)
        Dumper.add_representer(cls, dump)
        Loader.add_constructor(tag, load)

        if pattern is not None:
            regexp = re.compile(pattern)
            Dumper.add_implicit_resolver(tag, regexp, None)
            Loader.add_implicit_resolver(tag, regexp, None)

        return cls
Beispiel #4
0
def test_app_config():
    """Check the live cluster app config against 'cartridge_app_config'."""
    expected = utils.get_cluster_var('cartridge_app_config')
    if not expected:
        return

    # Nothing to verify when no instances are configured.
    if not utils.get_configured_instances():
        return

    # Fetch the current config from any reachable instance.
    config_url = '%s/admin/config' % utils.get_any_instance_url()
    response = utils.get_authorized_session().get(config_url)
    assert response.status_code == 200

    app_config = Loader(response.content).get_data()

    # Every configured section must match; deleted sections must be absent.
    for name, section in expected.items():
        if section_is_deleted(section):
            assert name not in app_config
        else:
            assert name in app_config
            assert app_config[name] == section['body']
Beispiel #5
0
def load_yaml(fn, q, pid, output_lock, semp):
    """Worker: parse every YAML document in file *fn*, reporting via queue *q*.

    Protocol on *q*: first (pid, 'size', file_size(file)), finally
    (pid, 'done', [docs...]).  *semp* bounds how many workers parse
    concurrently; a daemon thread (`updater`) emits progress while parsing.
    """

    # output_lock.acquire()
    # print('load_yaml({0}, {1}, {2})'.format(fn, q, pid))
    # output_lock.release()

    with open(fn) as file:
        q.put( (pid, 'size', file_size(file)) )

        # Hold the semaphore only around the expensive parse phase.
        semp.acquire()

        finished = threading.Event()
        update_thread = threading.Thread(target=updater, args=(finished, file, q, pid))
        update_thread.daemon = True
        update_thread.start()

        loader = Loader(file)
        stats = []
        while loader.check_data():
            stats.append( loader.get_data() )

        semp.release()

        # Stop the progress thread before closing the file it reads from.
        finished.set()
        update_thread.join()

    q.put( (pid, 'done', stats) )
Beispiel #6
0
def get_yaml_docs():
    """Parse the YAML file"""
    source = read_yaml_file(args['infile'])

    # An optional template file is prepended to the input before parsing.
    if args.get('template'):
        source = read_yaml_file(args['template']) + source

    source_str = ''.join(entry[0] for entry in source)

    def mark_str(mark):
        # Render a caret diagnostic pointing at the offending column.
        entry = source[mark.line]
        return ("In file " + entry[1] + ", line " + str(entry[2]) + ", column " +
                str(mark.column + 1) + ":\n" + entry[0].rstrip() + "\n" +
                ' ' * mark.column + "^\n")

    # We iterate through all of the documents to properly diagnose errors,
    # because the load_all generator does not handle exceptions correctly.
    docs = []
    loader = Loader(source_str)
    while loader.check_data():
        try:
            docs.append(loader.get_data())
        except yaml.YAMLError as err:
            pieces = []
            if err.problem_mark:
                pieces.append(mark_str(err.problem_mark))
            if err.problem:
                pieces.append(err.problem + "\n")
            if err.note:
                pieces.append(err.note + "\n")
            sys.exit(''.join(pieces))
    return docs
Beispiel #7
0
 def process_notify(self, notification):
     """Process events"""
     # Parse the events stream; the notification and system objects are
     # attached to the loader instance — presumably so custom tag
     # constructors can read them during construction (confirm in the
     # registered constructors).
     loader = Loader(self.events_stream)
     setattr(loader, 'notification', notification)
     setattr(loader, 'system', self.system)
     notifications = loader.get_data()
     for notify_name in notifications:
         logging.debug('Process "{}" notification'.format(notify_name))
         # Entries that resolved to None are skipped rather than sent.
         if notifications[notify_name] is not None:
             self.send_data(notifications[notify_name])
def check_conf_file(conf_file, instance_id, conf):
    """Assert ownership and YAML content of an instance's config file."""
    assert conf_file.exists
    assert conf_file.user == 'tarantool'
    assert conf_file.group == 'tarantool'

    parsed = Loader(conf_file.content_string).get_data()

    # The file must carry the expected section for this instance.
    assert instance_id in parsed
    assert parsed[instance_id] == conf
def load_bindings(root, binding_dirs):
    """Load devicetree binding YAML files whose compat strings occur in *root*.

    Side effects: fills extract.globals.bindings / bus_bindings /
    binding_compats.  Relies on find_binding_files() populating the
    module-level `binding_files` list.
    """
    find_binding_files(binding_dirs)
    dts_compats = all_compats(root)

    compat_to_binding = {}
    # Maps buses to dictionaries that map compats to YAML nodes
    bus_to_binding = defaultdict(dict)
    compats = []

    # Add '!include foo.yaml' handling
    Loader.add_constructor('!include', yaml_include)

    # Code below is adapated from edtlib.py

    # Searches for any 'compatible' string mentioned in the devicetree
    # files, with a regex
    dt_compats_search = re.compile(
        "|".join(re.escape(compat) for compat in dts_compats)
    ).search

    for file in binding_files:
        with open(file, encoding="utf-8") as f:
            contents = f.read()

        # Cheap textual pre-filter before paying for a YAML parse.
        if not dt_compats_search(contents):
            continue

        binding = yaml.load(contents, Loader=Loader)

        binding_compats = _binding_compats(binding)
        if not binding_compats:
            continue

        # Second parse of the same file, this time merging '!include'd
        # bindings into a single document.
        with open(file, 'r', encoding='utf-8') as yf:
            binding = merge_included_bindings(file,
                                              yaml.load(yf, Loader=Loader))

        for compat in binding_compats:
            if compat not in compats:
                compats.append(compat)

            # Both the 'parent-bus' key and the nested 'parent'/'bus'
            # spelling are honored.
            if 'parent-bus' in binding:
                bus_to_binding[binding['parent-bus']][compat] = binding

            if 'parent' in binding:
                bus_to_binding[binding['parent']['bus']][compat] = binding

            compat_to_binding[compat] = binding

    if not compat_to_binding:
        raise Exception("No bindings found in '{}'".format(binding_dirs))

    extract.globals.bindings = compat_to_binding
    extract.globals.bus_bindings = bus_to_binding
    extract.globals.binding_compats = compats
Beispiel #10
0
 def process_measurements(self):
     """Process measurements"""
     # Parse the measurements stream; collector/system/config are attached
     # to the loader instance — presumably for use by custom tag
     # constructors during construction (confirm in those constructors).
     loader = Loader(self.measurements_stream)
     setattr(loader, 'collector', self.collector)
     setattr(loader, 'system', self.system)
     setattr(loader, 'config', self.config)
     measurements = loader.get_data()
     for measurement_name in measurements:
         logging.debug('Process "{}" measurements: {}'.format(
             measurement_name, measurements[measurement_name]))
         # Each named entry holds a list of individual measurements.
         for measurement in measurements[measurement_name]:
             self.send_data(measurement)
Beispiel #11
0
class _BaseLoader(Loader):
    """
    YAML loader with additional features related to mux
    """
    def _make_control(code):
        # Build a constructor returning a mux.Control for this code.
        return lambda loader, node: mux.Control(code)

    # Registered on the shared Loader base class, exactly as before.
    Loader.add_constructor(u'!include', _make_control(YAML_INCLUDE))
    Loader.add_constructor(u'!using', _make_control(YAML_USING))
    Loader.add_constructor(u'!remove_node', _make_control(YAML_REMOVE_NODE))
    Loader.add_constructor(u'!remove_value', _make_control(YAML_REMOVE_VALUE))
    del _make_control
Beispiel #12
0
def OrderedYaml():
    """Wire Loader/Dumper to round-trip OrderedDict as plain YAML mappings."""
    mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG

    def represent_ordered_dict(dumper, data):
        return dumper.represent_dict(data.items())

    def construct_ordered_dict(loader, node):
        return OrderedDict(loader.construct_pairs(node))

    Dumper.add_representer(OrderedDict, represent_ordered_dict)
    Loader.add_constructor(mapping_tag, construct_ordered_dict)
    return Loader, Dumper
Beispiel #13
0
def dump_to_yaml(data, yamlFile):
    """
    Dump the data to a yaml file.
    See: https://gist.github.com/oglops/c70fb69eef42d40bed06
    """
    def noop(self, *args, **kw):
        "Don't emit tags: see https://stackoverflow.com/a/48823424/7874784"
        pass

    # Monkey-patch: disables tag emission for EVERY Emitter in the process,
    # not just for this dump call.
    yaml.emitter.Emitter.process_tag = noop
    # NOTE(review): dict_representer, dict_constructor and _mapping_tag are
    # free names, presumably defined at module level — verify they exist.
    Dumper.add_representer(OrderedDict, dict_representer)
    Loader.add_constructor(_mapping_tag, dict_constructor)
    Dumper.add_representer(str, SafeRepresenter.represent_str)
    with open(yamlFile, 'w') as outfile:
        # NOTE(review): yaml.dump is called without Dumper=Dumper, so the
        # representers registered above are not used here — confirm intent.
        outfile.writelines(yaml.dump(data, default_flow_style=False))
Beispiel #14
0
    def yaml_dumper():
        """Build a Dumper that writes OrderedDicts as plain maps (Python 2 API)."""
        try:
            # Prefer the C-accelerated classes when libyaml is available.
            from yaml import CLoader as Loader, CDumper as Dumper
        except ImportError:
            from yaml import Loader, Dumper
        from yaml.representer import SafeRepresenter
        _mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG

        def dict_representer(dumper, data):
            # NOTE(review): dict.iteritems() is Python-2-only; AttributeError
            # on Python 3 — confirm the target runtime.
            return dumper.represent_dict(data.iteritems())

        def dict_constructor(loader, node):
            return OrderedDict(loader.construct_pairs(node))

        Dumper.add_representer(OrderedDict, dict_representer)
        Loader.add_constructor(_mapping_tag, dict_constructor)

        Dumper.add_representer(str, SafeRepresenter.represent_str)

        # NOTE(review): `unicode` only exists on Python 2; this raises
        # NameError on Python 3 (the sibling variant guards it with PY3).
        Dumper.add_representer(unicode, SafeRepresenter.represent_unicode)
        return Dumper
Beispiel #15
0
def ordered_yaml():
    """Support OrderedDict for yaml.

    Returns:
        yaml Loader and Dumper.
    """
    # Use the C-accelerated implementations when libyaml is installed.
    try:
        from yaml import CLoader as Loader, CDumper as Dumper
    except ImportError:
        from yaml import Loader, Dumper

    mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG

    def represent_odict(dumper, data):
        return dumper.represent_dict(data.items())

    def construct_odict(loader, node):
        return OrderedDict(loader.construct_pairs(node))

    Dumper.add_representer(OrderedDict, represent_odict)
    Loader.add_constructor(mapping_tag, construct_odict)
    return Loader, Dumper
Beispiel #16
0
def _create_from_yaml(path, cls_node=TreeNode):
    """ Create tree structure from yaml stream

    Registers a mapping constructor on the shared Loader class (a process-wide
    side effect), then parses the file at *path* into a tree of *cls_node*.
    """
    def tree_node_from_values(name, values):
        """ Create `name` node and add values  """
        node_children = []
        node_values = []
        for value in values:
            if isinstance(value, TreeNode):
                node_children.append(value)
            else:
                node_values.append(value)
        return cls_node(name, dict(node_values), children=node_children)

    def mapping_to_tree_loader(loader, node):
        """ Maps yaml mapping tag to TreeNode structure """
        def is_node(values):
            """ Whether these values represent node or just random values """
            # When any value is TreeNode or Value, all of them are already
            # parsed and we can wrap them into self.  FIX: return an explicit
            # bool instead of True-or-implicit-None.
            return bool(isinstance(values, list) and values
                        and isinstance(values[0], (Value, TreeNode)))

        _value = loader.construct_pairs(node)
        objects = []
        for name, values in _value:
            if is_node(values):    # New node
                objects.append(tree_node_from_values(name, values))
            elif values is None:            # Empty node
                objects.append(cls_node(name))
            else:                           # Values
                objects.append(Value((name, values)))
        return objects

    Loader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                           mapping_to_tree_loader)

    with open(path) as stream:
        return tree_node_from_values('', yaml.load(stream, Loader))
Beispiel #17
0
def OrderedYaml():
    '''yaml orderedDict support

    Returns (Loader, Dumper) with OrderedDict round-tripping and a widened
    float resolver installed (both are process-wide class-level side effects).
    '''
    _mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG

    def dict_representer(dumper, data):
        return dumper.represent_dict(data.items())

    def dict_constructor(loader, node):
        return OrderedDict(loader.construct_pairs(node))

    Dumper.add_representer(OrderedDict, dict_representer)
    Loader.add_constructor(_mapping_tag, dict_constructor)
    # Re-register the float resolver with a pattern that also accepts
    # exponent-only literals such as '1e3' (second alternative below) —
    # presumably a workaround for the stock PyYAML resolver requiring a
    # '.'; confirm against the PyYAML resolver source.
    Loader.add_implicit_resolver(
        u'tag:yaml.org,2002:float',
        re.compile(
            u'''^(?:
         [-+]?(?:[0-9][0-9_]*)\\.[0-9_]*(?:[eE][-+]?[0-9]+)?
        |[-+]?(?:[0-9][0-9_]*)(?:[eE][-+]?[0-9]+)
        |\\.[0-9_]+(?:[eE][-+][0-9]+)?
        |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*
        |[-+]?\\.(?:inf|Inf|INF)
        |\\.(?:nan|NaN|NAN))$''', re.X), list(u'-+0123456789.'))
    return Loader, Dumper
Beispiel #18
0
    def yaml_dumper():
        """Build a Dumper with OrderedDict and literal-block string support."""
        try:
            # Prefer the C-accelerated classes when libyaml is available.
            from yaml import CLoader as Loader, CDumper as Dumper
        except ImportError:
            from yaml import Loader, Dumper
        from yaml.representer import SafeRepresenter
        _mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG

        def dict_representer(dumper, data):
            return dumper.represent_dict(iteritems(data))

        def dict_constructor(loader, node):
            return OrderedDict(loader.construct_pairs(node))

        Dumper.add_representer(OrderedDict, dict_representer)
        Loader.add_constructor(_mapping_tag, dict_constructor)

        def str_representer_pipestyle(dumper, data):
            # Multi-line strings dump as literal blocks ('|'); others use
            # the default style.
            style = '|' if '\n' in data else None
            return dumper.represent_scalar('tag:yaml.org,2002:str',
                                           data,
                                           style=style)

        Dumper.add_representer(str, str_representer_pipestyle)

        # Python 2 needs the same treatment for the separate unicode type.
        if not PY3:

            def unicode_representer_pipestyle(dumper, data):
                style = u'|' if u'\n' in data else None
                return dumper.represent_scalar(u'tag:yaml.org,2002:str',
                                               data,
                                               style=style)

            Dumper.add_representer(unicode, unicode_representer_pipestyle)

        return Dumper
Beispiel #19
0
class _BaseLoader(Loader):
    """
    YAML loader with additional features related to mux
    """
    def _make_control(code):
        # Build a constructor that ignores its arguments and emits a
        # mux.Control carrying the given code.
        return lambda *_: mux.Control(code)

    # Registered on the shared Loader base class, exactly as before.
    Loader.add_constructor(u'!include', _make_control(YAML_INCLUDE))
    Loader.add_constructor(u'!using', _make_control(YAML_USING))
    Loader.add_constructor(u'!remove_node', _make_control(YAML_REMOVE_NODE))
    Loader.add_constructor(u'!remove_value', _make_control(YAML_REMOVE_VALUE))
    Loader.add_constructor(u'!filter-only', _make_control(YAML_FILTER_ONLY))
    Loader.add_constructor(u'!filter-out', _make_control(YAML_FILTER_OUT))
    del _make_control
Beispiel #20
0
try:
    from yaml import load, dump
    # Prefer the C-accelerated loader/dumper when libyaml is available.
    from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
    from yaml import Loader, Dumper

# Only turn on converting all strings to unicode by the YAML loader
# if running Python 2.7 or higher. 2.6 does not seem to like unicode dict keys.
# ???
#
if sys.version_info[0] != 2 or sys.version_info[1] >= 7:

    def construct_yaml_unistr(self, node):
        # Return the scalar unchanged (unicode on py2) instead of the stock
        # 'str' constructor's byte-string coercion.
        return self.construct_scalar(node)

    # NOTE(review): registered on the Loader class itself, so this affects
    # every user of this Loader in the process.
    Loader.add_constructor(u'tag:yaml.org,2002:str', construct_yaml_unistr)

EXP_SCRIPT_DIRECTORY = ''

import constants
from constants import EventConstants, DeviceConstants
from constants import KeyboardConstants, MouseConstants, EyeTrackerConstants

from util import print2err, printExceptionDetailsToStdErr, ioHubError
from util import fix_encoding, OrderedDict, module_directory, updateDict
from util import isIterable, getCurrentDateTimeString, convertCamelToSnake
from util import ProgressBarDialog, MessageDialog, FileDialog, ioHubDialog
from util import win32MessagePump

# Presumably normalizes stdout/stderr encoding early — see util.fix_encoding.
fix_encoding.fix_encoding()
Beispiel #21
0
def _create_from_yaml(path, cls_node=tree.TreeNode):
    """ Create tree structure from yaml stream """
    def tree_node_from_values(name, values):
        """ Create `name` node and add values  """
        node = cls_node(str(name))
        using = ''
        for value in values:
            if isinstance(value, tree.TreeNode):
                node.add_child(value)
            elif isinstance(value[0], tree.Control):
                if value[0].code == YAML_INCLUDE:
                    # Include file
                    ypath = value[1]
                    if not os.path.isabs(ypath):
                        ypath = os.path.join(os.path.dirname(path), ypath)
                    if not os.path.exists(ypath):
                        raise ValueError("File '%s' included from '%s' does not "
                                         "exist." % (ypath, path))
                    node.merge(_create_from_yaml('/:' + ypath, cls_node))
                elif value[0].code == YAML_USING:
                    if using:
                        raise ValueError("!using can be used only once per "
                                         "node! (%s:%s)" % (path, name))
                    using = value[1]
                    if using[0] == '/':
                        using = using[1:]
                    if using[-1] == '/':
                        using = using[:-1]
                elif value[0].code == YAML_REMOVE_NODE:
                    value[0].value = value[1]   # set the name
                    node.ctrl.append(value[0])    # add "blue pill" of death
                elif value[0].code == YAML_REMOVE_VALUE:
                    value[0].value = value[1]   # set the name
                    node.ctrl.append(value[0])
                elif value[0].code == YAML_MUX:
                    node.multiplex = True
            else:
                node.value[value[0]] = value[1]
        if using:
            # FIX: was `name is not ''` — an identity comparison against a
            # string literal (unreliable; SyntaxWarning on Python 3.8+).
            # Equality is the intended test.
            if name != '':
                for name in using.split('/')[::-1]:
                    node = cls_node(name, children=[node])
            else:
                using = using.split('/')[::-1]
                node.name = using.pop()
                while True:
                    if not using:
                        break
                    name = using.pop()  # 'using' is list pylint: disable=E1101
                    node = cls_node(name, children=[node])
                node = cls_node('', children=[node])
        return node

    def mapping_to_tree_loader(loader, node):
        """ Maps yaml mapping tag to TreeNode structure """
        _value = []
        for key_node, value_node in node.value:
            if key_node.tag.startswith('!'):    # reflect tags everywhere
                key = loader.construct_object(key_node)
            else:
                key = loader.construct_python_str(key_node)
            value = loader.construct_object(value_node)
            _value.append((key, value))
        objects = ListOfNodeObjects()
        for name, values in _value:
            if isinstance(values, ListOfNodeObjects):   # New node from list
                objects.append(tree_node_from_values(name, values))
            elif values is None:            # Empty node
                objects.append(cls_node(str(name)))
            else:                           # Values
                objects.append(Value((name, values)))
        return objects

    def mux_loader(loader, obj):
        """
        Special !mux loader which allows to tag node as 'multiplex = True'.
        """
        if not isinstance(obj, yaml.ScalarNode):
            objects = mapping_to_tree_loader(loader, obj)
        else:   # This means it's empty node. Don't call mapping_to_tree_loader
            objects = ListOfNodeObjects()
        objects.append((tree.Control(YAML_MUX), None))
        return objects

    # NOTE(review): constructors are registered on the shared Loader class —
    # a process-wide side effect repeated on every call of this function.
    Loader.add_constructor(u'!include',
                           lambda loader, node: tree.Control(YAML_INCLUDE))
    Loader.add_constructor(u'!using',
                           lambda loader, node: tree.Control(YAML_USING))
    Loader.add_constructor(u'!remove_node',
                           lambda loader, node: tree.Control(YAML_REMOVE_NODE))
    Loader.add_constructor(u'!remove_value',
                           lambda loader, node: tree.Control(YAML_REMOVE_VALUE))
    Loader.add_constructor(u'!mux', mux_loader)
    Loader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                           mapping_to_tree_loader)

    # Parse file name ([$using:]$path)
    path = __RE_FILE_SPLIT.split(path, 1)
    if len(path) == 1:
        path = __RE_FILE_SUBS.sub(':', path[0])
        using = ["run"]
    else:
        # A "$using:" prefix places the tree under the given node path.
        nodes = __RE_FILE_SUBS.sub(':', path[0]).strip('/').split('/')
        using = [node for node in nodes if node]
        if not path[0].startswith('/'):  # relative path, put into /run
            using.insert(0, 'run')
        path = __RE_FILE_SUBS.sub(':', path[1])

    # Load the tree
    with open(path) as stream:
        loaded_tree = yaml.load(stream, Loader)
        # An empty YAML document yields None; treat it as "no tree".
        if loaded_tree is None:
            return
        loaded_tree = tree_node_from_values('', loaded_tree)

    # Add prefix
    if using:
        loaded_tree.name = using.pop()
        while True:
            if not using:
                break
            loaded_tree = cls_node(using.pop(), children=[loaded_tree])
        loaded_tree = cls_node('', children=[loaded_tree])
    return loaded_tree
Beispiel #22
0
def _create_from_yaml(path, cls_node=TreeNode):
    """ Create tree structure from yaml stream """
    def tree_node_from_values(name, values):
        """ Create `name` node and add values  """
        node = cls_node(str(name))
        using = ''
        for value in values:
            if isinstance(value, TreeNode):
                node.add_child(value)
            elif isinstance(value[0], Control):
                if value[0].code == YAML_INCLUDE:
                    # Include file
                    ypath = value[1]
                    if not os.path.isabs(ypath):
                        ypath = os.path.join(os.path.dirname(path), ypath)
                    if not os.path.exists(ypath):
                        raise ValueError(
                            "File '%s' included from '%s' does not "
                            "exist." % (ypath, path))
                    node.merge(_create_from_yaml('/:' + ypath, cls_node))
                elif value[0].code == YAML_USING:
                    if using:
                        raise ValueError("!using can be used only once per "
                                         "node! (%s:%s)" % (path, name))
                    using = value[1]
                    if using[0] == '/':
                        using = using[1:]
                    if using[-1] == '/':
                        using = using[:-1]
                elif value[0].code == YAML_REMOVE_NODE:
                    value[0].value = value[1]  # set the name
                    node.ctrl.append(value[0])  # add "blue pill" of death
                elif value[0].code == YAML_REMOVE_VALUE:
                    value[0].value = value[1]  # set the name
                    node.ctrl.append(value[0])
                elif value[0].code == YAML_MUX:
                    node.multiplex = True
            else:
                node.value[value[0]] = value[1]
        if using:
            # FIX: was `name is not ''` — an identity comparison against a
            # string literal (unreliable; SyntaxWarning on Python 3.8+).
            # Equality is the intended test.
            if name != '':
                for name in using.split('/')[::-1]:
                    node = cls_node(name, children=[node])
            else:
                using = using.split('/')[::-1]
                node.name = using.pop()
                while True:
                    if not using:
                        break
                    name = using.pop()  # 'using' is list pylint: disable=E1101
                    node = cls_node(name, children=[node])
                node = cls_node('', children=[node])
        return node

    def mapping_to_tree_loader(loader, node):
        """ Maps yaml mapping tag to TreeNode structure """
        _value = []
        for key_node, value_node in node.value:
            if key_node.tag.startswith('!'):  # reflect tags everywhere
                key = loader.construct_object(key_node)
            else:
                key = loader.construct_python_str(key_node)
            value = loader.construct_object(value_node)
            _value.append((key, value))
        objects = ListOfNodeObjects()
        for name, values in _value:
            if isinstance(values, ListOfNodeObjects):  # New node from list
                objects.append(tree_node_from_values(name, values))
            elif values is None:  # Empty node
                objects.append(cls_node(str(name)))
            else:  # Values
                objects.append(Value((name, values)))
        return objects

    def mux_loader(loader, obj):
        """
        Special !mux loader which allows to tag node as 'multiplex = True'.
        """
        if not isinstance(obj, yaml.ScalarNode):
            objects = mapping_to_tree_loader(loader, obj)
        else:  # This means it's empty node. Don't call mapping_to_tree_loader
            objects = ListOfNodeObjects()
        objects.append((Control(YAML_MUX), None))
        return objects

    # NOTE(review): constructors are registered on the shared Loader class —
    # a process-wide side effect repeated on every call of this function.
    Loader.add_constructor(u'!include',
                           lambda loader, node: Control(YAML_INCLUDE))
    Loader.add_constructor(u'!using', lambda loader, node: Control(YAML_USING))
    Loader.add_constructor(u'!remove_node',
                           lambda loader, node: Control(YAML_REMOVE_NODE))
    Loader.add_constructor(u'!remove_value',
                           lambda loader, node: Control(YAML_REMOVE_VALUE))
    Loader.add_constructor(u'!mux', mux_loader)
    Loader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                           mapping_to_tree_loader)

    # Parse file name ([$using:]$path)
    path = __RE_FILE_SPLIT.split(path, 1)
    if len(path) == 1:
        path = __RE_FILE_SUBS.sub(':', path[0])
        using = ["run"]
    else:
        # A "$using:" prefix places the tree under the given node path.
        nodes = __RE_FILE_SUBS.sub(':', path[0]).strip('/').split('/')
        using = [node for node in nodes if node]
        if not path[0].startswith('/'):  # relative path, put into /run
            using.insert(0, 'run')
        path = __RE_FILE_SUBS.sub(':', path[1])

    # Load the tree
    with open(path) as stream:
        loaded_tree = yaml.load(stream, Loader)
        # NOTE(review): unlike the sibling variant, there is no None check
        # here — an empty YAML document would make tree_node_from_values
        # iterate over None and raise; confirm inputs are never empty.
        loaded_tree = tree_node_from_values('', loaded_tree)

    # Add prefix
    if using:
        loaded_tree.name = using.pop()
        while True:
            if not using:
                break
            loaded_tree = cls_node(using.pop(), children=[loaded_tree])
        loaded_tree = cls_node('', children=[loaded_tree])
    return loaded_tree
Beispiel #23
0
import dateutil.tz
import dateutil.parser
import yaml

try:
    from yaml import CLoader as YAML_Loader
except ImportError:
    from yaml import Loader as YAML_Loader

def time_constructor(loader, node):
    """Construct a datetime from a YAML timestamp scalar via dateutil.parser."""
    return dateutil.parser.parse(node.value)

# Override the stock timestamp handling for every use of YAML_Loader.
YAML_Loader.add_constructor( "tag:yaml.org,2002:timestamp",
                             time_constructor )

def load(file_path):
    """Open *file_path* and parse it as YAML with the configured loader."""
    with open(file_path, 'r') as yaml_file:
        return yaml.load(yaml_file, Loader=YAML_Loader)
Beispiel #24
0
        'Python environment.')
    from yaml import Loader, Dumper

# Yaml module configuration


class unicode_folder(str):
    # Marker string type: dumped below as a folded (style '>') YAML scalar.
    pass


class unicode_literal(str):
    # Marker string type: given its own representer below — presumably the
    # literal ('|') style; confirm in the truncated registration.
    pass


# Load YAML mappings as OrderedDicts so key order survives a round trip.
Loader.add_constructor(
    yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
    lambda loader, node: collections.OrderedDict(loader.construct_pairs(node)))
# Keys are deep-copied while dumping; values are passed through untouched.
Dumper.add_representer(
    collections.OrderedDict, lambda dumper, data: dumper.represent_dict(
        (copy.deepcopy(key), value) for key, value in data.items()))
Dumper.add_representer(
    dict, lambda dumper, data: dumper.represent_dict(
        (copy.deepcopy(key), value) for key, value in data.items()))
# numpy arrays and tuples are serialized as plain YAML lists.
Dumper.add_representer(
    np.ndarray, lambda dumper, data: dumper.represent_list(data.tolist()))
Dumper.add_representer(tuple, lambda dumper, data: dumper.represent_list(data))
# unicode_folder strings use the folded scalar style ('>').
Dumper.add_representer(
    unicode_folder, lambda dumper, data: dumper.represent_scalar(
        'tag:yaml.org,2002:str', data, style='>'))
Dumper.add_representer(
    unicode_literal, lambda dumper, data: dumper.represent_scalar(
Beispiel #25
0
## setup dumper for dumping OrderedDict ##
_MAPPING_TAG = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG


def dict_representer(dumper, data):
    """ Representer to represent special OrderedDict """
    return dumper.represent_dict(data.items())


def dict_constructor(loader, node):
    """ Construct an OrderedDict for dumping """
    return OrderedDict(loader.construct_pairs(node))


# Registered on the classes themselves: every use of Loader/Dumper in this
# process now round-trips OrderedDict as a plain YAML mapping.
Dumper.add_representer(OrderedDict, dict_representer)
Loader.add_constructor(_MAPPING_TAG, dict_constructor)


class ModToolMakeYAML(ModTool):
    """ Make YAML file for GRC block bindings """
    name = 'makeyaml'
    description = 'Generate YAML files for GRC block bindings.'

    def __init__(self, blockname=None, **kwargs):
        """Initialize the tool and remember *blockname* as the match pattern."""
        ModTool.__init__(self, blockname, **kwargs)
        self.info['pattern'] = blockname

    def validate(self):
        """ Validates the arguments """
        ModTool._validate(self)
        if not self.info['pattern'] or self.info['pattern'].isspace():
Beispiel #26
0
def load(stream, constructors=None):
    """Parse a single YAML document from *stream*.

    Installs the default tag constructors (!timedelta, !re, !ref, !obj)
    unless the caller supplies an override with the same (un-prefixed) name;
    caller-provided constructors are registered under "!<name>".  The loader
    is always disposed.
    """
    loader = Loader(stream)
    constructors = constructors or {}

    # Install defaults only where the caller has not overridden them.
    # (Idiom fix: "x not in d" instead of "not x in d".)
    if "timedelta" not in constructors:
        loader.add_constructor("!timedelta", _timedelta_contructor)
    if "re" not in constructors:
        loader.add_constructor("!re", _re_constructor)
    if "ref" not in constructors:
        loader.add_constructor("!ref", _ref_constructor)
    if "obj" not in constructors:
        loader.add_constructor("!obj", _obj_constructor)

    # Iterating an empty dict is a no-op; no truthiness guard needed.
    for name, constructor in constructors.items():
        loader.add_constructor("!" + name, constructor)

    try:
        return loader.get_single_data()
    finally:
        loader.dispose()
Beispiel #27
0
# Spending against a budget line is allowed to go over its value by this factor
FUDGE_FACTOR = D("1.1")  # i.e. a 10% overspend tolerance


def dict_constructor(loader, node):
    """Constructor for libyaml to use ordered dicts instead of dicts."""
    pairs = loader.construct_pairs(node)
    return collections.OrderedDict(pairs)


def num_constructor(loader, node):
    """Constructor for libyaml to translate numeric literals to Decimals."""
    raw = node.value
    return D(raw)


# Give me ordered dictionaries back
# (registered on the class: affects every YAML_Loader user in the process)
YAML_Loader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                            dict_constructor)

# Parse floats as decimals
YAML_Loader.add_constructor("tag:yaml.org,2002:float", num_constructor)


def dec_ceil(d):
    """Round the decimal *d* up to the nearest integer (ceiling)."""
    return d.to_integral_exact(rounding=ROUND_CEILING)


def dec_floor(d):
    """Round the decimal *d* down to the nearest integer (floor)."""
    return d.to_integral_exact(rounding=ROUND_FLOOR)

Beispiel #28
0
def timestamp_constructor(loader: Loader, node: yaml.Node) -> datetime:
    """Reconstruct a datetime from a YAML timestamp scalar node."""
    return parse_date(loader.construct_scalar(node))
Beispiel #29
0
def ordered_dict_constructor(loader: Loader, node: yaml.Node) -> OrderedDict:
    """Build an OrderedDict from a YAML mapping node, preserving key order."""
    loader.flatten_mapping(node)
    pairs = loader.construct_pairs(node)
    return OrderedDict(pairs)
Beispiel #30
0
def _create_from_yaml(path, cls_node=TreeNode):
    """Create a tree structure from a YAML file.

    Args:
        path: Path of the YAML file to load (also used to resolve
            relative !include paths).
        cls_node: Node class used for every created node (default TreeNode).

    Returns:
        Root node of the created tree.
    """
    def tree_node_from_values(name, values):
        """Create `name` node and add values."""
        node = cls_node(str(name))
        using = ''
        for value in values:
            if isinstance(value, TreeNode):
                node.add_child(value)
            elif isinstance(value[0], Control):
                if value[0].code == YAML_INCLUDE:
                    # Include file (relative paths resolve against `path`)
                    ypath = value[1]
                    if not os.path.isabs(ypath):
                        ypath = os.path.join(os.path.dirname(path), ypath)
                    node.merge(_create_from_yaml(ypath, cls_node))
                elif value[0].code == YAML_USING:
                    if using:
                        raise ValueError("!using can be used only once per "
                                         "node! (%s:%s)" % (path, name))
                    # Normalize "/prefix/path/" -> "prefix/path"
                    using = value[1]
                    if using[0] == '/':
                        using = using[1:]
                    if using[-1] == '/':
                        using = using[:-1]
                elif value[0].code == YAML_REMOVE_NODE:
                    value[0].value = value[1]  # set the name
                    node.ctrl.append(value[0])  # add "blue pill" of death
                elif value[0].code == YAML_REMOVE_VALUE:
                    value[0].value = value[1]  # set the name
                    node.ctrl.append(value[0])
                elif value[0].code == YAML_JOIN:
                    node.multiplex = False
            else:
                node.value[value[0]] = value[1]
        if using:
            # FIX: compare strings with `!=` instead of identity (`is not`),
            # which depends on interpreter string interning and raises a
            # SyntaxWarning on modern CPython.
            if name != '':
                for name in using.split('/')[::-1]:
                    node = cls_node(name, children=[node])
            else:
                using = using.split('/')[::-1]
                node.name = using.pop()
                while using:  # 'using' is a list here
                    node = cls_node(using.pop(), children=[node])
                node = cls_node('', children=[node])
        return node

    def mapping_to_tree_loader(loader, node):
        """ Maps yaml mapping tag to TreeNode structure """
        _value = loader.construct_pairs(node)
        objects = ListOfNodeObjects()
        for name, values in _value:
            if isinstance(values, ListOfNodeObjects):  # New node from list
                objects.append(tree_node_from_values(name, values))
            elif values is None:  # Empty node
                objects.append(cls_node(str(name)))
            else:  # Values
                objects.append(Value((name, values)))
        return objects

    def join_loader(loader, obj):
        """
        Special !join loader which allows to tag node as 'multiplex = False'.
        """
        if not isinstance(obj, yaml.ScalarNode):
            objects = mapping_to_tree_loader(loader, obj)
        else:  # This means it's empty node. Don't call mapping_to_tree_loader
            objects = ListOfNodeObjects()
        objects.append((Control(YAML_JOIN), None))
        return objects

    Loader.add_constructor(u'!include',
                           lambda loader, node: Control(YAML_INCLUDE))
    Loader.add_constructor(u'!using',
                           lambda loader, node: Control(YAML_USING))
    Loader.add_constructor(u'!remove_node',
                           lambda loader, node: Control(YAML_REMOVE_NODE))
    Loader.add_constructor(u'!remove_value',
                           lambda loader, node: Control(YAML_REMOVE_VALUE))
    Loader.add_constructor(u'!join', join_loader)
    Loader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                           mapping_to_tree_loader)

    with open(path) as stream:
        return tree_node_from_values('', yaml.load(stream, Loader))
Beispiel #31
0
# Provide own implementation of ISO8601 timestamps
# Provide own implementation of ISO8601 timestamps
def timestamp_representer(dumper: Dumper, date: datetime) -> yaml.Node:
    """Represent a datetime as an ISO-8601 scalar under TIMESTAMP_TAG."""
    iso = date.isoformat()
    return dumper.represent_scalar(TIMESTAMP_TAG, iso)


Dumper.add_representer(datetime, timestamp_representer)


def timestamp_constructor(loader: Loader, node: yaml.Node) -> datetime:
    """Custom constructor turning a YAML scalar node into a datetime."""
    raw = loader.construct_scalar(node)
    parsed = parse_date(raw)
    return parsed


Loader.add_constructor(TIMESTAMP_TAG, timestamp_constructor)


# Automatically serialize Path as string
# Automatically serialize Path as string
def path_representer(dumper: Dumper, path: Path) -> yaml.Node:
    """Represent a pathlib Path as its plain string form (STR_TAG scalar)."""
    text = str(path)
    return dumper.represent_scalar(STR_TAG, text)


# Serialize every pathlib path flavour through the same string representer.
Dumper.add_representer(Path, path_representer)
Dumper.add_representer(PosixPath, path_representer)
Dumper.add_representer(WindowsPath, path_representer)


# Serialize OrderedDict as standard yaml tag !!omap
# https://stackoverflow.com/questions/5121931/in-python-how-can-you-load-yaml-mappings-as-ordereddicts#21912744
Beispiel #32
0
# NOTE(review): fragment of a larger script — `load_yaml`, `q`, `updater`,
# `file_size` and `TagFilter` are defined elsewhere and the surrounding
# control flow (before sys.exit / after the loop) is not visible here.
sys.exit(0)

for fn in sys.argv[1:]:
    # Worker process prepared per input file; `p` is not started or used
    # in this visible fragment — TODO confirm against the full script.
    p = Process(target=load_yaml, args=(fn, q, fn))

    f_size = os.path.getsize(fn)
    # err("  Loading [%s] (%d)" % (fn,f_size))
    with open(fn) as file:

        # Background thread reporting progress until `finished` is set.
        finished = threading.Event()
        update_thread = threading.Thread(target=updater, args=(finished, file, file_size(file)))
        update_thread.daemon = True
        update_thread.start()

        # Read every YAML document in the file into `stats`.
        loader = Loader(file)
        stats = []
        while loader.check_data():
            stats.append( loader.get_data() )

        finished.set()
        update_thread.join()

        #filters = [ TagFilter('total') ]
        #for filter in filters:
            #stats = filter.filter(stats)

        if not stats: continue

        kernel = TagFilter('kernel').filter(stats)[0]
        user = TagFilter('user').filter(stats)[0]
Beispiel #33
0
# Spending against a budget line is allowed to go over its value by this factor
# (i.e. a 10% overspend tolerance).
FUDGE_FACTOR = D("1.1")


def dict_constructor(loader, node):
    """Build an OrderedDict from a YAML mapping node so key order is kept."""
    ordered = collections.OrderedDict()
    for key, value in loader.construct_pairs(node):
        ordered[key] = value
    return ordered


def num_constructor(loader, node):
    """Constructor for libyaml that maps numeric literals onto Decimals (D)."""
    raw = node.value
    return D(raw)


# Give me ordered dictionaries back: route the default mapping tag through
# dict_constructor (defined above) so key order is preserved.
YAML_Loader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                            dict_constructor)

# Parse floats as decimals (via num_constructor / D) instead of binary floats.
YAML_Loader.add_constructor("tag:yaml.org,2002:float", num_constructor)


def dec_ceil(d):
    """Get the ceiling of a decimal."""
    rounded = d.to_integral_exact(ROUND_CEILING)
    return rounded


def dec_floor(d):
    """Get the floor of a decimal."""
    rounded = d.to_integral_exact(ROUND_FLOOR)
    return rounded

Beispiel #34
0
    def __init__(self, *args, **kwargs):
        """Initialize the base Loader, then route both plain and ordered
        mapping tags through this class's mapping constructor."""
        Loader.__init__(self, *args, **kwargs)
        for tag in ('tag:yaml.org,2002:map', 'tag:yaml.org,2002:omap'):
            self.add_constructor(tag, type(self).construct_yaml_map)
try:  # try using the libyaml if installed
    from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:  # else use default PyYAML loader and dumper
    from yaml import Loader, Dumper

# Models and automata share representer helpers; Markov additionally
# passes `dict` through to the shared model representer.
Dumper.add_representer(Model, model_representer)
Dumper.add_representer(Ts, model_representer)
Dumper.add_representer(
    Markov, lambda dumper, model: model_representer(dumper, model, dict))
Dumper.add_representer(Automaton, automaton_representer)
Dumper.add_representer(Buchi, automaton_representer)
Dumper.add_representer(Fsa, automaton_representer)
Dumper.add_representer(Rabin, automaton_representer)

# Matching constructors, keyed by each class's yaml_tag.
Loader.add_constructor(
    Model.yaml_tag,
    lambda loader, model: model_constructor(loader, model, Model))
Loader.add_constructor(
    Ts.yaml_tag, lambda loader, model: model_constructor(loader, model, Ts))
Loader.add_constructor(
    Markov.yaml_tag,
    lambda loader, model: model_constructor(loader, model, Markov, dict))
Loader.add_constructor(
    Automaton.yaml_tag, lambda loader, automaton: automaton_constructor(
        loader, automaton, Automaton))
Loader.add_constructor(
    Buchi.yaml_tag,
    lambda loader, automaton: automaton_constructor(loader, automaton, Buchi))
Loader.add_constructor(
    Fsa.yaml_tag,
    lambda loader, automaton: automaton_constructor(loader, automaton, Fsa))
# NOTE(review): Rabin gets a representer above but no constructor here —
# possibly truncated; confirm against the full file.
Beispiel #36
0
def parse(opt_path: str, is_train: bool = True) -> NoneDict:
    """Parse options file.

    Args:
        opt_path (str): Option file path. Can be JSON or YAML.
        is_train (bool): Indicate whether in training or not. Default: True.

    Returns:
        (NoneDict): Parsed options; missing keys resolve to None.

    Raises:
        ValueError: If the configuration file cannot be found, has an
            unsupported extension, or contains an unexpected path type.
    """

    # check if configuration file exists
    if not os.path.isfile(opt_path):
        opt_path = os.path.join("options", "train" if is_train else "test",
                                opt_path)
        if not os.path.isfile(opt_path):
            raise ValueError(
                "Configuration file {} not found.".format(opt_path))

    ext = os.path.splitext(opt_path)[1].lower()
    if ext == '.json':
        import json
        # remove comments starting with '//'
        json_str = ''
        with open(opt_path, 'r') as f:
            for line in f:
                line = line.split('//')[0] + '\n'
                json_str += line
        opt = json.loads(json_str, object_pairs_hook=OrderedDict)
    elif ext in ['.yml', '.yaml']:
        import yaml
        import re
        with open(opt_path, mode='r') as f:
            try:
                # use SafeLoader's over Loader to prevent against arbitrary python object execution
                # Use C loaders if possible, faster
                from yaml import CLoader as Loader  #CSafeLoader as Loader
            except ImportError:
                from yaml import Loader  #SafeLoader as Loader
            _mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG

            def dict_constructor(loader, node):
                return OrderedDict(loader.construct_pairs(node))

            Loader.add_constructor(_mapping_tag, dict_constructor)
            # compiled resolver to correctly parse scientific notation numbers
            Loader.add_implicit_resolver(
                u'tag:yaml.org,2002:float',
                re.compile(
                    u'''^(?:
                [-+]?(?:[0-9][0-9_]*)\\.[0-9_]*(?:[eE][-+]?[0-9]+)?
                |[-+]?(?:[0-9][0-9_]*)(?:[eE][-+]?[0-9]+)
                |\\.[0-9_]+(?:[eE][-+]?[0-9]+)?
                |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*
                |[-+]?\\.(?:inf|Inf|INF)
                |\\.(?:nan|NaN|NAN))$''', re.X), list(u'-+0123456789.'))
            opt = yaml.load(f, Loader=Loader)
    else:
        # FIX: previously an unsupported extension fell through with `opt`
        # unbound, producing a confusing NameError below; fail fast instead.
        raise ValueError(
            "Unsupported configuration file extension: {}".format(ext))

    opt['is_train'] = is_train
    scale = opt.get('scale', 1)
    bm = opt.get('batch_multiplier', None)

    # datasets
    for phase, dataset in opt['datasets'].items():
        phase = phase.split('_')[0]
        dataset['phase'] = phase
        dataset['scale'] = scale
        is_lmdb = False
        image_path_keys = [
            "HR", "HR_bg", "LR", "A", "B", "AB", "lq", "gt", "ref"
        ]
        for key in image_path_keys:
            image_path = dataset.get('dataroot_' + key, None)
            if image_path is not None:
                if isinstance(image_path, str):
                    is_lmdb = os.path.splitext(
                        image_path)[1].lower() == ".lmdb"
                    image_path = [image_path]
                if isinstance(image_path, list):
                    image_path = [
                        os.path.normpath(os.path.expanduser(path))
                        for path in image_path
                    ]
                    if len(image_path) == 1:
                        # if it's a single-item list, act as if it was a str instead of a list
                        image_path = image_path[0]
                    dataset['dataroot_' + key] = image_path
                else:
                    raise ValueError(
                        "Unexpected path type: {}. Either a single \
                        path or a list of paths are supported.".format(
                            type(image_path)))
        dataset['data_type'] = 'lmdb' if is_lmdb else 'img'

        HR_size = dataset.get('HR_size', None)
        if HR_size:
            dataset['crop_size'] = HR_size

        if phase == 'train':
            preprocess = dataset.get('preprocess', None)
            if preprocess is not None:
                crop_size = dataset.get('crop_size', None)
                aspect_ratio = dataset.get('aspect_ratio', None)
                load_size = dataset.get('load_size', None)
                center_crop_size = dataset.get('center_crop_size', None)

                if ('resize' in preprocess or 'scale_width' in preprocess
                        or 'scale_height' in preprocess
                        or 'scale_shortside' in preprocess):
                    assert load_size, "load_size not defined"
                    if crop_size:
                        # crop_size should be smaller than the size of loaded image
                        assert (load_size >= crop_size)
                if 'center_crop' in preprocess:
                    assert center_crop_size, "center_crop_size not defined"
                    if crop_size:
                        assert (center_crop_size >= crop_size)
                if 'fixed' in preprocess:
                    assert aspect_ratio, "aspect_ratio not defined"

            pre_crop = dataset.get('pre_crop', None)
            if scale != 1 and not pre_crop:
                if not preprocess:
                    dataset['preprocess'] = 'crop'
                else:
                    for popt in [
                            'scale_shortside', 'scale_height', 'scale_width',
                            'none'
                    ]:
                        if popt in preprocess:
                            raise ValueError(
                                f"Preprocess option {popt} can only be used with 1x scale."
                            )

        if phase == 'train' and bm:
            # compatibility with other forks
            dataset['virtual_batch_size'] = bm * dataset["batch_size"]
        if dataset.get('virtual_batch_size', None):
            dataset['virtual_batch_size'] = max(dataset['virtual_batch_size'],
                                                dataset["batch_size"])

        if phase == 'train' and 'subset_file' in dataset and dataset[
                'subset_file'] is not None:
            dataset['subset_file'] = os.path.normpath(
                os.path.expanduser(dataset['subset_file']))

        if 'lr_downscale_types' in dataset and dataset[
                'lr_downscale_types'] is not None:
            if isinstance(dataset['lr_downscale_types'], str):
                dataset['lr_downscale_types'] = [dataset['lr_downscale_types']]
            dataset['lr_downscale_types'] = [
                (_cv2_str2interpolation[algo.lower()]
                 if isinstance(algo, str) else algo)
                for algo in dataset['lr_downscale_types']
            ]

        for k in [
                'lr_blur_types', 'lr_noise_types', 'lr_noise_types2',
                'hr_noise_types'
        ]:
            if dataset.get(k, None):
                dataset[k] = parse2lists(dataset[k])

        tensor_shape = dataset.get('tensor_shape', None)
        if tensor_shape:
            opt['tensor_shape'] = tensor_shape

    # path
    for key, path in opt['path'].items():
        if path and key in opt['path']:
            opt['path'][key] = os.path.normpath(os.path.expanduser(path))

    if is_train:
        experiments_root = os.path.join(opt['path']['root'], 'experiments',
                                        opt['name'])
        opt['path']['experiments_root'] = experiments_root
        opt['path']['models'] = os.path.join(experiments_root, 'models')
        opt['path']['training_state'] = os.path.join(experiments_root,
                                                     'training_state')
        opt['path']['log'] = experiments_root
        opt['path']['val_images'] = os.path.join(experiments_root,
                                                 'val_images')
        if opt['train'].get('display_freq', None):
            opt['path']['disp_images'] = os.path.join(experiments_root,
                                                      'disp_images')
        opt['train']['overwrite_val_imgs'] = opt['train'].get(
            'overwrite_val_imgs', None)
        opt['train']['val_comparison'] = opt['train'].get(
            'val_comparison', None)
        opt['logger']['overwrite_chkp'] = opt['logger'].get(
            'overwrite_chkp', None)
        fsa = opt['train'].get('use_frequency_separation', None)
        if fsa and not opt['train'].get('fs', None):
            opt['train']['fs'] = fsa

        # change some options for debug mode
        if 'debug_nochkp' in opt['name']:
            opt['train']['val_freq'] = 8
            opt['logger']['print_freq'] = 2
            opt['logger']['save_checkpoint_freq'] = 10000000
            opt['train']['lr_decay_iter'] = 10
        elif 'debug' in opt['name']:
            opt['train']['val_freq'] = 8
            opt['logger']['print_freq'] = 2
            opt['logger']['save_checkpoint_freq'] = 8
            opt['train']['lr_decay_iter'] = 10

    else:  # test
        results_root = os.path.join(opt['path']['root'], 'results',
                                    opt['name'])
        opt['path']['results_root'] = results_root
        opt['path']['log'] = results_root

    # network_G
    opt['network_G']['scale'] = scale

    # relative learning rate and options
    if 'train' in opt:
        niter = opt['train']['niter']
        for k in ['T_period', 'restarts', 'lr_steps', 'lr_steps_inverse']:
            k_rel = k + '_rel'
            if k_rel in opt['train']:
                opt['train'][k] = [int(x * niter) for x in opt['train'][k_rel]]
                opt['train'].pop(k_rel)
        if 'swa_start_iter_rel' in opt['train']:
            opt['train']['swa_start_iter'] = int(
                opt['train']['swa_start_iter_rel'] * niter)
            opt['train'].pop('swa_start_iter_rel')

    # export CUDA_VISIBLE_DEVICES
    gpu_list = ','.join(str(x) for x in opt['gpu_ids'])
    os.environ['CUDA_VISIBLE_DEVICES'] = gpu_list
    print('export CUDA_VISIBLE_DEVICES=' + gpu_list)

    return dict_to_nonedict(opt)
Beispiel #37
0
    import json

try:
    from yaml import load, dump
    from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
    from yaml import Loader, Dumper

# Only turn on converting all strings to unicode by the YAML loader
# if running Python 2.7 or higher. 2.6 does not seem to like unicode dict keys.
# ???
#
if sys.version_info[0] != 2 or sys.version_info[1] >= 7:
    def construct_yaml_unistr(self, node):
        # Return the scalar untouched (a unicode string on py3 / py2.7+).
        return self.construct_scalar(node)
    Loader.add_constructor(u'tag:yaml.org,2002:str', construct_yaml_unistr)

EXP_SCRIPT_DIRECTORY = ''

import constants
from constants import EventConstants, DeviceConstants
from constants import KeyboardConstants, MouseConstants, EyeTrackerConstants

from util import print2err, printExceptionDetailsToStdErr, ioHubError
from util import fix_encoding, OrderedDict, module_directory, updateDict
from util import isIterable, getCurrentDateTimeString, convertCamelToSnake
from util import ProgressBarDialog, MessageDialog, FileDialog, ioHubDialog
from util import win32MessagePump

# presumably normalizes text encoding at startup — confirm in util.fix_encoding
fix_encoding.fix_encoding()
Beispiel #38
0
    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.
'''

from base import *
from maps2d import *
from maps3d import *
from maps2hd import *
from maps_nd import *

# register yaml representers
try: # try using the libyaml if installed
    from yaml import CLoader as Loader, CDumper as Dumper
except ImportError: # else use default PyYAML loader and dumper
    from yaml import Loader, Dumper

def p2d_representer(dumper, p):
    """Represent a Point2D as a YAML !Point2D mapping of float x/y."""
    mapping = {'x': float(p.x), 'y': float(p.y)}
    return dumper.represent_mapping(tag=u'!Point2D', mapping=mapping)
Dumper.add_representer(Point2D, p2d_representer)
def p2d_constructor(loader, node):
    """Rebuild a Point2D from a YAML !Point2D mapping node."""
    fields = loader.construct_mapping(node)
    coords = [fields['x'], fields['y']]
    return Point2D(coords)
Loader.add_constructor(u'!Point2D', p2d_constructor)
Beispiel #39
0
    # NOTE(review): this region is indented — it sits inside a try: whose
    # opening line is outside this view; on any failure `yaml` is set to
    # None below to signal that YAML support is unavailable.
    _mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG

    if sys.version_info[0] < 3:
        # Python 2: dicts expose iteritems()
        def dict_representer(dumper, data):
            return dumper.represent_dict(data.iteritems())
    else:
        def dict_representer(dumper, data):
            return dumper.represent_dict(data.items())


    def dict_constructor(loader, node):
        # Preserve mapping key order on load.
        return OrderedDict(loader.construct_pairs(node))


    Dumper.add_representer(OrderedDict, dict_representer)
    Loader.add_constructor(_mapping_tag, dict_constructor)

    Dumper.add_representer(str, SafeRepresenter.represent_str)

    if sys.version_info[0] < 3:
        Dumper.add_representer(unicode, SafeRepresenter.represent_unicode)
except Exception:
    # Any failure (e.g. PyYAML not installed) disables YAML support entirely.
    yaml = None


def main():
    parser = argparse.ArgumentParser(
        description='Convert between pbjson and json',
        epilog='If converting a PBJSON file with binary elements, you may need to use `--repr` since JSON cannot handle binary data.')
    parser.add_argument('-r', '--repr', action='store_true', help='instead of converting to JSON, just output the `repr` of the object')
    parser.add_argument('-p', '--pretty', action='store_true', help='make it nice for humans')
Beispiel #40
0
@author: nino
'''

from yaml import load as _load, dump as _dump
try:
    from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
    from yaml import Loader, Dumper

from pyyacc.objects import ValueSpec, Requirement, Optional, \
    ConfigurationDescriptor, ConfigSet, to_url, ParseResult
from logging import getLogger

LOG = getLogger(__file__)

# Register the custom tags used by configuration descriptors.
Loader.add_constructor("!spec", ValueSpec._yaml_constructor)
Loader.add_constructor("!required", Requirement._yaml_constructor)
Loader.add_constructor("!optional", Optional._yaml_constructor)
# All three URI spellings resolve their scalar through to_url().
Loader.add_constructor("!URI", lambda loader, node: to_url(loader.construct_scalar(node)))
Loader.add_constructor("!uri", lambda loader, node: to_url(loader.construct_scalar(node)))
Loader.add_constructor("!url", lambda loader, node: to_url(loader.construct_scalar(node)))
# this gets a little funky.
# Dumper.add_representer(ParseResult, lambda dumper, data: dumper.represent_scalar("!uri", data.geturl()))


class ConfigurationBuilder(object):
    def __init__(self, descriptor):
        """Store the configuration descriptor this builder works from."""
        self.descriptor = descriptor

    def build(self, *overlays):
        params = ConfigSet()
Beispiel #41
0
                batch.create_node(node.labels, node.properties)
        for path in self.__paths:
            created.append(path)
            if len(path) == 0:
                query, params = self.__create_node_as_path(path.start_node)
                batch.execute_cypher(query, params)
            elif len(path) == 1:
                query, params = self.__create_relationship_as_path(path.relationship(0))
                batch.execute_cypher(query, params)
            else:
                raise ValueError("Long paths not yet supported")
        for i, result in enumerate(batch.submit()):
            if isinstance(result, Table):
                path = result[0][0]
                for j, node in enumerate(path.nodes):
                    created[i].nodes[j].bind(self.__graph, id=node._id)
                    created[i].nodes[j].replace(*node.labels, **node.properties)
                for j, rel in enumerate(path.rels):
                    created[i].rels[j].bind(self.__graph, id=rel._id)
                    created[i].rels[j].replace(rel.type, **rel.properties)
            else:
                node = result
                created[i].bind(self.__graph, id=node._id)
                created[i].replace(*node.labels, **node.properties)
        return created


# Patch serialisable classes for the CLoader: each class carries its own
# yaml_tag and from_yaml classmethod, registered here in one pass.
for cls in (Graph, Node, Rel, Rev, Path):
    Loader.add_constructor(cls.yaml_tag, cls.from_yaml)
Beispiel #42
0
def _create_from_yaml(path, cls_node=TreeNode):
    """Create a tree structure from a YAML file.

    Args:
        path: Path of the YAML file to load (also used to resolve
            relative !include paths).
        cls_node: Node class used for every created node (default TreeNode).

    Returns:
        Root node of the created tree.
    """
    def tree_node_from_values(name, values):
        """ Create `name` node and add values  """
        node = cls_node(str(name))
        using = ''
        for value in values:
            if isinstance(value, TreeNode):
                node.add_child(value)
            elif isinstance(value[0], Control):
                if value[0].code == YAML_INCLUDE:
                    # Include file (relative paths resolve against `path`)
                    ypath = value[1]
                    if not os.path.isabs(ypath):
                        ypath = os.path.join(os.path.dirname(path), ypath)
                    node.merge(_create_from_yaml(ypath, cls_node))
                elif value[0].code == YAML_USING:
                    if using:
                        raise ValueError("!using can be used only once per "
                                         "node! (%s:%s)" % (path, name))
                    # Normalize "/prefix/path/" -> "prefix/path"
                    using = value[1]
                    if using[0] == '/':
                        using = using[1:]
                    if using[-1] == '/':
                        using = using[:-1]
                elif value[0].code == YAML_REMOVE_NODE:
                    value[0].value = value[1]   # set the name
                    node.ctrl.append(value[0])    # add "blue pill" of death
                elif value[0].code == YAML_REMOVE_VALUE:
                    value[0].value = value[1]   # set the name
                    node.ctrl.append(value[0])
                elif value[0].code == YAML_JOIN:
                    node.multiplex = False
            else:
                node.value[value[0]] = value[1]
        if using:
            # FIX: compare strings with `!=` instead of identity (`is not`),
            # which depends on interpreter string interning and raises a
            # SyntaxWarning on modern CPython.
            if name != '':
                for name in using.split('/')[::-1]:
                    node = cls_node(name, children=[node])
            else:
                using = using.split('/')[::-1]
                node.name = using.pop()
                while using:  # 'using' is a list here
                    node = cls_node(using.pop(), children=[node])
                node = cls_node('', children=[node])
        return node

    def mapping_to_tree_loader(loader, node):
        """ Maps yaml mapping tag to TreeNode structure """
        _value = loader.construct_pairs(node)
        objects = ListOfNodeObjects()
        for name, values in _value:
            if isinstance(values, ListOfNodeObjects):   # New node from list
                objects.append(tree_node_from_values(name, values))
            elif values is None:            # Empty node
                objects.append(cls_node(str(name)))
            else:                           # Values
                objects.append(Value((name, values)))
        return objects

    def join_loader(loader, obj):
        """
        Special !join loader which allows to tag node as 'multiplex = False'.
        """
        if not isinstance(obj, yaml.ScalarNode):
            objects = mapping_to_tree_loader(loader, obj)
        else:   # This means it's empty node. Don't call mapping_to_tree_loader
            objects = ListOfNodeObjects()
        objects.append((Control(YAML_JOIN), None))
        return objects

    Loader.add_constructor(u'!include',
                           lambda loader, node: Control(YAML_INCLUDE))
    Loader.add_constructor(u'!using',
                           lambda loader, node: Control(YAML_USING))
    Loader.add_constructor(u'!remove_node',
                           lambda loader, node: Control(YAML_REMOVE_NODE))
    Loader.add_constructor(u'!remove_value',
                           lambda loader, node: Control(YAML_REMOVE_VALUE))
    Loader.add_constructor(u'!join', join_loader)
    Loader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                           mapping_to_tree_loader)

    with open(path) as stream:
        return tree_node_from_values('', yaml.load(stream, Loader))