Code Example #1
File: test_metrics.py Project: derskeal/fbkutils
    def test_flatten_metrics_definition(self):
        """Metrics definitions are flattened to a dot-separated list"""
        yaml = '''
- rps
- latency:
  - p50
  - p95
  - p99.9
  - 1
'''
        metrics = MetricsConfig(load_yaml(yaml))
        expected = [
            'latency.1',
            'latency.p50',
            'latency.p95',
            'latency.p99_9',
            'rps',
        ]
        self.assertListEqual(expected, metrics.names)

        yaml = '''
- rps
- latency:
  - nesting:
    - some:
      - more
    - else
'''
        metrics = MetricsConfig(load_yaml(yaml))
        expected = ['latency.nesting.else', 'latency.nesting.some.more', 'rps']
        self.assertListEqual(expected, metrics.names)
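
A minimal sketch of the flattening these tests imply (this helper is hypothetical, not the project's actual MetricsConfig internals): dict keys become dot-separated path segments, leaves are stringified with inner dots replaced by underscores, and the result is sorted.

def flatten_metric_names(node, prefix=''):
    """Recursively flatten a parsed metrics list into sorted dot-joined names."""
    names = []
    for item in node:
        if isinstance(item, dict):
            # A dict maps a metric name to a nested list of sub-metrics.
            for key, children in item.items():
                names.extend(flatten_metric_names(children, prefix + str(key) + '.'))
        else:
            # Leaves are stringified; dots inside a leaf (e.g. p99.9) become
            # underscores so that dots only ever separate nesting levels.
            names.append(prefix + str(item).replace('.', '_'))
    return sorted(names)
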
Code Example #2
File: content.py Project: akiyoko/relate
def get_yaml_from_repo(repo, full_name, commit_sha, cached=True):
    """Return decoded, struct-ified YAML data structure from
    the given file in *repo* at *commit_sha*.

    See :class:`relate.utils.Struct` for more on
    struct-ification.
    """

    if cached:
        from six.moves.urllib.parse import quote_plus
        cache_key = "%%%2".join(
                (quote_plus(repo.controldir()), quote_plus(full_name),
                    commit_sha.decode()))

        import django.core.cache as cache
        def_cache = cache.caches["default"]
        result = None
        # Memcache is apparently limited to 250 characters.
        if len(cache_key) < 240:
            result = def_cache.get(cache_key)
        if result is not None:
            return result

    expanded = expand_yaml_macros(
            repo, commit_sha,
            get_repo_blob(repo, full_name, commit_sha).data)

    result = dict_to_struct(load_yaml(expanded))

    if cached:
        def_cache.add(cache_key, result, None)

    return result
Code Example #3
    def post(self):
        form = YamlCheckerForm()

        if not form.validate_on_submit():
            return render_template("yamlchecker/main.html", form=form)

        if form.type.data == "bungeecord":
            checker = BungeeCordConfigChecker()
        elif form.type.data == "redisbungee":
            checker = RedisBungeeConfigChecker()
        else:
            form.type.errors.append("This is an invalid configuration type.")
            return render_template("yamlchecker/main.html", form=form)

        try:
            yaml = load_yaml(form.yaml_file.data)
        except YAMLError:
            flash(
                Markup('A syntax error was detected. You may want to use '
                       '<a href="http://yaml-online-parser.appspot.com/">this tool</a> '
                       'to determine the problem.'),
                "formerror")
            return render_template("yamlchecker/main.html", form=form)

        if not isinstance(yaml, dict):
            flash("This YAML file does not represent a dictionary (mapping).", "formerror")
            return render_template("yamlchecker/main.html", form=form)

        for message in checker.check_config(yaml):
            flash(message['message'], message['class'])

        return render_template("yamlchecker/main.html", validated=len(get_flashed_messages()) == 0,
                               form=form)
Code Example #4
File: content.py Project: akiyoko/relate
def get_raw_yaml_from_repo(repo, full_name, commit_sha):
    """Return decoded YAML data structure from
    the given file in *repo* at *commit_sha*.

    :arg commit_sha: A byte string containing the commit hash
    """

    from six.moves.urllib.parse import quote_plus
    cache_key = "%RAW%%2".join((
        quote_plus(repo.controldir()), quote_plus(full_name), commit_sha.decode()))

    import django.core.cache as cache
    def_cache = cache.caches["default"]
    result = None
    # Memcache is apparently limited to 250 characters.
    if len(cache_key) < 240:
        result = def_cache.get(cache_key)
    if result is not None:
        return result

    result = load_yaml(
            expand_yaml_macros(
                repo, commit_sha,
                get_repo_blob(repo, full_name, commit_sha).data))

    def_cache.add(cache_key, result, None)

    return result
Code Example #5
File: validation.py Project: simudream/relate
def check_attributes_yml(vctx, repo, path, tree):
    try:
        _, attr_blob_sha = tree[".attributes.yml"]
    except KeyError:
        # no .attributes.yml here
        pass
    else:
        from relate.utils import dict_to_struct
        from yaml import load as load_yaml

        att_yml = dict_to_struct(load_yaml(repo[attr_blob_sha].data))

        loc = path + "/" + ".attributes.yml"
        validate_struct(vctx, loc, att_yml, required_attrs=[], allowed_attrs=[("public", list), ("in_exam", list)])

        for access_kind in ["public", "in_exam"]:
            if hasattr(att_yml, access_kind):
                for i, l in enumerate(getattr(att_yml, access_kind)):
                    if not isinstance(l, six.string_types):
                        raise ValidationError("%s: entry %d in '%s' is not a string" % (loc, i + 1, access_kind))

    import stat

    for entry in tree.items():
        if stat.S_ISDIR(entry.mode):
            _, blob_sha = tree[entry.path]
            subtree = repo[blob_sha]
            check_attributes_yml(vctx, repo, path + "/" + entry.path.decode("utf-8"), subtree)
Code Example #6
def load_config(app_name):
    global _config

    search_filenames = [
        os.path.expanduser("~/.fundraising/%s.yaml" % app_name),
        os.path.expanduser("~/.%s.yaml" % app_name),
        # FIXME: relative path fail
        os.path.dirname(__file__) + "/../%s/config.yaml" % app_name,
        "/etc/fundraising/%s.yaml" % app_name,
        "/etc/%s.yaml" % app_name,
        # FIXME: relative path fail
        os.path.dirname(__file__) + "/../%s/%s.yaml" % (app_name, app_name,)
    ]
    # TODO: if getopt.get(--config/-f): search_filenames.append

    for filename in search_filenames:
        if not os.path.exists(filename):
            continue

        _config = DictAsAttrDict(load_yaml(file(filename, 'r')))
        log.info("Loaded config from {path}.".format(path=filename))

        _config.app_name = app_name

        # TODO: Move up a level, entry point should directly call logging
        # configuration.
        process.log.setup_logging()

        return _config

    raise Exception("No config found, searched " + ", ".join(search_filenames))
Code Example #7
def load_config(app_name):
    global config

    search_filenames = [
        os.path.expanduser("~/.fundraising/%s.yaml" % app_name),
        os.path.expanduser("~/.%s.yaml" % app_name),
        # FIXME: relative path fail
        os.path.dirname(__file__) + "/../%s/config.yaml" % app_name,
        "/etc/fundraising/%s.yaml" % app_name,
        "/etc/%s.yaml" % app_name,
        # FIXME: relative path fail
        os.path.dirname(__file__) + "/../%s/%s.yaml" % (app_name, app_name,)
    ]
    # TODO: if getopt.get(--config/-f): search_filenames.append

    for filename in search_filenames:
        if not os.path.exists(filename):
            continue

        config = DictAsAttrDict(load_yaml(file(filename, 'r')))
        log.info("Loaded config from {path}.".format(path=filename))

        config.app_name = app_name

        return

    raise Exception("No config found, searched " + ", ".join(search_filenames))
Code Example #8
File: content.py Project: ishitatsuyuki/relate
def get_raw_yaml_from_repo(repo, full_name, commit_sha):
    # type: (Repo_ish, Text, bytes) -> Any
    """Return decoded YAML data structure from
    the given file in *repo* at *commit_sha*.

    :arg commit_sha: A byte string containing the commit hash
    """

    from six.moves.urllib.parse import quote_plus
    cache_key = "%RAW%%2".join((
        CACHE_KEY_ROOT,
        quote_plus(repo.controldir()), quote_plus(full_name), commit_sha.decode(),
        ))

    import django.core.cache as cache
    def_cache = cache.caches["default"]

    result = None  # type: Optional[Any]
    # Memcache is apparently limited to 250 characters.
    if len(cache_key) < 240:
        result = def_cache.get(cache_key)
    if result is not None:
        return result

    yaml_str = expand_yaml_macros(
                repo, commit_sha,
                get_repo_blob(repo, full_name, commit_sha,
                    allow_tree=False).data)

    result = load_yaml(yaml_str)  # type: ignore

    def_cache.add(cache_key, result, None)

    return result
Code Example #9
File: content.py Project: gboone/relate
def get_yaml_from_repo(repo, full_name, commit_sha, cached=True):
    """Return decoded, struct-ified YAML data structure from
    the given file in *repo* at *commit_sha*.

    See :class:`relate.utils.Struct` for more on
    struct-ification.
    """

    if cached:
        cache_key = "%%%2".join((repo.controldir(), full_name, commit_sha))

        import django.core.cache as cache

        def_cache = cache.caches["default"]
        result = def_cache.get(cache_key)
        if result is not None:
            return result

    result = dict_to_struct(
        load_yaml(expand_yaml_macros(repo, commit_sha, get_repo_blob(repo, full_name, commit_sha).data))
    )

    if cached:
        def_cache.add(cache_key, result, None)

    return result
Code Example #10
    def post(self):
        form = ConfigurationConverterForm()

        if not form.validate_on_submit():
            return render_template("superbvote/convert.html", form=form)

        try:
            yaml = load_yaml(form.yaml_file.data)
        except YAMLError:
            flash(
                Markup('A syntax error was detected. You may want to use '
                       '<a href="http://yaml-online-parser.appspot.com/">this tool</a> '
                       'to determine the problem.'),
                "formerror")
            return render_template("superbvote/convert.html", form=form)

        if not isinstance(yaml, dict):
            flash("This YAML file does not represent a dictionary (mapping).", "formerror")
            return render_template("superbvote/convert.html", form=form)

        try:
            result = process_configuration(yaml)
        except Exception as e:
            flash("This YAML file does not look like a GAListener configuration.", "formerror")
            return render_template("superbvote/convert.html", form=form)

        return make_response((dump(result), 200, {"Content-Type": "application/yaml",
                                                  "Content-Disposition": "attachment; filename=superbvote_config.yml"}))
Code Example #11
File: validation.py Project: beesor/relate
def check_attributes_yml(vctx, repo, path, tree):
    try:
        _, attr_blob_sha = tree[".attributes.yml"]
    except KeyError:
        # no .attributes.yml here
        pass
    else:
        from relate.utils import dict_to_struct
        from yaml import load as load_yaml

        att_yml = dict_to_struct(load_yaml(repo[attr_blob_sha].data))

        loc = path + "/" + ".attributes.yml"
        validate_struct(
                vctx, loc, att_yml,
                required_attrs=[],
                allowed_attrs=[
                    ("public", list),
                ])

        if hasattr(att_yml, "public"):
            for i, l in enumerate(att_yml.public):
                if not isinstance(l, (str, unicode)):
                    raise ValidationError(
                            "%s: entry %d in 'public' is not a string"
                            % (loc, i+1))

    import stat
    for entry in tree.items():
        if stat.S_ISDIR(entry.mode):
            _, blob_sha = tree[entry.path]
            subtree = repo[blob_sha]
            check_attributes_yml(vctx, repo, path+"/"+entry.path, subtree)
Code Example #12
File: core.py Project: pombredanne/bolt
    def run(self, spec):
        """Execute the various tasks given in the spec list."""

        try:

            if output.debug:
                names = ", ".join(info[0] for info in spec)
                print("Tasks to run: %s" % names)

            call_hooks('commands.before', self.tasks, spec)

            # Initialise the default stage if none are given as the first task.
            if 'stages' in env:
                if spec[0][0] not in env.stages:
                    self.execute_task(env.stages[0], (), {}, None)
                else:
                    self.execute_task(*spec.pop(0))

            # Load the config YAML file if specified.
            if env.config_file:
                config_path = realpath(expanduser(env.config_file))
                config_path = join(self.directory, config_path)
                config_file = open(config_path, 'rb')
                config = load_yaml(config_file.read())
                if not config:
                    env.config = AttrDict()
                elif not isinstance(config, dict):
                    abort("Invalid config file found at %s" % config_path)
                else:
                    env.config = AttrDict(config)
                config_file.close()

            call_hooks('config.loaded')

            # Execute the tasks in order.
            for info in spec:
                self.execute_task(*info)

            if output.status:
                msg = "\nDone."
                if env.colors:
                    msg = env.color_settings['finish'](msg)
                print(msg)

        except SystemExit:
            raise
        except KeyboardInterrupt:
            if output.status:
                msg = "\nStopped."
                if env.colors:
                    msg = env.color_settings['finish'](msg)
                print >> sys.stderr, msg
            sys.exit(1)
        except:
            sys.excepthook(*sys.exc_info())
            sys.exit(1)
        finally:
            call_hooks('commands.after')
            disconnect_all()
Code Example #13
File: config.py Project: vladsaveliev/TargQC
def load_yaml_config(fpath):
    verify_file(fpath, is_critical=True)
    try:
        dic = load_yaml(open(fpath))
    except Exception:
        err(format_exc())
        critical('Could not parse bcbio YAML ' + fpath)
    else:
        return dic
Code Example #14
File: motif.py Project: ryanraaum/oldowan.mitotype
    def _process_yaml(self, yaml_string):
        yaml_motifs = load_yaml(yaml_string)
        for entry in yaml_motifs:
            polys = list(Polymorphism(int(k), 0, v) for k, v in entry['polymorphisms'].iteritems())
            self[entry['id']] = Motif(id=entry['id'],
                                      label=entry['label'],
                                      sources=entry['source'],
                                      polymorphisms=polys)
            self.__sources.update(self[entry['id']].sources)
Code Example #15
File: __main__.py Project: molobrakos/tellsticknet
def read_config():
    for directory, filename in product(CONFIG_DIRECTORIES, CONFIG_FILES):
        try:
            config = join(directory, filename)
            _LOGGER.debug("checking for config file %s", config)
            with open(config) as config:
                return list(load_yaml(config))
        except (IOError, OSError):
            continue
    return {}
Code Example #16
def override(settings, yaml=None, env=None):
    """
    :param dict settings: settings dict to be updated; usually it's ``globals()``
    :param yaml: path to YAML file
    :type yaml: str or FileIO
    :param str env: prefix for environment variables
    """
    if yaml is not None:
        if hasattr(yaml, 'read'):
            settings.update(load_yaml(yaml.read()))
        else:
            if os.path.exists(yaml):
                with open(yaml) as f:
                    settings.update(load_yaml(f.read()))

    if env is not None:
        for k, v in os.environ.items():
            if k.startswith(env):
                settings[k[len(env):]] = load_yaml(v)
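
A hypothetical usage sketch (the module layout, file name, and MYAPP_ prefix are assumptions, not part of the original): placed at the bottom of a settings module, it layers a YAML file and then prefixed environment variables over the module globals.

DEBUG = False
DB_HOST = 'localhost'

# Environment values are themselves parsed as YAML, so MYAPP_DEBUG=true
# overrides DEBUG with the boolean True rather than the string 'true'.
override(globals(), yaml='local_settings.yaml', env='MYAPP_')
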
Code Example #17
File: __init__.py Project: zolkko/blumenplace-front
    def __init__(self, yaml_file, root_path, defaults=None):
        super(Config, self).__init__(root_path, defaults)

        if not os.path.exists(yaml_file):
            raise ConfigError('Configuration file "%s" does not exist.' % yaml_file)

        with open(yaml_file, 'r') as f:
            conf_data = f.read()

        self.update(load_yaml(conf_data))
Code Example #18
File: config.py Project: sprucedev/DockCI-Agent
    def load_yaml_file(self, path):
        """ Load the data in the file as YAML, then deserialize into the model
        with the schema

        :param path: Path to the YAML file to load
        :type path: py.path.local
        """
        with path.open('r') as handle:
            data = load_yaml(handle)

        self.set_all(**self.SCHEMA.load(data).data)
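
A hypothetical call, assuming the model's SCHEMA is a marshmallow-style schema whose load(...) returns an object with a .data attribute, as the code above implies (py.path.local supplies the .open() used there; the path is a placeholder):

import py

config = Config()  # hypothetical model exposing SCHEMA and set_all()
config.load_yaml_file(py.path.local('/etc/dockci/agent.yaml'))
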
Code Example #19
File: loader.py Project: Kami/dateparser
    def _load_data(self):
        if self.file is None:
            data = get_data('data', 'languages.yaml')
        else:
            data = self.file.read()
        data = load_yaml(data)
        base_data = data.pop('base', {'skip': []})
        known_languages = {}
        for shortname, language_info in six.iteritems(data):
            self._update_language_info_with_base_info(language_info, base_data)
            language = Language(shortname, language_info)
            if language.validate_info():
                known_languages[shortname] = language
        self._data = known_languages
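
A plausible shape for the languages.yaml this loader consumes, inferred from the code: a 'base' entry that is popped first, then one mapping per language shortname (field names beyond 'skip' are assumptions).

base:
  skip: ["about", "and"]
en:
  name: English
  skip: ["the"]
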
Code Example #20
File: conf.py Project: EzyInsights/dateparser
    def __init__(self, **kwargs):
        """
        Settings are now loaded using the data/settings.yaml file.
        """

        data = get_data('data', 'settings.yaml')
        data = load_yaml(data)
        settings_data = data.pop('settings', {})

        for datum in settings_data:
            setattr(self, datum, settings_data[datum])

        for key in kwargs:
            setattr(self, key, kwargs[key])
Code Example #21
File: content.py Project: alexislitool/courseflow
def get_yaml_from_repo_as_dict(repo, full_name, commit_sha):
    cache_key = "%DICT%%2".join((repo.controldir(), full_name, commit_sha))

    import django.core.cache as cache
    def_cache = cache.caches["default"]
    result = def_cache.get(cache_key)
    if result is not None:
        return result

    result = load_yaml(get_repo_blob(repo, full_name, commit_sha).data)

    def_cache.add(cache_key, result, None)

    return result
Code Example #22
File: loader.py Project: EzyInsights/dateparser
    def _load_data(self):
        if self.file is None:
            data = get_data("data", "languages.yaml")
        else:
            data = self.file.read()
        data = load_yaml(data)
        base_data = data.pop("base", {"skip": []})
        base_data["skip"] += settings.SKIP_TOKENS
        known_languages = {}
        for shortname, language_info in six.iteritems(data):
            self._update_language_info_with_base_info(language_info, base_data)
            language = Language(shortname, language_info)
            if language.validate_info():
                known_languages[shortname] = language
        self._data = known_languages
Code Example #23
File: yatiblog.py Project: Epivalent/ampify
    def init_rst_source(source_file, destname=None):

        source_path = join_path(source_directory, source_file)
        source_file_obj = open(source_path, 'rb')
        content = source_file_obj.read()
        source_file_obj.close()

        if not content.startswith('---'):
            return

        filebase, filetype = splitext(source_file)
        filebase = filebase.lower()

        env = load_yaml(match_yaml_frontmatter(content).group(1))
        layout = env.pop('layout')

        if layout not in layouts:
            load_layout(layout, source_directory, layouts)

        content = replace_yaml_frontmatter('', content)

        if MORE_LINE in content:
            lead = content.split(MORE_LINE)[0]
            content = content.replace(MORE_LINE, '')
        else:
            lead = content

        if destname:
            destname = join_path(output_directory, destname)
        else:
            destname = join_path(output_directory, filebase + '.html')

        sources[source_file] = {
            '__content__': content,
            '__deps__': find_include_refs(content),
            '__env__': env,
            '__genfile__': destname,
            '__id__': source_file,
            '__layout__': layout,
            '__lead__': lead,
            '__mtime__': stat(source_path).st_mtime,
            '__name__': basename(destname), # filebase,
            '__outdir__': output_directory,
            '__path__': source_path,
            '__rst__': True,
            '__type__': 'text',
            '__filetype__': filetype
            }
Code Example #24
File: content.py Project: ishitatsuyuki/relate
def get_yaml_from_repo(repo, full_name, commit_sha, cached=True):
    # type: (Repo_ish, Text, bytes, bool) -> Any

    """Return decoded, struct-ified YAML data structure from
    the given file in *repo* at *commit_sha*.

    See :class:`relate.utils.Struct` for more on
    struct-ification.
    """

    if cached:
        try:
            import django.core.cache as cache
        except ImproperlyConfigured:
            cached = False
        else:
            from six.moves.urllib.parse import quote_plus
            cache_key = "%%%2".join(
                    (CACHE_KEY_ROOT,
                        quote_plus(repo.controldir()), quote_plus(full_name),
                        commit_sha.decode()))

            def_cache = cache.caches["default"]
            result = None
            # Memcache is apparently limited to 250 characters.
            if len(cache_key) < 240:
                result = def_cache.get(cache_key)
            if result is not None:
                return result

    yaml_bytestream = get_repo_blob(
            repo, full_name, commit_sha, allow_tree=False).data
    yaml_text = yaml_bytestream.decode("utf-8")

    if LINE_HAS_INDENTING_TABS_RE.search(yaml_text):
        raise ValueError("File uses tabs in indentation. "
                "This is not allowed.")

    expanded = expand_yaml_macros(
            repo, commit_sha, yaml_bytestream)

    yaml_data = load_yaml(expanded)  # type:ignore
    result = dict_to_struct(yaml_data)

    if cached:
        def_cache.add(cache_key, result, None)

    return result
Code Example #25
File: file.py Project: Readon/cydra
    def load_file(self, filename):
        cfile = codecs.open(filename, "r", "utf-8")
        try:
            return json.load(cfile)
        except ValueError:
            # it is not in JSON format, try YAML if available
            if load_yaml:
                try:
                    cfile.seek(0)
                    return load_yaml(cfile)
                except:
                    logger.exception("Unable to parse YAML")
        finally:
            cfile.close()

        logger.error("Unable to parse configfile: " + filename)
        return {}
Code Example #26
File: content.py Project: alexislitool/courseflow
def get_yaml_from_repo(repo, full_name, commit_sha, cached=True):
    if cached:
        cache_key = "%%%2".join((repo.controldir(), full_name, commit_sha))

        import django.core.cache as cache
        def_cache = cache.caches["default"]
        result = def_cache.get(cache_key)
        if result is not None:
            return result

    result = dict_to_struct(
            load_yaml(get_repo_blob(repo, full_name, commit_sha).data))

    if cached:
        def_cache.add(cache_key, result, None)

    return result
Code Example #27
File: yatiblog.py Project: Epivalent/ampify
def load_layout(name, path, layouts, deps=None):
    """Load the given layout template."""

    template_path = join_path(path, '_layouts', name + '.genshi')
    template_file = open(template_path, 'rb')
    content = template_file.read()
    template_file.close()

    env = {}
    front_matter = match_yaml_frontmatter(content)

    if front_matter:
        env = load_yaml(front_matter.group(1))
        layout = env.pop('layout', None)
        if layout:
            if layout not in layouts:
                load_layout(layout, path, layouts)
            deps = layouts[layout]['__deps__']
            if deps:
                deps = [layout] + deps
            else:
                deps = [layout]
        content = replace_yaml_frontmatter('', content)

    if env.get('text_template'):
        try:
            template = TextTemplate(content, encoding='utf-8')
        except Exception:
            print "Error parsing template:", name
            raise
    else:
        try:
            template = MarkupTemplate(content, encoding='utf-8')
        except Exception:
            print "Error parsing template:", name
            raise

    layouts[name] = {
        '__deps__': deps,
        '__env__': env,
        '__mtime__': stat(template_path).st_mtime,
        '__name__': name,
        '__path__': template_path,
        '__template__': template,
        }
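
A plausible _layouts/<name>.genshi header showing the frontmatter this loader expects: a YAML block delimited by '---' whose optional 'layout' key chains to a parent layout (the field values and template body here are illustrative):

---
layout: base
text_template: false
---
<html xmlns:py="http://genshi.edgewall.org/">
  ...
</html>
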
Code Example #28
File: content.py Project: gboone/relate
def get_raw_yaml_from_repo(repo, full_name, commit_sha):
    """Return decoded YAML data structure from
    the given file in *repo* at *commit_sha*.
    """

    cache_key = "%RAW%%2".join((repo.controldir(), full_name, commit_sha))

    import django.core.cache as cache

    def_cache = cache.caches["default"]
    result = def_cache.get(cache_key)
    if result is not None:
        return result

    result = load_yaml(expand_yaml_macros(repo, commit_sha, get_repo_blob(repo, full_name, commit_sha).data))

    def_cache.add(cache_key, result, None)

    return result
Code Example #29
def load_config(path, module_to_dict=True):
    """
    loads configuration from .json, .yml, and .py.
    always returns a dict, even in the .py case by default; it's up to you how you'd like to
    handle the result, e.g. as-is or using  globals.update(config) to get direct
    variable name access (only works if the result is a dict, which may not be the
    case for example with some .json files defining list structures at the top level.
    if module_to_dict is True (default), python modules are unpacked into an explicit dict.
    """

    filename = os.path.basename(path)
    ext = os.path.splitext(filename)[-1]
    if ext not in legalConfigExtensions:
        raise ValueError("{} is not a currently supported extension. "
                         "Supported file types are: {}"
                         .format(ext, legalConfigExtensions))

    if ext=='.yml':
        from yaml import safe_load as load_yaml
        with open(path,'r') as infile:
            config = load_yaml(infile)
    elif ext=='.json':
        from json import load as load_json
        with open(path,'r') as infile:
            config = load_json(infile)
    elif ext=='.py':
        directory = os.path.split(path)[0]
        module = os.path.splitext(filename)[0]
        cwd = os.getcwd()
        if directory != '':
            os.chdir(directory)
        config_module = import_module(module)
        os.chdir(cwd)
        if module_to_dict:
            names = [name for name in dir(config_module) if not name.startswith('__')]
            config = dict()
            for name in names:
                config[name] = config_module.__dict__[name]
        else:
            config = config_module

    return config
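
A hedged usage sketch (file names are placeholders):

config = load_config('settings.yml')    # dict parsed by yaml.safe_load
config = load_config('settings.py')     # module unpacked into a plain dict
globals().update(config)                # optional: direct variable-name access
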
Code Example #30
File: test_command.py Project: wyuenho/blueberrypy
    def test_setup_rest_controller(self):
        app_yml_file = FakeFile(textwrap.dedent("""
        global:
            engine.sqlalchemy.on: true
            environment: test_suite
        controllers:
            /api:
                controller: !!python/name:blueberrypy.tests.test_command.rest_controller
                /:
                    tools.sessions.storage_type: redis
                    tools.orm_session.on: true
        sqlalchemy_engine:
            url: sqlite://
        email:
            host: localhost
            port: 1025
        """))
        path_file_mapping = {"/tmp/dev/app.yml": app_yml_file}
        self._stub_out_path_and_open(path_file_mapping)

        sys.argv = ("blueberrypy -C /tmp serve").split()
        main()

        app_config = load_yaml(app_yml_file.getvalue())
        controller_config = app_config["controllers"]['/api'].copy()
        controller = controller_config.pop("controller")
        controller_config["/"].update({"request.dispatch": controller})

        merged_app_config = cherrypy.tree.apps["/api"].config
        for k, v in list(controller_config.viewitems()):
            self.assertEqual(v, merged_app_config[k])

        for k, v in list(app_config["global"].viewitems()):
            self.assertEqual(v, merged_app_config["global"][k])

        for k, v in list(app_config["email"].viewitems()):
            self.assertEqual(v, merged_app_config["email"][k])

        for k, v in list(app_config["sqlalchemy_engine"].viewitems()):
            self.assertEqual(v, merged_app_config["sqlalchemy_engine"][k])
Code Example #31
File: config.py Project: umccr/vcf_stuff
from __future__ import division
import os
from os.path import abspath, expanduser, join, dirname, pardir
from yaml import load as load_yaml

from ngs_utils.config import load_yaml_config, fill_dict_from_defaults
from ngs_utils.file_utils import verify_file, verify_module, adjust_path
from ngs_utils.logger import info, critical, warn, err

configs_dirpath = join(dirname(abspath(__file__)), 'configs')
filt_info_defaults_yaml_fpath = join(configs_dirpath, 'RUNINFO_DEFAULTS.yaml')
verify_file(filt_info_defaults_yaml_fpath, is_critical=True)
filt_info_defaults = load_yaml(open(filt_info_defaults_yaml_fpath))
filt_cnf_fpaths = dict(
    exome=join(configs_dirpath, 'run_info_ExomeSeq.yaml'),
    genome=join(configs_dirpath, 'run_info_WGS.yaml'),
    panel=join(configs_dirpath, 'run_info_DeepSeq.yaml'),
    rnaseq=join(configs_dirpath, 'run_info_RNAseq.yaml'),
)
filt_cnf_fpaths['targeted'] = filt_cnf_fpaths['deep_seq'] = filt_cnf_fpaths['panel']
filt_cnf_fpaths['wgs'] = filt_cnf_fpaths['genome']


def get_filt_cfg(opts, target_type=None, vardict_min_freq=None, is_wgs=False):
    if not isinstance(opts, dict):
        opts = opts.__dict__
    if not target_type and 'target_type' in opts:
        target_type = opts['target_type']

    # pick the defaults yaml based on target type, or from input filt_cnf
Code Example #32
"""
Repeatedly checks Drexel's Term Master Schedule for availability of class sections

Author:  Anshul Kharbanda
Created: 9 - 21 - 2018
"""
from register_me_silly import check_enrollment_for_all_classes
from time import sleep
from yaml import load as load_yaml

def run_periodally(interval, func):
    """
    Runs the given function periodically every interval

    :param interval: the interval to run by
    :param func: the function to run
    """
    while True:
        func()
        sleep(interval)

# Main routine
if __name__ == '__main__':
    config = {}
    with open('config.yaml') as f:
        config = load_yaml(f)
    classes = config['classes']
    key = config['key']
    interval = int(config['interval'])
    run_periodally(interval,
        lambda: check_enrollment_for_all_classes(classes, key))
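
A plausible config.yaml for this script, covering the three keys the main routine reads (all values are placeholders):

classes:
  - CS 164
  - MATH 200
key: "<api-key>"
interval: 300
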
Code Example #33
    res = requests.post(api_url,
                        auth=('api', api_key),
                        data={
                            'from': config['from_addr'],
                            'to': config['to_addr'],
                            'subject': subject,
                            'html': mail_text
                        })
    print(res.status_code)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('--input', '-i', required=True)
    parser.add_argument('--config', '-c', required=True)
    parser.add_argument('--subject', '-s')
    parser.add_argument('--serial', '-n', type=int)
    args = parser.parse_args()

    config = load_yaml(open(args.config))

    if args.subject is None:
        if args.serial is None:
            raise ValueError(
                'Please input newsletter subject or serial number')
        subject = config.get('subject', '#{}').format(args.serial)
    else:
        subject = args.subject

    send_email(args.input, subject, config)
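
A hypothetical config file and invocation (the script name and all values are assumptions; note that 'subject' must contain a '{}' placeholder when --serial is used):

# config.yaml
from_addr: news@example.com
to_addr: list@example.com
subject: "Newsletter #{}"

# shell
python send_newsletter.py --input newsletter.html --config config.yaml --serial 42
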
Code Example #34
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--root',
                        required=True,
                        help="Root path of Turbulenz Engine")
    parser.add_argument('--assets-path',
                        required=True,
                        action='append',
                        help="Path to root of source assets")
    parser.add_argument('--build-path',
                        default=path_join('_build', 'assets'),
                        help="Path for intermediate build files")
    parser.add_argument('--install-path',
                        default='staticmax',
                        help="Path to install output assets into")
    parser.add_argument('--verbose', action='store_true')
    parser.add_argument(
        '--imagemagick-convert',
        help="Path to ImageMagick convert executable (enables TGA support)")

    args = parser.parse_args(argv[1:])

    assets_paths = [normpath(p) for p in args.assets_path]
    base_build_path = normpath(args.build_path)
    build_paths = [
        path_join(base_build_path, 'textures'),
        path_join(base_build_path, 'models'),
        path_join(base_build_path, 'sounds'),
        path_join(base_build_path, 'materials'),
        path_join(base_build_path, 'shaders'),
        path_join(base_build_path, 'fonts'),
        path_join(base_build_path, 'videos'),
    ]
    create_dir(base_build_path)
    for path in build_paths:
        create_dir(path)

    create_dir(args.install_path)

    tools = Tools(args, base_build_path)

    with open('deps.yaml', 'rt') as f:
        asset_build_info = load_yaml(f.read())
        if asset_build_info:
            asset_build_info = [
                AssetInfo(asset_info) for asset_info in asset_build_info
            ]
        else:
            asset_build_info = []

    try:
        with open(path_join(base_build_path, 'sourcehashes.json'), 'rt') as f:
            source_list = SourceList(load_json(f.read()), assets_paths)
    except IOError:
        if args.verbose:
            print 'No source hash file'
        source_list = SourceList({}, assets_paths)

    try:
        assets_rebuilt = 0
        for asset_info in asset_build_info:
            rebuild = build_asset(asset_info, source_list, tools,
                                  base_build_path, args.verbose)
            if rebuild:
                assets_rebuilt += 1
    except CalledProcessError as e:
        error('Tool failed - %s' % str(e))
        return 1
    except IOError as e:
        error(str(e))

    with open(path_join(base_build_path, 'sourcehashes.json'), 'wt') as f:
        f.write(dump_json(source_list.get_hashes()))

    print 'Installing assets and building mapping table...'
    mapping = install(asset_build_info, args.install_path)

    with open('mapping_table.json', 'wt') as f:
        f.write(dump_json({'urnmapping': mapping}))

    remove_old_build_files(asset_build_info, build_paths)

    print '%d assets rebuilt' % assets_rebuilt
    print 'Assets build complete'
Code Example #35
    _translate_status(doc)
    _translate_organ(doc)
    _translate_donor_metadata(doc)
    _translate_specimen_type(doc)
    _translate_data_type(doc)
    _translate_timestamp(doc)
    _translate_access_level(doc)


# Utils:

_enums_dir = (Path(__file__).parent.parent.parent.parent
              / 'search-schema' / 'data' / 'definitions' / 'enums')
_enums = {
    path.stem: load_yaml(path.read_text())
    for path in _enums_dir.iterdir()
}


def _map(doc, key, map):
    # The recursion is usually not needed...
    # but better to do it everywhere than to miss one case.
    if key in doc:
        doc[f'mapped_{key}'] = map(doc[key])
    if 'donor' in doc:
        _map(doc['donor'], key, map)
    if 'origin_sample' in doc:
        _map(doc['origin_sample'], key, map)
    if 'source_sample' in doc:
        for sample in doc['source_sample']:
Code Example #36
    def post(self, request, *args, **kwargs):
        if not request.user.is_authenticated:
            return JsonResponse({
                'Status': False,
                'Error': 'Log in required'
            },
                                status=403)

        if request.user.type != 'shop':
            return JsonResponse(
                {
                    'Status': False,
                    'Error': 'For shops only'
                }, status=403)

        url = request.data.get('url')
        if url:
            validate_url = URLValidator()
            try:
                validate_url(url)
            except ValidationError as e:
                return JsonResponse({'Status': False, 'Error': str(e)})
            else:
                stream = get(url).content

                data = load_yaml(stream, Loader=Loader)

                shop, _ = Shop.objects.get_or_create(name=data['shop'],
                                                     user_id=request.user.id)
                for category in data['categories']:
                    category_object, _ = Category.objects.get_or_create(
                        id=category['id'], name=category['name'])
                    category_object.shops.add(shop.id)
                    category_object.save()
                ProductInfo.objects.filter(shop_id=shop.id).delete()
                for item in data['goods']:
                    product, _ = Product.objects.get_or_create(
                        name=item['name'], category_id=item['category'])

                    product_info = ProductInfo.objects.create(
                        product_id=product.id,
                        external_id=item['id'],
                        model=item['model'],
                        price=item['price'],
                        price_rrc=item['price_rrc'],
                        quantity=item['quantity'],
                        shop_id=shop.id)
                    for name, value in item['parameters'].items():
                        parameter_object, _ = Parameter.objects.get_or_create(
                            name=name)
                        ProductParameter.objects.create(
                            product_info_id=product_info.id,
                            parameter_id=parameter_object.id,
                            value=value)

                return JsonResponse({'Status': True})

        return JsonResponse({
            'Status': False,
            'Errors': 'Not all required arguments are specified'
        })
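
A plausible price-list YAML matching the keys this view (and the two similar import views below) reads; all values are placeholders:

shop: Example Shop
categories:
  - id: 224
    name: Smartphones
goods:
  - id: 4216292
    name: Example Phone 256GB
    category: 224
    model: example/phone-256gb
    price: 110000
    price_rrc: 116990
    quantity: 14
    parameters:
      "Screen size (inch)": 6.5
      "Color": black
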
Code Example #37
    def post(self, request, *args, **kwargs):

        try:
            user = Token.objects.get(key=request.headers['token']).user
        except:
            return JsonResponse(
                {
                    'Status': False,
                    'Error': "No user associated with that token exists"
                },
                status=403)

        if user.type != 'shop':
            return JsonResponse(
                {
                    'Status': False,
                    'Error': 'This API is only for shops'
                },
                status=403)

        url = request.data.get('url')
        if url:
            validate_url = URLValidator()
            try:
                validate_url(url)
            except ValidationError as e:
                return JsonResponse({'Status': False, 'Error': str(e)})
            else:
                stream = get(url).content

                data = load_yaml(stream, Loader=Loader)

                shop, _ = Shop.objects.get_or_create(name=data['shop'])
                for category in data['categories']:
                    category_object, _ = Category.objects.get_or_create(
                        id=category['id'], name=category['name'])
                    category_object.shop.add(shop.id)
                    category_object.save()
                ProductInfo.objects.filter(shop_id=shop.id).delete()
                for item in data['goods']:
                    product, _ = Product.objects.get_or_create(
                        name=item['name'], category_id=item['category'])

                    product_info = ProductInfo.objects.create(
                        product_id=product.id,
                        external_id=item['id'],
                        model=item['model'],
                        shop_id=shop.id,
                        name=item['name'],
                        quantity=item['quantity'],
                        price=item['price'],
                        price_rrc=item['price_rrc'])

                    for name, value in item['parameters'].items():
                        parameter_object, _ = Parameter.objects.get_or_create(
                            name=name)
                        ProductParameter.objects.create(
                            product_info_id=product_info.id,
                            parameter_id=parameter_object.id,
                            value=value)

                return JsonResponse({'Status': True})

        return JsonResponse({
            'Status': False,
            'Error': 'Not all required arguments are filled'
        })
Code Example #38
        {
            'message': e.message,
            'absolute_schema_path': _as_path_string(e.absolute_schema_path),
            'absolute_path': _as_path_string(e.absolute_path)
        } for e in validator.iter_errors(doc)
    ]
    doc['mapper_metadata'] = {'validation_errors': errors}


def _as_path_string(mixed):
    '''
    >>> _as_path_string(['a', 2, 'z'])
    '/a/2/z'

    '''
    sep = '/'
    return sep + sep.join(str(s) for s in mixed)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description='Given a source document, transform it.'
    )

    parser.add_argument('input', type=argparse.FileType('r'), help='Path of input YAML/JSON.')
    args = parser.parse_args()
    input_yaml = args.input.read()
    doc = load_yaml(input_yaml)
    transformed = transform(doc)
    print(dumps(transformed, sort_keys=True, indent=2))
Code Example #39
File: generate.py Project: fermenreq/camp
    def load_metamodel(self):
        data = get_data('camp', 'data/metamodel.yml')
        metamodel = load_yaml(data)
        metaclasses = load_all_classes(metamodel)
        for each in metaclasses:
            self._definitions[each.name] = each
Code Example #40
File: puzzle.py Project: dlichtistw/3Puzzle
def load_tiles(filename):
    with open(filename, 'r') as input:
        tiles = load_yaml(input)
    for tile in tiles:
        check_tile(tile)
    return tuple(Tile(*(Face(s) for s in tile)) for tile in tiles)
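
A hypothetical tiles file for load_tiles: one sequence per tile, one entry per face (the exact face encoding depends on Face, which is not shown; these strings are placeholders):

- [red, green, blue]
- [green, blue, red]
- [blue, red, green]
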
Code Example #41
    def post(self, request, format=None):
        """
        Загрузка данных в модель Shop (первоначальная информация о магазине, товарах и категориях)
        В Postman в body нужно выбрать form-data и задать key 'filename' (тип file) с указание файла в поле value
        """

        filename = request.data['filename']
        if filename:
            try:
                FileExtensionValidator(allowed_extensions=['yaml'])(filename)
                data = load_yaml(filename.read(), Loader=Loader)
                # insert into Shop model
                shop_name = data['shop']
                new_filename = f'uploads/{shop_name}.json'
                with open(new_filename, 'w') as file:
                    json.dump(data, file)
                    shop, _ = Shop.objects.get_or_create(
                        name=shop_name,
                        init_file_content=data,
                        filename=new_filename,
                    )
                # insert into Category model
                for category in data['categories']:
                    category, _ = Category.objects.get_or_create(
                        id=category['id'],
                        name=category['name'],
                    )
                    category.shops.add(shop.id)
                    category.save()
                # insert into ProductInfo model
                ProductInfo.objects.filter(shop_id=shop.id).delete()
                for item in data['goods']:
                    product, _ = Product.objects.get_or_create(
                        name=item['name'], category_id=item['category'])
                    product_info = ProductInfo.objects.create(
                        product_id=product.id,
                        external_id=item['id'],
                        model=item['model'],
                        price=item['price'],
                        price_rrc=item['price_rrc'],
                        quantity=item['quantity'],
                        shop_id=shop.id,
                        # name=item['name'],
                    )
                    for name, value in item['parameters'].items():
                        parameter_object, _ = Parameter.objects.get_or_create(
                            name=name)
                        ProductParameter.objects.create(
                            product_info_id=product_info.id,
                            parameter_id=parameter_object.id,
                            value=value,
                            # name=item['name'],
                        )
                return JsonResponse({'Status': True})
            except (ValidationError, IntegrityError) as e:
                return JsonResponse({'Status': False, 'Error': str(e)})
        return JsonResponse({
            'Status': False,
            'Errors': 'Not all required arguments are specified'
        })
Code Example #42
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--root',
                        required=True,
                        help="Root path of Turbulenz Engine")
    parser.add_argument('--assets-path',
                        required=True,
                        action='append',
                        help="Path to root of source assets")
    parser.add_argument('--build-path',
                        default=path_join('_build', 'assets'),
                        help="Path for intermediate build files")
    parser.add_argument('--install-path',
                        default='staticmax',
                        help="Path to install output assets into")
    parser.add_argument('--verbose', action='store_true')
    parser.add_argument(
        '--imagemagick-convert',
        help="Path to ImageMagick convert executable (enables TGA support)")
    try:
        default_num_threads = multiprocessing.cpu_count()
    except NotImplementedError:
        default_num_threads = 1
    parser.add_argument('-j',
                        '--num-threads',
                        help="Specify how many threads to use for building",
                        default=default_num_threads,
                        type=int)

    args = parser.parse_args(argv[1:])

    assets_paths = [normpath(p) for p in args.assets_path]
    base_build_path = normpath(args.build_path)
    create_dir(base_build_path)
    create_dir(args.install_path)

    tools = Tools(args, base_build_path)

    with open('deps.yaml', 'r') as f:
        asset_build_info = load_yaml(f.read())
        if asset_build_info:
            asset_build_info = [
                AssetInfo(asset_info) for asset_info in asset_build_info
            ]
        else:
            asset_build_info = []

    try:
        with open(path_join(base_build_path, 'sourcehashes.json'), 'r') as f:
            source_list = SourceList(load_json(f.read()), assets_paths)
    except IOError:
        if args.verbose:
            print 'No source hash file'
        source_list = SourceList({}, assets_paths)

    # Ensure all sources are in the source list so that the threads aren't writing to the list
    for a in asset_build_info:
        source_list.get_source(a.path)

    class AssetBuildThread(Thread):
        def __init__(self, asset_list, asset_list_mutex):
            Thread.__init__(self)
            self.asset_list = asset_list
            self.mutex = asset_list_mutex
            self.assets_rebuilt = 0
            self.exit = False
            self.error = None

        def run(self):
            while True:
                if self.exit:
                    return 0
                self.mutex.acquire(True)
                try:
                    # Try to pull the head off the list and, if all of its
                    # dependencies are already built, build it. This could
                    # iterate down the remaining list in case the head isn't
                    # buildable but things later in the list are.
                    asset_info = self.asset_list[0]
                    deps = [
                        source_list.get_source(path)
                        for path in asset_info.deps if path != asset_info.path
                    ]
                    if any([not d.built for d in deps]):
                        self.mutex.release()
                        sleep(0.01)
                        continue
                    self.asset_list.pop(0)
                    self.mutex.release()
                except IndexError:
                    self.mutex.release()
                    return 0
                try:
                    rebuild = build_asset(asset_info, source_list, tools,
                                          base_build_path, args.verbose)
                except CalledProcessError as e:
                    self.error = '%s - Tool failed - %s' % (asset_info.path,
                                                            str(e))
                    return 1
                except IOError as e:
                    self.error = str(e)
                    return 1

                if rebuild:
                    self.assets_rebuilt += 1

    num_threads = args.num_threads

    # Sort assets by dependencies
    assets_to_build = []
    while len(assets_to_build) != len(asset_build_info):
        num_assets_sorted = len(assets_to_build)
        for asset in asset_build_info:
            if asset in assets_to_build:
                continue
            for dep in asset.deps:
                if dep != asset.path and dep not in [
                        a.path for a in assets_to_build
                ]:
                    break
            else:
                assets_to_build.append(asset)
        if num_assets_sorted == len(assets_to_build):
            assets_left = [
                a for a in asset_build_info if a not in assets_to_build
            ]
            error('Detected cyclic dependencies between assets within - \n%s' %
                  '\n'.join([a.path for a in assets_left]))
            return 1

    # Create and start threads to build the assets in the sorted dependency list
    asset_threads = []
    asset_list_mutex = Lock()
    for t in xrange(num_threads):
        asset_threads.append(
            AssetBuildThread(assets_to_build, asset_list_mutex))

    for t in xrange(num_threads):
        asset_threads[t].start()

    while any(a.isAlive() for a in asset_threads):
        for t in xrange(num_threads):
            asset_threads[t].join(0.1)
            if not asset_threads[t].isAlive() and asset_threads[t].error:
                # One thread hit an error; ask all the others to finish ASAP
                for o in xrange(num_threads):
                    asset_threads[o].exit = True

    # Update the stats on number of assets rebuilt
    assets_rebuilt = 0
    for t in xrange(num_threads):
        assets_rebuilt += asset_threads[t].assets_rebuilt

    # Dump the state of the build for partial rebuilds
    with open(path_join(base_build_path, 'sourcehashes.json'), 'w') as f:
        f.write(dump_json(source_list.get_hashes()))

    # Check if any build threads failed and if so exit with an error
    for t in xrange(num_threads):
        if asset_threads[t].error:
            error(asset_threads[t].error)
            return 1

    # Dump the mapping table for the built assets
    print 'Installing assets and building mapping table...'
    mapping = install(asset_build_info, args.install_path)
    with open('mapping_table.json', 'w') as f:
        f.write(dump_json({'urnmapping': mapping}))

    # Cleanup any built files no longer referenced by the new mapping table
    remove_old_build_files(asset_build_info, base_build_path)

    print '%d assets rebuilt' % assets_rebuilt
    print 'Assets build complete'
Code Example #43
def main(argv=None):

    argv = argv or sys.argv[1:]
    op = OptionParser(
        usage="Usage: %prog [options] [path/to/source/directory]"
        )

    op.add_option('-d', dest='data_file', default='.articlestore',
                  help="Set the path for a data file (default: .articlestore)")

    op.add_option('-o', dest='output_directory', default='website',
                  help="Set the output directory for files (default: website)")

    op.add_option('-p', dest='package', default='',
                  help="Generate documentation for a Python package (optional)")

    op.add_option('--clean', dest='clean', default=False, action='store_true',
                  help="Flag to remove all generated output files")

    op.add_option('--force', dest='force', default=False, action='store_true',
                  help="Flag to force regeneration of all files")

    op.add_option('--quiet', dest='quiet', default=False, action='store_true',
                  help="Flag to suppress output")

    try:
        options, args = op.parse_args(argv)
    except SystemExit:
        return

    # Normalise various options and load from the config file.
    if args:
        source_directory = args[0]
        source_directory_specified = True
    else:
        source_directory = getcwd()
        source_directory_specified = False

    source_directory = abspath(source_directory)
    chdir(source_directory)

    if not isdir(source_directory):
        raise IOError("%r is not a directory!" % source_directory)

    config_file = join_path(source_directory, 'yatiblog.conf')

    if isfile(config_file):
        config_file_obj = open(config_file, 'rb')
        config_data = config_file_obj.read()
        config_file_obj.close()
        config = load_yaml(config_data)
    elif not source_directory_specified:
        raise IOError("Couldn't find: %s" % config_file)
    else:
        config = {}

    index_pages = config.pop('index_pages', [])
    if not isinstance(index_pages, list):
        raise ValueError("The 'index_pages' config value is not a list!")

    index_pages = dict(
        (index_page.keys()[0], index_page.values()[0])
        for index_page in index_pages
        )

    output_directory = join_path(source_directory, options.output_directory.rstrip('/'))
    if not isdir(output_directory):
        if not exists(output_directory):
            mkdir(output_directory)
        else:
            raise IOError("%r is not a directory!" % output_directory)

    code_pages = config.pop('code_pages', {})

    if code_pages:

        code_layout = code_pages['layout']
        code_paths = code_pages['paths']
        code_files = {}

        git_root = realpath(SCMConfig().root)

        for output_filename, input_pattern in code_paths.items():

            ignore_pattern = None
            if isinstance(input_pattern, dict):
                definition = input_pattern
                input_pattern = definition['pattern']
                if 'ignore' in definition:
                    ignore_pattern = definition['ignore']

            files = run_command(['git', 'ls-files', input_pattern], cwd=git_root)
            files = filter(None, files.splitlines())

            if ignore_pattern is not None:
                ignore_files = run_command(
                    ['git', 'ls-files', ignore_pattern], cwd=git_root
                    )
                for file in ignore_files.splitlines():
                    if file in files:
                        files.remove(file)

            if '%' in output_filename:
                output_pattern = True
            else:
                output_pattern = False

            for file in files:
                directory = basename(dirname(file))
                filename, ext = splitext(basename(file))
                if output_pattern:
                    dest = output_filename % {
                        'dir':directory, 'filename':filename, 'ext':ext
                        }
                else:
                    dest = output_filename
                code_files[
                    join_path(output_directory, dest + '.html')
                    ] = [file, join_path(git_root, file)]

    else:
        code_files = {}
        code_layout = None

    verbose = not options.quiet

    # See if there's a persistent data file to read from.
    data_file = join_path(source_directory, options.data_file)
    if isfile(data_file):
        data_file_obj = open(data_file, 'rb')
        data_dict = load_pickle(data_file_obj)
        data_file_obj.close()
    else:
        data_dict = {}

    # Persist the data file to disk.
    def persist_data_file():
        if data_file:
            data_file_obj = open(data_file, 'wb')
            dump_pickle(data_dict, data_file_obj)
            data_file_obj.close()

    atexit.register(persist_data_file)

    # Figure out what the generated files would be.
    source_files = [
        file for file in listfiles(source_directory) if file.endswith('.txt')
        ]

    generated_files = [
        join_path(output_directory, splitext(file)[0] + '.html')
        for file in source_files
        ]

    index_files = [join_path(output_directory, index) for index in index_pages]

    # Handle --clean support.
    if options.clean:
        for file in generated_files + index_files + [data_file] + code_files.keys():
            if isfile(file):
                if verbose:
                    print "Removing: %s" % file
                rm(file)
        data_dict.clear()
        sys.exit()

    # Figure out layout dependencies for the source .txt files.
    layouts = {}
    sources = {}

    def init_rst_source(source_file, destname=None):

        source_path = join_path(source_directory, source_file)
        source_file_obj = open(source_path, 'rb')
        content = source_file_obj.read()
        source_file_obj.close()

        if not content.startswith('---'):
            return

        filebase, filetype = splitext(source_file)
        filebase = filebase.lower()

        env = load_yaml(match_yaml_frontmatter(content).group(1))
        layout = env.pop('layout')

        if layout not in layouts:
            load_layout(layout, source_directory, layouts)

        content = replace_yaml_frontmatter('', content)

        if MORE_LINE in content:
            lead = content.split(MORE_LINE)[0]
            content = content.replace(MORE_LINE, '')
        else:
            lead = content

        if destname:
            destname = join_path(output_directory, destname)
        else:
            destname = join_path(output_directory, filebase + '.html')

        sources[source_file] = {
            '__content__': content,
            '__deps__': find_include_refs(content),
            '__env__': env,
            '__genfile__': destname,
            '__id__': source_file,
            '__layout__': layout,
            '__lead__': lead,
            '__mtime__': stat(source_path).st_mtime,
            '__name__': basename(destname), # filebase,
            '__outdir__': output_directory,
            '__path__': source_path,
            '__rst__': True,
            '__type__': 'text',
            '__filetype__': filetype
            }

    for source_file in source_files:
        init_rst_source(source_file)

    # And likewise for any source code files.
    def init_rst_source_code(relative_source_path, source_path, destname):

        source_file_obj = open(source_path, 'rb')
        content = source_file_obj.read()
        source_file_obj.close()

        filebase, filetype = splitext(basename(source_path))
        filebase = filebase.lower()

        if not filetype:
            if content.startswith('#!'):
                content = content.split('\n', 1)
                if len(content) == 2:
                    shebang, content = content
                else:
                    shebang = content[0]
                    content = ''
                for interp, ext in SHEBANGS:
                    if interp in shebang:
                        filetype = ext
                        break
            if not filetype:
                raise ValueError("Unknown file type: %s" % source_path)

        sources[source_path] = {
            '__content__': content,
            '__deps__': [],
            '__env__': {'title': filebase},
            '__genfile__': destname,
            '__gitpath__': relative_source_path,
            '__id__': source_path,
            '__layout__': code_layout,
            '__lead__': '',
            '__mtime__': stat(source_path).st_mtime,
            '__name__': basename(destname), # filebase,
            '__outdir__': output_directory,
            '__path__': source_path,
            '__rst__': True,
            '__type__': 'code',
            '__filetype__': filetype
            }

    if code_layout and code_layout not in layouts:
        load_layout(code_layout, source_directory, layouts)

    for destname, (relative_source_path, source_path) in code_files.items():
        init_rst_source_code(relative_source_path, source_path, destname)

    # And likewise for the ``index_pages``.
    render_last = set()

    for index_page, index_source in index_pages.items():
        layout, filetype = splitext(index_source)
        if filetype == '.genshi':
            if layout not in layouts:
                load_layout(layout, source_directory, layouts)
            source_path = join_path(source_directory, '_layouts', index_source)
            sources[index_source] = {
                '__content__': '',
                '__deps__': [],
                '__env__': {},
                '__genfile__': join_path(output_directory, index_page),
                '__id__': index_source,
                '__layout__': layout,
                '__lead__': '',
                '__mtime__': stat(source_path).st_mtime,
                '__name__': basename(index_page),
                '__outdir__': output_directory,
                '__path__': source_path,
                '__rst__': False,
                '__type__': 'index',
                '__filetype__': 'genshi'
                }
        else:
            init_rst_source(index_source, index_page)
        render_last.add(index_source)

    # Update the envs for all the source files.
    for source in sources:
        info = sources[source]
        layout = info['__layout__']
        layout_info = layouts[layout]
        if layout_info['__deps__']:
            for dep_layout in reversed(layout_info['__deps__']):
                info.update(layouts[dep_layout]['__env__'])
        info.update(layouts[layout]['__env__'])
        info.update(get_git_info(info['__path__']))
        info.update(info.pop('__env__'))

    # Figure out which files to regenerate.
    if not options.force:

        no_regen = set()
        for source in sources:

            info = sources[source]
            try:
                gen_mtime = stat(info['__genfile__']).st_mtime
            except OSError:
                continue

            dirty = False
            if gen_mtime < info['__mtime__']:
                dirty = True

            layout = info['__layout__']
            layout_info = layouts[layout]
            if layout_info['__deps__']:
                layout_chain = [layout] + layout_info['__deps__']
            else:
                layout_chain = [layout]

            for layout in layout_chain:
                if gen_mtime < layouts[layout]['__mtime__']:
                    dirty = True
                    break

            for dep in info['__deps__']:
                dep_mtime = stat(join_path(source_directory, dep)).st_mtime
                if gen_mtime < dep_mtime:
                    dirty = True
                    break

            if not dirty:
                no_regen.add(source)

        for source in no_regen:
            if source in render_last:
                continue
            del sources[source]

        remaining = set(sources.keys())
        if remaining == render_last:
            for source in remaining.intersection(no_regen):
                del sources[source]

    BLANK_CODE_LINE = '<div class="syntax"><pre></pre></div>'

    # Regenerate!
    items = sorted(sources.items(), key=lambda x: not x[1]['__rst__'])

    for source, source_info in items:

        info = config.copy()
        info.update(source_info)

        if verbose:
            print
            print LINE
            print 'Converting: [%s] %s' % (info['__type__'], info['__path__'])
            print LINE
            print

        if info['__type__'] == 'code':

            content = info['__content__']
            conf = PROGLANGS[info['__filetype__']]
            if conf[2]:
                content = conf[2](content)
            comment_matcher = conf[3]

            lines = content.split('\n')
            include_section = None

            if lines and lines[0].startswith('#!'):
                lines.pop(0)

            sections = []; new_section = sections.append
            docs_text = []; docs_out = docs_text.append
            code_text = []; code_out = code_text.append

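            # Split the source into alternating docs/code sections in the
            # docco/pycco style: runs of comment lines become reST docs,
            # and a doc block is kept ("included") once real code follows
            # it or an explicit <yatiblog.comment> marker appears.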
            for line in lines:
                if comment_matcher.match(line):
                    line = comment_matcher.sub('', line)
                    if line == '<yatiblog.comment>':
                        include_section = 1
                    else:
                        docs_out(line)
                else:
                    if not line.strip():
                        if docs_text and not include_section:
                            last_line = docs_text[-1].strip()
                            if last_line:
                                last_line_char = last_line[0]
                                for char in last_line:
                                    if char != last_line_char:
                                        break
                                else:
                                    include_section = 1
                    else:
                        if docs_text:
                            include_section = 1
                    if docs_text:
                        if include_section:
                            new_section({
                                'docs_text': '\n'.join(docs_text) + '\n',
                                'code_text': '\n'.join(code_text)
                                })
                            docs_text[:] = []
                            code_text[:] = []
                            include_section = None
                        else:
                            docs_text[:] = []
                        code_out(line)
                    else:
                        code_out(line)

            new_section({'docs_text': '', 'code_text': '\n'.join(code_text)})

            docs = conf[6].join(part['docs_text'] for part in sections)
            code = conf[4].join(part['code_text'] for part in sections)

            docs_html, props = render_rst(docs, with_props=1)
            if ('title' in props) and props['title']:
                info['title'] = props['title']

            code = code.replace('\t', '    ')
            code_html = highlight(code, get_lexer_by_name(conf[0]), SYNTAX_FORMATTER)

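            # conf[4]/conf[6] joined the sections with divider tokens
            # before rendering; the matching patterns in conf[5]/conf[7]
            # now split the rendered HTML back into per-section docs and
            # code fragments so each pair can be emitted side by side.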
            docs_split = conf[7].split(docs_html)
            code_split = conf[5].split(code_html)
            output = info['__output__'] = []
            out = output.append

            if docs_split and docs_split[0]:
                diff = 0
                docs_split.insert(0, u'')
            else:
                diff = 1

            last = len(docs_split) - 2
            for i in range(last + 1):
                code = code_split[i+diff].split(u'<br/>')
                while (code and code[0] == ''):
                    code.pop(0)
                while (code and code[-1] == ''):
                    code.pop()
                code = u'<br />'.join(code)
                if code:
                    if i == last:
                        code = u'<div class="syntax"><pre>' + code
                    else:
                        code = u'<div class="syntax"><pre>' + code + "</pre></div>"
                out((docs_split[i], code))

            while output and output[0][1] == BLANK_CODE_LINE:
                if not output[0][0]:
                    output.pop(0)
                else:
                    break

        elif info['__rst__']:
            with_props = info.get('with_props', False)
            if with_props:
                output, props = render_rst(info['__content__'], with_props=1)
                if ('title' in props) and props['title']:
                    info['title'] = props['title']
                info['__output__'] = output
            else:
                output = info['__output__'] = render_rst(info['__content__'])

            if info['__lead__'] == info['__content__']:
                info['__lead_output__'] = info['__output__']
            else:
                info['__lead_output__'] = render_rst(info['__lead__'])
        else:
            output = ''

        layout = info['__layout__']
        layout_info = layouts[layout]

        if layout_info['__deps__']:
            layout_chain = [layout] + layout_info['__deps__']
        else:
            layout_chain = [layout]

        for layout in layout_chain:
            template = layouts[layout]['__template__']
            output = template.generate(
                content=output,
                yatidb=data_dict,
                **info
                ).render('xhtml', encoding=None)

        if isinstance(output, unicode):
            output = output.encode('utf-8')

        data_dict[info['__name__']] = info

        output_file = open(info['__genfile__'], 'wb')
        output_file.write(output)
        output_file.close()

        if verbose:
            print 'Done!'

    sys.exit()
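
The frontmatter handling above leans on project helpers (match_yaml_frontmatter, replace_yaml_frontmatter) defined elsewhere in yatiblog. A minimal sketch of the same idea, assuming PyYAML's safe_load and a '---'-delimited header (the regex and function name here are illustrative, not the project's own):

import re
from yaml import safe_load as load_yaml

# Illustrative matcher: a document that opens with a YAML mapping
# between two '---' lines, followed by the reST body.
FRONTMATTER = re.compile(r'\A---\s*\n(.*?)\n---\s*\n', re.DOTALL)

def split_frontmatter(content):
    """Return (env, body), where env is the parsed YAML header."""
    match = FRONTMATTER.match(content)
    if not match:
        return {}, content
    return load_yaml(match.group(1)) or {}, content[match.end():]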
コード例 #44
0
def main(argv=None, abort=False, debug=None):
    """Drive the validator.
    This function acts as the command line interface backend.
    There is some duplication to support testability.
    """
    init_logger(level=logging.DEBUG if debug else None)
    forest = argv if argv else sys.argv[1:]
    if not forest:
        print("Usage: gelee paths-to-files")
        return 0, "USAGE"
    num_trees = len(forest)
    LOG.debug("Guarded dispatch forest=%s, num_trees=%d", forest, num_trees)

    LOG.info("Starting validation visiting a forest with %d tree%s", num_trees,
             '' if num_trees == 1 else 's')
    failure_path_reason = "Failed validation for path %s with error: %s"
    total, folders, ignored, csvs, inis, jsons, tomls, xmls, yamls = 0, 0, 0, 0, 0, 0, 0, 0, 0
    failures = 0
    for tree in forest:
        for path in visit(tree):
            LOG.debug(" - path=%s, total=%d", path, total)
            total += 1
            if not path.is_file():
                folders += 1
                continue

            final_suffix = '' if not path.suffixes else path.suffixes[-1].lower()

            if final_suffix == ".csv":
                if not path.stat().st_size:
                    LOG.error(failure_path_reason, path,
                              "ERROR: Empty CSV file")
                    if abort:
                        return 1, "ERROR: Empty CSV file"
                    failures += 1
                    continue

                with open(path, newline='') as handle:
                    try:
                        try:
                            dialect = csv.Sniffer().sniff(
                                handle.read(1024), ",\t; ")
                            handle.seek(0)
                        except csv.Error as err:
                            if "could not determine delimiter" in str(err).lower():
                                # csv.Dialect cannot be instantiated and then
                                # mutated (its __init__ validates the attrs),
                                # so fall back to a minimal comma-separated
                                # dialect subclass instead.
                                class FallbackDialect(csv.Dialect):
                                    delimiter = ','
                                    quotechar = '"'
                                    lineterminator = '\r\n'
                                    quoting = csv.QUOTE_NONE
                                    strict = True

                                dialect = FallbackDialect()
                                handle.seek(0)
                            else:
                                LOG.error(failure_path_reason, path,
                                          slugify(err))
                                if abort:
                                    return 1, str(err)
                                failures += 1
                        try:
                            reader = csv.reader(handle, dialect)
                            for _ in reader:
                                pass
                            csvs += 1
                        except csv.Error as err:
                            LOG.error(failure_path_reason, path, slugify(err))
                            if abort:
                                return 1, str(err)
                            failures += 1
                    except Exception as err:
                        LOG.error(failure_path_reason, path, slugify(err))
                        if abort:
                            return 1, str(err)
                        failures += 1
            elif final_suffix == ".ini":
                config = configparser.ConfigParser()
                try:
                    config.read(path)
                    inis += 1
                except configparser.NoSectionError as err:
                    LOG.error(failure_path_reason, path, slugify(err))
                    if abort:
                        return 1, str(err)
                    failures += 1
                except configparser.DuplicateSectionError as err:
                    LOG.error(failure_path_reason, path, slugify(err))
                    if abort:
                        return 1, str(err)
                    failures += 1
                except configparser.DuplicateOptionError as err:
                    LOG.error(failure_path_reason, path, slugify(err))
                    if abort:
                        return 1, str(err)
                    failures += 1
                except configparser.NoOptionError as err:
                    LOG.error(failure_path_reason, path, slugify(err))
                    if abort:
                        return 1, str(err)
                    failures += 1
                except configparser.InterpolationDepthError as err:
                    LOG.error(failure_path_reason, path, slugify(err))
                    if abort:
                        return 1, str(err)
                    failures += 1
                except configparser.InterpolationMissingOptionError as err:
                    LOG.error(failure_path_reason, path, slugify(err))
                    if abort:
                        return 1, str(err)
                    failures += 1
                except configparser.InterpolationSyntaxError as err:
                    LOG.error(failure_path_reason, path, slugify(err))
                    if abort:
                        return 1, str(err)
                    failures += 1
                except configparser.InterpolationError as err:
                    LOG.error(failure_path_reason, path, slugify(err))
                    if abort:
                        return 1, str(err)
                    failures += 1
                except configparser.MissingSectionHeaderError as err:
                    LOG.error(failure_path_reason, path, slugify(err))
                    if abort:
                        return 1, str(err)
                    failures += 1
                except configparser.ParsingError as err:
                    LOG.error(failure_path_reason, path, slugify(err))
                    if abort:
                        return 1, str(err)
                    failures += 1
            elif final_suffix in (".geojson", ".json", ".toml"):
                loader = toml.load if final_suffix == ".toml" else json.load
                with open(path, "rt", encoding="utf-8") as handle:
                    try:
                        _ = loader(handle)
                        if final_suffix == ".toml":
                            tomls += 1
                        else:
                            jsons += 1
                    except Exception as err:
                        LOG.error(failure_path_reason, path, slugify(err))
                        if abort:
                            return 1, str(err)
                        failures += 1
            elif final_suffix == ".xml":
                if not path.stat().st_size:
                    LOG.error(failure_path_reason, path,
                              "ERROR: Empty XML file")
                    if abort:
                        return 1, "ERROR: Empty XML file"
                    failures += 1
                    continue

                xml_tree, message = load_xml(path)
                if xml_tree:
                    xmls += 1
                else:
                    LOG.error(failure_path_reason, path, slugify(message))
                    if abort:
                        return 1, str(message)
                    failures += 1
            elif final_suffix in (".yaml", ".yml"):
                with open(path, "rt", encoding="utf-8") as handle:
                    try:
                        _ = load_yaml(handle, Loader=LoaderYaml)
                        yamls += 1
                    except Exception as err:
                        LOG.error(failure_path_reason, path, slugify(err))
                        if abort:
                            return 1, str(err)
                        failures += 1
            else:
                ignored += 1
                continue

    success = "Successfully validated"
    pairs = (
        (csvs, "CSV"),
        (inis, "INI"),
        (jsons, "JSON"),
        (tomls, "TOML"),
        (xmls, "XML"),
        (yamls, "YAML"),
    )
    for count, kind in pairs:
        if count:
            LOG.info("- %s %d total %s file%s.", success, count, kind,
                     "" if count == 1 else "s")

    configs = csvs + inis + jsons + tomls + xmls + yamls
    LOG.info(  # TODO remove f-strings also here
        f"Finished validation of {configs} configuration file{'' if configs == 1 else 's'}"
        f" with {failures} failure{'' if failures == 1 else 's'}"
        f" visiting {total} path{'' if total == 1 else 's'}"
        f" (ignored {ignored} non-config file{'' if ignored == 1 else 's'}"
        f" in {folders} folder{'' if folders == 1 else 's'})")
    print(f"{'OK' if not failures else 'FAIL'}")

    return 0, ""
コード例 #45
0
def cmd_run(args):
    """
    Simulate an economy for a specified number of steps
    """
    description = {}
    logger.info('parsing input files...')
    for desc_file in args.description_file:
        data = load_yaml(desc_file)
        if data is None:
            data = {}
        description.update(data)
        desc_file.close()
    logger.info('input files parsed')

    # Extract major data substructures
    config_system = description.get('system', None)
    config_market = description.get('market', None)
    config_careers = description.get('careers', None)
    config_units = description.get('units', None)
    config_unit_ids = description.get('next_unit_ids', None)

    # Validate the system data substructure
    try:
        if not isinstance(config_system, dict):
            raise ValueError('system configuration is not a dictionary')
        if not isinstance(config_system['t'], int):
            raise ValueError('iteration number t is not an integer')
        if not isinstance(config_system['interest_rate'], float):
            raise ValueError('interest rate is not a float')
        if not isinstance(config_system['min_balance'], float):
            raise ValueError('minimum balance is not a float')
        if not isinstance(config_system['max_age'], int):
            raise ValueError('maximum age is not an integer')
        if not isinstance(config_system['eat_every'], int):
            raise ValueError('eat interval is not an integer')
        if not isinstance(config_system['spawn_every'], int):
            raise ValueError('spawn interval is not an integer')

        # Parse the data substructures
        market = parse_market(config_market)
        careers = parse_careers(config_careers, market)
        units = parse_units(config_units, careers)
        parse_unit_ids(config_unit_ids)
    except (KeyError, ValueError) as exc:
        logger.error(str(exc), exc_info=True)
        exit(1)

    t_0 = config_system['t']
    rate = config_system['interest_rate']
    min_balance = config_system['min_balance']
    max_age = config_system['max_age']
    eat_every = config_system['eat_every']
    spawn_every = config_system['spawn_every']

    # Run the economy
    from .market import ask_at
    for t in xrange(t_0, t_0 + args.steps):
        # Step time
        step_time(t,
                  market,
                  careers,
                  units,
                  rate,
                  min_balance=min_balance,
                  max_age=max_age,
                  eat_every=eat_every,
                  spawn_every=spawn_every)

        # Calculate population
        population = 0
        for career_rec in careers.values():
            population += career_rec['stats']['population']

        # Inflate
        inflate(market, population)

        # Display aggregate statistics every report_interval
        if (t % args.report_interval) == 0:
            logger.info('market: %r', {k: ask_at(market, k) for k in market})

            res_stats = {
                k: '%(bought)d bot, %(sold)d sld' % market[k]
                for k in market
            }
            logger.info('market: %r', res_stats)
            for career, career_rec in careers.items():
                logger.info('t=%06d: career %s: %r', t, career,
                            career_rec['stats'])

    # Save the results
    config_system['t'] = t
    results = dict(system=config_system,
                   market=save_market(market),
                   careers=save_careers(careers),
                   units=save_units(units),
                   next_unit_ids=save_unit_ids())
    print save_yaml(results)
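
The run therefore round-trips all of its state through YAML: whatever save_yaml prints can be fed back in as the next run's description file. Assuming load_yaml/save_yaml wrap PyYAML's safe functions, the symmetry is simply:

from yaml import safe_dump as save_yaml, safe_load as load_yaml

state = {'system': {'t': 100, 'interest_rate': 0.05, 'min_balance': 0.0}}
text = save_yaml(state, default_flow_style=False)
assert load_yaml(text) == state  # dump and load round-trip cleanly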
コード例 #46
0
ファイル: play.py プロジェクト: nandub/phoebe
def main():
    # write PID to file
    with open('player_pidfile', 'w') as pidfile:
        print >> pidfile, getpid()

    # check length of arguments;
    # 1 = error, 2 = idle, 3 = media, 4 = media w/modifier
    stream_id = None
    media_uri = None
    live_source = False

    if len(argv) < 2:
        logging.critical('error: no stream ID specified.')
        sys_exit(4)

    stream_id = argv[1]

    if len(argv) > 2:
        # media; capture media_uri
        media_uri = argv[2]

    if len(argv) > 3 and argv[3] == 'live':
        live_source = True

    # import config
    global_config = None
    with open('config.yaml', 'r') as config_file:
        global_config = load_yaml(config_file)
        logging.info('configuration file loaded and parsed.')

        if type(global_config) is not dict:
            logging.critical(
                'error: configuration file parsed into invalid type.')
            sys_exit(2)

        if len(global_config) <= 0:
            logging.critical(
                'error: configuration file parsed into empty object.')
            sys_exit(3)

    # craft process title from name (p-{name})
    process_title = 'pp-{}'.format(global_config['name'])

    # set loglevel
    target_lvl = global_config['log_level']
    if hasattr(logging, target_lvl):
        logging.getLogger().setLevel(getattr(logging, target_lvl))

    # set lock to prevent concurrency and set proctitle
    global lock_socket
    lock_socket = socket(AF_UNIX, SOCK_DGRAM)
    try:
        lock_socket.bind('\0{}'.format(process_title))
        logging.info('got process lock')
    except socket_error:
        logging.critical('failed to get process lock; already running?')
        sys_exit(1)

    # set custom process title
    setproctitle(process_title)

    # declare now to allow access by _exit
    state = None
    runtime = None
    conn = None
    listener = None

    def _exit():
        logging.debug(
            'stopping player, closing control connection, and exiting')
        if runtime:
            # 0: init   1: started    2: stopped
            if state.value == 1:
                runtime.stop()
        if conn:
            conn.close()
        if listener:
            listener.close()

    # handle signals gracefully
    def _exit_on_signal(signum, frame):
        logging.warning('caught signal {}'.format(str(signum)))
        _exit()

    signal(SIGABRT, _exit_on_signal)
    signal(SIGINT, _exit_on_signal)
    signal(SIGHUP, _exit_on_signal)
    signal(SIGQUIT, _exit_on_signal)
    signal(SIGTERM, _exit_on_signal)

    state = Value('i', 0)

    # create new runtime object based on type
    runtime = None
    if media_uri:
        runtime = Player(global_config['SquishPlayer'], state, stream_id,
                         media_uri, live_source)
    else:
        runtime = Idler(global_config['SquishPlayer'], state, stream_id)

    # set up listener for comms with bot
    address = global_config['control_socket_file']
    listener = Listener(address, authkey='phoebe')

    # block on connection from bot
    logging.debug(
        'awaiting control connection on socket at {}'.format(address))
    conn = listener.accept()

    # connection made; start runtime (runs in new thread)
    logging.debug('connection accepted')
    runtime.start()

    # enter command loop (in this thread)
    while True:
        # exit if player mainloop no longer running
        # 0: init   1: started    2: stopped
        if state.value == 2:
            logging.info('player thread no longer alive')
            break

        # check for a command
        if not conn.poll(1):
            continue

        try:
            # wait for a command
            command = conn.recv()
        except (EOFError, IOError):
            logging.error('Error encountered when attempting '
                          'to receive from control connection')
            break

        # parse into name/optional args
        cmd_name = command[0].lower()
        cmd_arg = None
        if len(command) > 1:
            cmd_arg = command[1]

        # execute command actions
        if cmd_name == 'play':
            runtime.play()

        # stop player and exit
        elif cmd_name == 'stop':
            logging.debug('stopping player on command')
            break

        # retrieve current position, duration
        elif cmd_name == 'getpos':
            if hasattr(runtime, 'get_play_position'):
                pos = runtime.get_play_position()
                if pos:
                    conn.send(['OK', pos])
                else:
                    conn.send(['ERROR', 'no position available'])
            else:
                conn.send(['ERROR', 'getpos not supported by active runtime'])

        elif cmd_name == 'getlivepos':
            if hasattr(runtime, 'get_live_play_position'):
                pos = runtime.get_live_play_position()
                if pos:
                    conn.send(['OK', pos])
                else:
                    conn.send(['ERROR', 'no live position available'])
            else:
                conn.send(
                    ['ERROR', 'getlivepos not supported by active runtime'])

        # seek by specified amount
        elif cmd_name == 'seek':
            if hasattr(runtime, 'seek'):
                if cmd_arg:
                    result = runtime.seek(cmd_arg)
                    if result:
                        conn.send(['OK'])
                    else:
                        conn.send(['ERROR', 'seek failed'])
            else:
                conn.send(['ERROR', 'seek not supported by active runtime'])

        # jump to specified position
        elif cmd_name == 'jump':
            if hasattr(runtime, 'seek'):
                error = False
                if cmd_arg:
                    pos = runtime.get_play_position()
                    if pos:
                        jump_to = cmd_arg - pos[0]
                        result = runtime.seek(jump_to)
                        if result:
                            conn.send(['OK'])
                        else:
                            error = True
                    else:
                        error = True
                if error:
                    conn.send(['ERROR', 'jump failed'])
            else:
                conn.send(['ERROR', 'jump not supported by active runtime'])

    # out of command loop; clean up and exit
    logging.debug('exited command loop')
    _exit()
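
The command loop above speaks a simple list-based protocol over a multiprocessing.connection socket. A hypothetical bot-side client (the helper name and timeout are illustrative) could drive it like this:

from multiprocessing.connection import Client

def send_command(address, *command):
    """Send one command list and return the reply, if any arrives."""
    conn = Client(address, authkey='phoebe')
    try:
        conn.send(list(command))
        if conn.poll(5):  # 'play' and 'stop' send no reply
            return conn.recv()
        return None
    finally:
        conn.close()

# e.g. send_command(global_config['control_socket_file'], 'seek', 10)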
コード例 #47
0
        return file_manager.load_analysis_from_url(pathname)
        # replay_file = file_manager.get_replay_from_url(pathname)
        # return replay_analyzer.get_analysis_for_file(replay_file)
    # If the user tries to reach a different page, return a 404 message
    return dbc.Jumbotron([
        html.H1("404: Not found", className="text-danger"),
        html.Hr(),
        html.P(f"The pathname {pathname} was not recognised..."),
    ])


if __name__ == "__main__":
    player_names = []
    try:
        with open("config.yaml", "r") as yamlfile:
            cfg = load_yaml(yamlfile, Loader=FullLoader)
    except FileNotFoundError:
        print(
            "Config file doesn't exist, please create a file named 'config.yaml' in the same directory as "
            "benchmarker.py")
        exit(1)

    if 'directories' not in cfg:
        print("Config file does not contain a directories section.")
        exit(1)

    try:
        player_names = cfg['general']['player_names'].split(',')
    except KeyError as e:
        print(
            "Unable to parse player name(s). Must be under general and "
            "formatted as a comma separated list.")
        exit(1)
コード例 #48
0
ファイル: yaml.py プロジェクト: fchauvel/camp
    def load_test_reports(self, stream):
        return load_yaml(stream)
コード例 #49
0
def load_partner_info(url=None, file_obj=None, user_id=0):
    """
    Обновление прайса от поставщика
    """

    if not url and not (file_obj and isinstance(file_obj, FileClass)):
        return ResponseBadRequest('Not all required arguments were given. Provide a url or upload a file')
    if file_obj:
        stream = file_obj.read()
        _, extension = os.path.splitext(file_obj.name)
        mime = ''
    else:
        validate_url = URLValidator()
        try:
            validate_url(url)
        except ValidationError as e:
            return ResponseBadRequest(e)

        try:
            response = get(url)
            response.raise_for_status()
        except RequestException as e:
            return ResponseNotFound(e)
        _, extension = os.path.splitext(url)
        stream = response.content
        mime = response.headers.get('content-type')
    try:
        if mime in ('application/yaml', 'text/yaml'):
            data = load_yaml(stream, Loader=Loader)
        elif mime in ('application/json', 'text/json'):
            data = load_json(stream)
        elif mime in ('application/xml', 'text/xml'):
            data = load_xml(stream)
        elif extension == '.yaml':
            data = load_yaml(stream, Loader=Loader)
        elif extension == '.json':
            data = load_json(stream)
        elif extension == '.xml':
            data = load_xml(stream)
        else:
            return ResponseBadRequest('Unrecognised file format {}', url)
    except (ParseError, YAMLError, ValueError, TypeError) as e:
        return ResponseBadRequest('Invalid file format: {}', e)

    # Check format:
    if not is_dict(data):
        return ResponseBadRequest('Invalid file format: the source data must be a dictionary')
    version = data.get('version')
    if not version or version != 'v1.0':
        return ResponseBadRequest('Invalid file format: unsupported version {}', version)
    if not data.get('shop'):
        return ResponseBadRequest('Invalid file format: shop name missing or invalid')
    categories = data.get('categories', [])
    if not is_list(categories):
        return ResponseBadRequest('Invalid file format: categories must be given as a list')
    for category in categories:
        if not is_dict(category):
            return ResponseBadRequest('Invalid file format: each category must be described as a dictionary')
        if not category.get('name'):
            return ResponseBadRequest('Invalid file format: category name missing or invalid')
    goods = data.get('goods', [])
    if not is_list(goods):
        return ResponseBadRequest('Invalid file format: goods must be given as a list')
    names = set()
    for item in goods:
        if not is_dict(item):
            return ResponseBadRequest('Invalid file format: each product must be described as a dictionary')
        name = item.get('name')
        category = item.get('category')
        price = to_decimal(item.get('price'))
        price_rrc = to_decimal(item.get('price_rrc'))
        quantity = to_positive_int(item.get('quantity'))
        if not name or not category or None in (price, price_rrc, quantity):
            return ResponseBadRequest('Invalid file format: invalid information for product {}', name)
        if name in names:
            return ResponseBadRequest('Invalid file format: duplicate product names')
        names.add(name)
        parameters = item.get('parameters')
        if parameters is not None:
            if not is_list(parameters):
                return ResponseBadRequest('Invalid file format: parameters for product {} must be given as an array of name and value fields', name)
            parameter_names = set()
            for entry in parameters:
                if not is_dict(entry):
                    return ResponseBadRequest('Invalid file format: each product parameter must be described as a dictionary (product {})', name)
                par_name = entry.get('name')
                if not par_name or entry.get('value') is None:
                    return ResponseBadRequest('Invalid file format: parameters for product {} must have non-empty name and value fields', name)
                if par_name in parameter_names:
                    return ResponseBadRequest('Invalid file format: duplicate parameter names for product {}', name)
                parameter_names.add(par_name)
    # Actions:
    shop, _ = Shop.objects.get_or_create(name=data['shop'], defaults=dict(user_id=user_id))
    if shop.user_id != user_id:
        return ResponseForbidden('The shop does not belong to the user')
    for category in data.get('categories', []):
        category_object, _ = Category.objects.get_or_create(name=category['name'])
        category_object.shops.add(shop.id)
        category_object.save()
    ProductInfo.objects.filter(shop_id=shop.id).delete()
    for item in data.get('goods', []):
        category_object, _ = Category.objects.get_or_create(name=item['category'])
        product, _ = Product.objects.get_or_create(name=item['name'], category_id=category_object.id)

        product_info = ProductInfo.objects.create(product_id=product.id,
                                                  external_id=item.get('id'),
                                                  price=item['price'],
                                                  price_rrc=item['price_rrc'],
                                                  quantity=item['quantity'],
                                                  shop_id=shop.id)
        for entry in item.get('parameters', []):
            parameter_object, _ = Parameter.objects.get_or_create(name=entry.get('name'))
            ProductParameter.objects.create(product_info_id=product_info.id,
                                            parameter_id=parameter_object.id,
                                            value=entry.get('value'))
    return ResponseCreated()
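
For reference, a minimal price list that passes every check above might look like the following (all values are made up; the optional id field feeds external_id):

from yaml import safe_load

SAMPLE = """
version: v1.0
shop: Example Shop
categories:
- name: Phones
goods:
- id: 1
  name: Phone X
  category: Phones
  price: 10000.00
  price_rrc: 11000.00
  quantity: 5
  parameters:
  - name: color
    value: black
"""

data = safe_load(SAMPLE)
assert data['version'] == 'v1.0' and data['goods'][0]['quantity'] == 5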