Example #1
def test_template_within_template():
    config_str = """
    {
        templates: {
            a_layer: {
                weight_init: $weight_init,
            },
            glorotuniform: 'funny glorot uniform template $range',
        },
        variants: [[
            {
                weight_init: [$glorotuniform], 
                range: [2], 
                layer:[$a_layer],
            },
        ]]
    }
    """
    
    yaml.add_constructor(u'!TransformValsToString', transform_vals_to_string_constructor)
    
    config_obj = yaml.load(config_str.replace("templates:", 
        "templates: !TransformValsToString"))
    
    all_params = create_variants_recursively(config_obj['variants'])
    templates = config_obj['templates']
    final_params, templates_to_parameters = merge_parameters_and_templates(all_params, templates)
    assert final_params == [{'weight_init': "'funny glorot uniform template 2'\n",
        'range': 2,
        'layer': "{weight_init: 'funny glorot uniform template 2'\n}\n"}]
     
    assert templates_to_parameters ==  [dict(a_layer=set(['weight_init']),
                                   glorotuniform=set(['range']))]
Example #2
def parse(u):
    """
    Parse the contents of a spec test file, and return a dict.

    Arguments:

      u: a unicode string.

    """
    # TODO: find a cleaner mechanism for choosing between the two.
    if yaml is None:
        # Then use json.

        # The only way to get the simplejson module to return unicode strings
        # is to pass it unicode.  See, for example--
        #
        #   http://code.google.com/p/simplejson/issues/detail?id=40
        #
        # and the documentation of simplejson.loads():
        #
        #   "If s is a str then decoded JSON strings that contain only ASCII
        #    characters may be parsed as str for performance and memory reasons.
        #    If your code expects only unicode the appropriate solution is
        #    decode s to unicode prior to calling loads."
        #
        return json.loads(u)
    # Otherwise, yaml.

    def code_constructor(loader, node):
        value = loader.construct_mapping(node)
        return eval(value['python'], {})

    yaml.add_constructor('!code', code_constructor)
    return yaml.load(u)
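A minimal usage sketch of the '!code' tag registered by parse() above; the spec snippet and its key name are hypothetical:

# Hypothetical spec snippet: the '!code' mapping is evaluated by code_constructor.
spec = u'upper: !code\n  python: "lambda s: s.upper()"\n'
data = parse(spec)
assert data['upper']('abc') == 'ABC'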
Example #3
    def __init__(self, environment_name, nova_descriptor_file=None):
        self._nova_descriptor_file = nova_descriptor_file or 'nova.yml'
        self._environment_name = environment_name
        self._environment = None
        self._codedeploy_app = None

        self.templates_used = dict()
        yaml.add_constructor("!include", yaml_include)

        with open(os.path.join(spec.__path__[0], 'nova_service_schema.yml'), 'r') as schemaYaml:
            schema = yaml.load(schemaYaml)

        v = Validator(schema)
        try:
            with open(self._nova_descriptor_file, 'r') as novaYaml:
                self.service_spec = yaml.safe_load(novaYaml)

            # Validate loaded dictionary
            valid = v.validate(self.service_spec)
            if not valid:
                raise NovaError("Invalid nova service descriptor file '%s': %s" % (self._nova_descriptor_file, v.errors))
            else:
                self.service = Service.load(self.service_spec)
                self.service_name = self.service.name
                self.service_port = self.service.port
                self.service_healthcheck_url = self.service.healthcheck_url
        except IOError:
            raise NovaError("No nova service descriptor found at '%s'" % self._nova_descriptor_file)
Example #4
def get_conf_dict(file):
    mime_type = mimetypes.guess_type(file)[0]

    # Do the type check first, so we don't load huge files that won't be used
    if "xml" in mime_type:
        confile = open(file, "r")
        data = confile.read()
        confile.close()
        return xmltodict(data, "utf-8")
    elif "yaml" in mime_type:
        import yaml

        def custom_str_constructor(loader, node):
            return loader.construct_scalar(node).encode("utf-8")

        yaml.add_constructor(u"tag:yaml.org,2002:str", custom_str_constructor)
        confile = open(file, "r")
        data = confile.read()
        confile.close()
        return yaml.load(data)
    elif "json" in mime_type:
        import json

        confile = open(file, "r")
        data = confile.read()
        confile.close()
        return json.loads(data)

    return False
Example #5
def LoadConfigDict(config_paths, model_params):
  """Loads config dictionary from specified yaml files or command line yaml."""

  # Ensure that no duplicate keys can be loaded (causing pain).
  yaml.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                       NoDuplicatesConstructor)

  # Handle either ',' or '#' separated config lists, since borg will only
  # accept '#'.
  sep = ',' if ',' in config_paths else '#'

  # Load flags from config file.
  final_config = {}
  if config_paths:
    for config_path in config_paths.split(sep):
      config_path = config_path.strip()
      if not config_path:
        continue
      config_path = os.path.abspath(config_path)
      tf.logging.info('Loading config from %s', config_path)
      with tf.gfile.GFile(config_path.strip()) as config_file:
        config_flags = yaml.load(config_file)
        final_config = DeepMergeDict(final_config, config_flags)
  if model_params:
    model_params = MaybeLoadYaml(model_params)
    final_config = DeepMergeDict(final_config, model_params)
  tf.logging.info('Final Config:\n%s', yaml.dump(final_config))
  return final_config
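NoDuplicatesConstructor is not shown in this example; a minimal sketch of the usual duplicate-key-rejecting mapping constructor (an assumption, not necessarily the implementation used here):

def NoDuplicatesConstructor(loader, node, deep=False):
  """Reject mappings that repeat a key instead of silently keeping the last value."""
  mapping = {}
  for key_node, value_node in node.value:
    key = loader.construct_object(key_node, deep=deep)
    if key in mapping:
      raise yaml.constructor.ConstructorError(
          'while constructing a mapping', node.start_mark,
          'found duplicate key (%s)' % key, key_node.start_mark)
    mapping[key] = loader.construct_object(value_node, deep=deep)
  return mapping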
Example #6
def register_tags(server_root):
    def join(loader, node):
        seq = loader.construct_sequence(node, deep=True)
        return ''.join([str(i) for i in seq])

    def root(loader, node):
        return server_root + ('' if server_root.endswith('/') else '/')

    def replace(loader, node):
        seq = loader.construct_sequence(node, deep=True)
        return [x.replace(seq[0], seq[1]) for x in seq[2]]

    def prepend(loader, node):
        seq = loader.construct_sequence(node, deep=True)
        if isinstance(seq[1], str):
            return seq[0] + seq[1]
        return [seq[0] + s for s in seq[1]]

    def flatten(loader, node):
        seq = loader.construct_sequence(node, deep=True)
        result = []
        for item in seq:
            if isinstance(item, list):
                result.extend(item)
            else:
                result.append(item)
        return result

    yaml.add_constructor('!join', join)
    yaml.add_constructor('!root', root)
    yaml.add_constructor('!replace', replace)
    yaml.add_constructor('!prepend', prepend)
    yaml.add_constructor('!flatten', flatten)
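A minimal usage sketch for the tags registered by register_tags(); the server root and document below are hypothetical:

register_tags("https://example.com")
doc = """
base: !root ''
title: !join [status, " ", page]
scripts: !prepend [/static/, [app.js, app.css]]
"""
print(yaml.load(doc))
# expected (hypothetical): {'base': 'https://example.com/',
#                           'title': 'status page',
#                           'scripts': ['/static/app.js', '/static/app.css']}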
def anonymize_file():
    """Entry point to convert yaml db file to anon version

    parameters are read from the command line.  call with '-h' for
    options and documentation

    """
    parser = argparse.ArgumentParser()
    parser.add_argument("file", type=file, help="the file to anonymize")
    parser.add_argument("-o", "--output",
                        help="file for output, by default hits stdout")
    args = parser.parse_args()
    if args.output:
        output = open(args.output, 'wb')
    else:
        output = sys.stdout

    yaml.add_constructor(u'!DAO', obj_loader)
    objects = yaml.load(args.file.read())
    for obj in objects:
        obj.anonymize()

    output.write(yaml.dump(objects, default_flow_style=False))
    
    if args.output:
        output.close()
def main():
    requestsexceptions.squelch_warnings()
    yaml.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                         construct_yaml_map)

    yaml.add_representer(OrderedDict, project_representer,
                         Dumper=IndentedDumper)

    data = yaml.load(open('openstack_catalog/web/static/assets.yaml'))

    assets = []
    for a in data['assets']:
        url = a.get('attributes', {}).get('url')
        if not a.get('active', True) or not url:
            assets.append(a)
            continue

        r = requests.head(url, allow_redirects=True)
        if r.status_code != 200:
            a['active'] = False
        else:
            hash_url = a.get('hash_url')
            if hash_url:
                hashes = get_hashes(hash_url)
                filename = url.split("/")[-1]
                a['attributes']['hash'] = hashes.get(filename, 'unknown')

        assets.append(a)

    output = {'assets': assets}
    with open('openstack_catalog/web/static/assets.yaml', 'w') as out:
        out.write(yaml.dump(output, default_flow_style=False,
                            Dumper=IndentedDumper, width=80,
                            indent=2))
Example #9
def load(yaml_data, **kwargs):
    ''' convert yaml data into a configuration
         @param yaml_data: yaml data to be parsed
         @param configData: do substitution from this data

         parsing includes two custom yaml tags

         !format does string.format substitution using mapping data in
         the node. kwargs override default values if present
    '''

    def _python_format(loader, node):
        ''' do python string formatting

            requires a mapping input
            special key "format" is the string to be formatted
            all other keys are passed as keyword arguments
        '''

        params = Bunch(loader.construct_mapping(node))

        # allow kwargs substitution
        for key, value in kwargs.items():
            if key in params:
                params[key] = value

        rv = params.format.format(**params) #pylint: disable=W0142
        return rv

    yaml.add_constructor('!format', _python_format)

    return _bunchify_tree(yaml.load(yaml_data))
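A minimal usage sketch of the '!format' tag defined above; the document text is hypothetical:

doc = """
greeting: !format
  format: "Hello, {name}!"
  name: world
"""
cfg = load(doc, name="YAML")  # the kwarg overrides the default 'name' from the mapping
# expected (hypothetical): cfg.greeting == "Hello, YAML!"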
Example #10
def get_all_host_facter_message():
    # Puppet's yaml files are in Ruby format, so they need to be converted
    yaml.add_multi_constructor(u"!ruby/object:", construct_ruby_object)
    yaml.add_constructor(u"!ruby/sym", construct_ruby_sym)
    # Collect the names of all host files that contain facter information
    for dirpath, dirnames, filenames in os.walk(yaml_dir):
        # Only process files ending in .yaml directly under the yaml directory
        if dirpath == yaml_dir:
            for file in filenames:
                file_name, file_ext = os.path.splitext(file)
                if file_ext == '.yaml':
                    host_yaml_path = yaml_dir + '/' + file
                    # Read the yaml file contents as a dict
                    host_yaml_result_dict = yaml_file_handle(host_yaml_path)
                    # Process the data for a single agent
                    if host_yaml_result_dict:
                        # If there is a 'facts' key, look up its value directly
                        if host_yaml_result_dict.has_key('facts'):
                            data_dict = host_yaml_result_dict['facts']['values']
                        # Otherwise take the values directly
                        else:
                            data_dict = host_yaml_result_dict['values']

                    # Now process the data and extract what we need
                    result_dict = handle_facter_message(data_dict)
                    all_host_facter_message[file_name] = result_dict
    # Return the final result set
    return all_host_facter_message
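construct_ruby_object and construct_ruby_sym are not shown in this excerpt; a commonly used sketch (an assumption, not necessarily this project's code) simply maps the Ruby tags onto plain YAML types:

def construct_ruby_object(loader, suffix, node):
    # Treat any "!ruby/object:..." node as an ordinary mapping.
    return loader.construct_yaml_map(node)

def construct_ruby_sym(loader, node):
    # Treat "!ruby/sym" nodes as ordinary strings.
    return loader.construct_yaml_str(node)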
Example #11
def import_comments(request):
    """
    Import all comments to the database from the following locations:
        <folder>/blog/**/comments
        <folder>/pages/**/comments

    Comment file names are expected to be the same as post/page names
    (excluding extension).

    """
    blog_info_cache = {}
    conf_loader = BlogConfLoader()

    # keep timezone info for datetime objects
    yaml.add_constructor(u'tag:yaml.org,2002:timestamp',
                         lambda cls, node: parse_datetime(node.value))

    def read_blog_info(folder):
        # read blog configuration
        blog_info = None
        blog_conf = os.path.join(folder, 'blog.conf')
        if folder in blog_info_cache:
            blog_info = blog_info_cache[folder]
        elif not os.path.exists(blog_conf):
            logger.info("no blog.conf in %s, directory skipped", folder)
        else:
            try:
                blog_info = conf_loader.load(blog_conf)
            except ConfLoaderError, e:
                logger.error(unicode(e))
                logger.info("directory %s skipped", root)
                return None
            else:
Example #12
def load_config(path):

    """validates, loads and configures the yaml document at the specified path

    :param path: the path to the file
    :return: the parsed yaml document
    :raises SchemaError: if the yaml document does not validate
    """

    validator = Core(source_file=path, schema_data=config_schema)
    validator.validate(raise_exception=True)

    pattern = re.compile(r'^(.*)<%= ENV\[\'(.*)\'\] %>(.*)$')
    yaml.add_implicit_resolver('!env_regex', pattern)

    def env_regex(loader, node):
        value = loader.construct_scalar(node)
        front, variable_name, back = pattern.match(value).groups()
        return str(front) + os.environ[variable_name] + str(back)

    yaml.add_constructor('!env_regex', env_regex)

    with open(path, 'r') as stream:
        doc = yaml.load(stream)
        return doc
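A minimal usage sketch of the implicit '!env_regex' substitution above; the path, schema, and environment variable are hypothetical:

import os
os.environ["DB_HOST"] = "db.internal"  # hypothetical variable
config = load_config("config.yml")     # hypothetical path; the file must satisfy config_schema
# a scalar such as "postgres://<%= ENV['DB_HOST'] %>/app" loads as "postgres://db.internal/app"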
    def load(self, check_duplicates=True):
        try:
            fp = open(self.path, "r")
        except OSError as e:
            msg = "Cannot open file %s (%s)" % (self.path, e)
            self.add_error(msg)
            return

        if check_duplicates:
            yaml.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, no_duplicates_constructor)
        else:
            yaml.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, constructor)

        try:
            lines = fp.readlines()
            self._contents = yaml.load("".join(lines))

        except (ConstructorError, OurYamlException) as e:
            msg = "Found issues in file %s\n%s" % (self.path, e)
            self.add_warning(msg)
            self.load(check_duplicates=False)

        except (TypeError, ValueError) as e:
            msg = "Cannot parse file %s\n%s" % (self.path, e)
            self.add_error(msg)
            return

        finally:
            fp.close()

        self.is_loaded = True
Example #14
def main(argv):
    """Transcribe YAML content into HTML through Django templates."""

    conf = generate_config(argv)

    settings.configure(
        TEMPLATE_DIRS=(conf["templates"],),
        TEMPLATE_LOADERS=(("django.template.loaders.cached.Loader", ("django.template.loaders.filesystem.Loader",)),),
    )
    import django.contrib.syndication.views  # Requires Django to be configured

    django.setup()

    yaml.add_constructor("tag:yaml.org,2002:timestamp", _timestamp_loader)

    recreate_dir(conf["output"])
    copy_static_content(conf["static"], conf["output"])

    for root, _, files in os.walk(conf["content"]):
        all_items = []

        item_root = os.path.relpath(root, conf["content"])
        output_root = os.path.join(conf["output"], item_root)
        if item_root != ".":
            os.mkdir(output_root)

        for file_name in files:
            content = yaml.load(open(os.path.join(root, file_name)))
            content["slug"] = os.path.splitext(file_name)[0]
            all_items.append(content)

        if all_items:
            output_all(all_items, item_root, output_root, conf["meta"])
Example #15
    def _enhanced_yaml_module(self, sysconfig):

        # NOTE: This is soo ugly, sorry for that, in future we need to modify
        # PyYAML to let us specify callbacks, somehow.  But for now, import
        # yaml right here (local import) to be able to add the
        # constructors/representers **only** locally (don't modify global
        # context).
        def _eval_node(loader, node):
            return str(eval(str(loader.construct_scalar(node)), {
                'project': self.project,
                'config': sysconfig,
                'macros': sysconfig['macros'],
            }))

        import yaml
        try:
            yaml.add_constructor(u'!eval', _eval_node, yaml.FullLoader)
            yaml.dg_load = functools.partial(yaml.load, Loader=yaml.FullLoader)
        except AttributeError:
            # Older versions of PyYAML don't have yaml.FullLoader, remove this
            # once we don't have to deal with those.
            yaml.add_constructor(u'!eval', _eval_node)
            yaml.dg_load = yaml.load

        return yaml
def load(contents):
    #example
    yaml.add_multi_constructor(u"!ruby/object:", construct_ruby_object)
    yaml.add_constructor(u"!ruby/sym", construct_ruby_sym)
    stream = contents
    mydata = yaml.load(stream)
    return mydata
Example #17
    def _load_config(self):
        """
        Load specified config file
        """
        try:
            with open(self.conf['config'], 'r') as fp:
                config = fp.read()
        except IOError as e:
            lg.error("Can't read config file %s: %s" % (self.conf['config'], e))
            raise

        # Register include constructors
        yaml.add_constructor('!include_dir', self._yaml_include_dir)
        yaml.add_constructor('!include', self._yaml_include)

        try:
            conf = yaml.load(config)
        except Exception as e:
            lg.error("Can't parse config file %s: %s" % (self.conf['config'], e))
            raise
        finally:
            fp.close()

        # Store parameters but don't overwrite
        # those submitted by command line
        for key, value in conf.iteritems():
            if self.conf.has_key(key):
                # User has submitted own parameter,
                # use that instead of config one
                lg.debug("Using parameter %s from user, ignoring config file value" % key)
            else:
                self.conf[key] = value
Example #18
  def CompileFilter(self, filter_string):
    """Compile a set of ObjectFilters defined in an YAML file."""
    if not os.path.isfile(filter_string):
      raise errors.WrongPlugin((
          'ObjectFilterList requires a YAML file to be passed on, this filter '
          'string is not a file.'))

    yaml.add_constructor('!include', IncludeKeyword,
                         Loader=yaml.loader.SafeLoader)
    results = None

    with open(filter_string, 'rb') as fh:
      try:
        results = yaml.safe_load(fh)
      except (yaml.scanner.ScannerError, IOError) as exception:
        raise errors.WrongPlugin(
            u'Unable to parse YAML file with error: {0:s}.'.format(exception))

    self.filters = []
    results_type = type(results)
    if results_type is dict:
      self._ParseEntry(results)
    elif results_type is list:
      for result in results:
        if type(result) is not dict:
          raise errors.WrongPlugin(
              u'Wrong format of YAML file, entry not a dict ({})'.format(
                  results_type))
        self._ParseEntry(result)
    else:
      raise errors.WrongPlugin(
          u'Wrong format of YAML file, entry not a dict ({})'.format(
              results_type))
    self._filter_expression = filter_string
Example #19
def do_deploy(repo, deploy_file, target = None):
    repo = path.path(repo)
    if target is None:
        target = DigsbyDeployTarget(repo)

    def path_constructor(loader, node):
        if node.id == 'sequence':
            return repo.joinpath(*(loader.construct_sequence(node))).abspath()
        elif node.id == 'scalar':
            return (repo / loader.construct_scalar(node)).abspath()

    import digsby_phases
    yaml.add_constructor('!path', path_constructor)
    phases = yaml.load(open(deploy_file))

    for phase in phases:
        assert(len(phase) == 1)
        phase_name, phase_parts = phase.items()[0]
        for strat in phase_parts:
            ((strategy_name, options),) = strat.items()
            options = target.get_options(phase_name, strategy_name, options)
            with deploy.phase(phase_name, strategy_name, target, **options) as phase:
                phase.do()

    print('*** done ***')
Example #20
def __init_module__():
    yaml.add_representer(uuid.UUID, uuid_representer, Dumper=ConfigDumper)
    yaml.add_representer(ConfigPath, path_representer, Dumper=ConfigDumper)
    yaml.add_constructor('!uuid', uuid_constructor, Loader=ConfigLoader)
    yaml.add_constructor(
        '!create-if-needed', path_constructor, Loader=ConfigLoader)
    uuid_add_implicit_resolver()
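The uuid helpers referenced above are not part of this excerpt; a hypothetical sketch of the pair registered on ConfigLoader/ConfigDumper:

import uuid

def uuid_representer(dumper, data):
    # Hypothetical: dump a UUID as an explicitly tagged scalar.
    return dumper.represent_scalar('!uuid', str(data))

def uuid_constructor(loader, node):
    # Hypothetical: rebuild the UUID from its string form.
    return uuid.UUID(loader.construct_scalar(node))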
Example #21
def get_yaml(path):
    """
    Read the file identified by `path` and import its YAML contents.

    :arg path: The path to a YAML configuration file.
    :rtype: dict
    """
    # Set the stage here to parse single scalar value environment vars from
    # the YAML file being read
    single = re.compile(r'^\$\{(.*)\}$')
    yaml.add_implicit_resolver("!single", single)
    def single_constructor(loader, node):
        value = loader.construct_scalar(node)
        proto = single.match(value).group(1)
        default = None
        if len(proto.split(':')) > 1:
            envvar, default = proto.split(':')
        else:
            envvar = proto
        return os.environ[envvar] if envvar in os.environ else default
    yaml.add_constructor('!single', single_constructor)

    raw = read_file(path)
    try:
        cfg = yaml.load(raw)
    except yaml.scanner.ScannerError as e:
        raise ConfigurationError(
            'Unable to parse YAML file. Error: {0}'.format(e))
    return cfg
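A minimal usage sketch of the '${VAR:default}' resolution above; the path and variable are hypothetical:

import os
os.environ["ES_HOST"] = "es1.local"  # hypothetical variable
cfg = get_yaml("curator.yml")        # hypothetical path
# a line such as "host: ${ES_HOST:127.0.0.1}" loads as "es1.local";
# with ES_HOST unset it would fall back to the default "127.0.0.1"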
Example #22
 def _load_css_params(self, skin, skindir):
     yaml_path = os.path.join(skindir, "variables.yaml")
     if os.path.isfile(yaml_path):
         with open(yaml_path, 'r') as yamlfd:
             css_text = yamlfd.read()
         try:
             yaml.add_constructor('!secret', ha._secret_yaml, Loader=yaml.SafeLoader)
             css = yaml.load(css_text, Loader=yaml.SafeLoader)
         except yaml.YAMLError as exc:
             ha.log(self.logger, "WARNING", "Error loading CSS variables")
             if hasattr(exc, 'problem_mark'):
                 if exc.context is not None:
                     ha.log(self.logger, "WARNING", "parser says")
                     ha.log(self.logger, "WARNING", str(exc.problem_mark))
                     ha.log(self.logger, "WARNING", str(exc.problem) + " " + str(exc.context))
                 else:
                     ha.log(self.logger, "WARNING", "parser says")
                     ha.log(self.logger, "WARNING", str(exc.problem_mark))
                     ha.log(self.logger, "WARNING", str(exc.problem))
             return None
         if css is None:
             return {}
         else:
             return self._resolve_css_params(css, css)
     else:
         ha.log(self.logger, "WARNING", "Error loading variables.yaml for skin '{}'".format(skin))
         return None
Example #23
    def _exec_module(self, module):
        fullname = module.__name__

        if yaml is None:
            raise ImportError('PyYaml is not installed')

        class MyYamlLoader(YamlLoader):
            pass

        for tag, func in iteritems(self._defaults):
            @functools.wraps(func)
            def constructor(loader, node):
                return func(fullname, loader.construct_mapping(node))

            yaml.add_constructor(tag, constructor, Loader=MyYamlLoader)

        try:
            stream = file(self.get_filename(fullname), 'r')
            docs = yaml.load_all(stream, Loader=MyYamlLoader)

        except IOError:
            raise ImportError("IO error while reading a stream")
        except yaml.YAMLError as e:
            raise e  # XXX convert into SyntaxError

        else:
            for doc in docs:
                if not hasattr(doc, '__name__'):
                    continue
                setattr(module, doc.__name__, doc)
def main():
    usage = "usage: %prog [options] inputimage"
    parser = OptionParser(usage)
    parser.add_option("-o", "--output", dest="outputfilename",
                      help="write output image to this file instead to standard filename", metavar="FILE")
    parser.add_option("-v", "--verbose",
                      action="store_true", dest="verbose", default=True,
                      help="print status messages to stdout")
    (options, args) = parser.parse_args()

    if len(args) != 1 and len(args) != 4:
        parser.error("incorrect number of arguments")

    inputfilename = args[0]

    def mat_constructor(loader, node):
        return loader.construct_mapping(node)

    yaml.CLoader.add_constructor(u'tag:yaml.org,2002:opencv-matrix', mat_constructor)
    yaml.add_constructor(u'tag:yaml.org,2002:opencv-matrix', mat_constructor)


    try:
        stream = open(inputfilename)
    except IOError:
        parser.error("can't open input file \""+ inputfilename +"\"")

    #yamlfile = yaml.load(stream, Loader = yaml.CLoader)
    yamlfile = yaml.load(stream)
    mask = yamlfile['mask']

    im = Image.new("RGB", (mask['cols'], mask['rows']))

    lut = {}
    lut[0] = (0,0,0)
    lut[2] = (85,85,85)
    lut[1] = (170,170,170)
    lut[3] = (255,255,255)

    data = [lut[x] for x in mask['data']]
    im.putdata(data)

    pixels = im.getdata()

    basename, extension = os.path.splitext(inputfilename)


    if options.outputfilename is None:
        outputfilename = basename + "_refined.bmp"
    else:
        outputfilename = options.outputfilename

    try:
        im.save(outputfilename)
    except IOError:
        parser.error("error in writing to \""+ outputfilename +"\"")
 def __init__(self, logger):
     """
     Initialize the class
     :param logger: Logging
     """
     super(YamlConfiguration, self).__init__()
     yaml.add_constructor('!include', self.__yaml_include)
     self.__logger = logger
Example #26
def main():
    yaml.add_constructor(u'!extractor', extractor_constructor)
    with open(args.config, 'r') as f:
        yaml_file = yaml.load(f)
    setup_dirs(yaml_file['app']['dirs'])
    config.dictConfig(yaml_file['logging'])
    articles = extract(yaml_file['app']['bases'])
    process_articles(articles)
Example #27
def __init_yaml():
    """Lazy init yaml because canmatrix might not be fully loaded when loading this format."""
    global _yaml_initialized
    if not _yaml_initialized:
        _yaml_initialized = True
        yaml.add_constructor(u'tag:yaml.org,2002:Frame', _frame_constructor)
        yaml.add_constructor(u'tag:yaml.org,2002:Signal', _signal_constructor)
        yaml.add_representer(canmatrix.Frame, _frame_representer)
Example #28
    def register_temp_stow_dir(cls):
        yaml_pattern = re.compile(r'^\<%= temp_stow_dir %\>$')
        cls.stow_root_dir = tempfile.mkdtemp(prefix='stow_root')
        yaml.add_implicit_resolver('!temp_stow_dir', yaml_pattern)

        def temp_stow_dir(loader, node):
            return cls.stow_root_dir
        yaml.add_constructor('!temp_stow_dir', temp_stow_dir)
Example #29
def doactualizalogpuppet(filetext):

    ahora=datetime.datetime.today()
    yaml.add_multi_constructor(u"!ruby/object:", construct_ruby_object)
    yaml.add_constructor(u"!ruby/sym", construct_ruby_sym)
    mydata = yaml.load(filetext)
    host=mydata["host"].upper()
    host=host[:host.index('.')]
    estadoglobal="OK"
    output = StringIO.StringIO()
    logs=mydata["logs"]
    output.write("<center><br><b>Logs de puppet</b><br><br>")   
    for item in logs:
       output.write("<table style='border: solid 1px #000000;width:95%;'>")
       if 'file' in item:
            output.write("<tr style='border: solid 1px #000000;'><td width='10%'>File</td><td>"+item['file']+":"+str(item['line'])+"</td></tr>")
                        
       output.write("<tr style='border: solid 1px #000000;'>")
       if item['level'] == 'err':
           estadoglobal="ERROR"
           output.write("<td width='10%'><font color='red'>Level</font></td>")       
           output.write("<td><font color='red'>"+item['level']+"</font></td>")
       else:
           output.write("<td width='10%'>Level</td>")       
           output.write("<td>"+item['level']+"</td>")
       output.write("</tr>")           
       output.write("<tr><td width='10%'>Message</td><td>"+item['message']+"</td></tr>")
       output.write("</table><br>")
	   
    output.write("<br><b>Clases y recursos aplicados</b><br><br>")      
    recursos=mydata["resource_statuses"]
    output.write("<table style='border: solid 1px #000000;width:95%'>")
    for item in recursos:
        eventos=recursos[item]['events']
        descripcion=recursos[item]['source_description']
        estado="OK"
        for evento in eventos:
            valor=evento['status']
            if valor == "failure" :
                 estado="ERROR"
                 estadoglobal="ERROR"
                 break	 
        if estado=="OK":                         			 
            output.write("<tr style='border: solid 1px #000000;'><td width='90%'>"+ descripcion +"</td><td>"+estado+"</td></tr>")
        else:
            output.write("<tr style='border: solid 1px #000000;'><td width='90%'><font color='red'>"+ descripcion +"</font></td><td><font color='red'>"+estado+"</font></td></tr>")
    output.write("</table><br><br></center>")
    
    fila=cdb((cdb.maquinas.host.upper()==host) & (cdb.maquinas.tipohost!='WINDOWS')).select().last()    
    if fila==None:
        pass
    else:
        # Rewind the file pointer to the beginning; otherwise nothing gets written to the table.
        output.seek(0)
        fila.update_record(ultimopuppet=ahora,estadopuppet=estadoglobal,logpuppet=cdb.maquinas.logpuppet.store(output,filename=host))
    output.close()
        
    return "OK"
Example #30
def main():
    usage = "usage: %prog mask gtmask"
    parser = OptionParser(usage)
    (options, args) = parser.parse_args()

    if len(args) != 2:
        parser.error("incorrect number of arguments")

    filename = args[0]
    gtfilename = args[1]

    def mat_constructor(loader, node):
        return loader.construct_mapping(node)

    yaml.CLoader.add_constructor(u'tag:yaml.org,2002:opencv-matrix', mat_constructor)
    yaml.add_constructor(u'tag:yaml.org,2002:opencv-matrix', mat_constructor)


    try:
        stream = open(filename)
    except IOError:
        parser.error("can't open input file \""+ filename +"\"")

    yamlfile = yaml.load(stream, Loader = yaml.CLoader)
    #yamlfile = yaml.load(stream)
    mask = yamlfile['mask']

    try:
        stream = open(gtfilename)
    except IOError:
        parser.error("can't open input file \""+ gtfillename +"\"")

    yamlfile = yaml.load(stream, Loader = yaml.CLoader)
    #yamlfile = yaml.load(stream)
    gtmask = yamlfile['mask']

    if mask['cols'] != gtmask['cols'] or mask['rows'] != gtmask['rows']:
        parser.error("image size is not identical")

    count_correct = 0
    count_wrongforeground = 0
    count_wrongbackground = 0

    for i in xrange(mask['cols'] * mask['rows']):
        value = mask['data'][i]
        gtvalue = gtmask['data'][i]
        if (value & 1) == (gtvalue & 1):
            count_correct += 1
        elif (gtvalue & 1) == 0:
            count_wrongbackground += 1
        else:
            count_wrongforeground += 1

    correct = count_correct / float( mask['rows'] * mask['cols'] )
    wrongforeground = count_wrongforeground / float( mask['rows'] * mask['cols'] )
    wrongbackground = count_wrongbackground / float( mask['rows'] * mask['cols'] )
            
    print "correct: %f,   wrongly classified foreground pixels: %f,   wrongly classified background pixels: %f" % (correct, wrongforeground, wrongbackground)
Example #31
def connect(db_config):
    return pymysql.connect(
        charset=db_config['charset'],
        cursorclass=pymysql.cursors.DictCursor,
        db=db_config['database'],
        host=db_config['host'],
        password=db_config['password'],
        port=3306,
        user=db_config['username']
    )

if __name__ == "__main__":
    pattern = re.compile(r'^<%= ENV\[\'(.*)\'\] %>$')
    yaml.add_implicit_resolver('!env_regex', pattern)

    def env_regex(loader, node):
        value = loader.construct_scalar(node)
        var = pattern.match(value).groups()[0]
        print "VAR is %s" % var
        return os.environ[var]

    yaml.add_constructor('!env_regex', env_regex)

    passed_args = sys.argv
    with open(passed_args[2], 'r') as f:
        config = yaml.load(f)

    # only using debug=True for the nice auto-reload on change feature
    environ = passed_args[1]
    app.run(debug=True, port=int(config[environ]['web']['port']), host=str(config[environ]['web']['listen']))
Example #32
logger = logging.getLogger(__name__)

_yaml_mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG


def _dict_representer(dumper, data):
    return dumper.represent_dict(iter(data.items()))


def _dict_constructor(loader, node):
    return collections.OrderedDict(loader.construct_pairs(node))


yaml.add_representer(collections.OrderedDict, _dict_representer)
yaml.add_constructor(_yaml_mapping_tag, _dict_constructor)


def writeToFile(filename, the_bytes):
    try:
        with open(filename, "wb") as fh:
            fh.write(the_bytes)
    except Exception as err:
        raise FileSystemError("Failed to write to %s : %s" % (filename, err))


def writeYAML(filename, data):
    try:
        with open(filename, "w") as fileh:
            fileh.write(yaml.dump(data, default_flow_style=False))
    except Exception as err:
Example #33
_mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG


def dict_representer(dumper, data):
    return dumper.represent_dict(
        data.iteritems() if hasattr(data, "iteritems") else data.items())


def dict_constructor(loader, node):
    return collections.OrderedDict(loader.construct_pairs(node))


yaml.add_representer(collections.OrderedDict, dict_representer,
                     yaml.SafeDumper)
yaml.add_constructor(_mapping_tag, dict_constructor, yaml.SafeLoader)


def check_memory_usage(bytes_needed, confirm):
    if bytes_needed > psutil.virtual_memory().available:
        if bytes_needed < (psutil.virtual_memory().available +
                           psutil.swap_memory().free):
            text = "Action requires %s, you have enough swap memory available but it will make your computer slower, do you want to continue?" % (
                filesize_format(bytes_needed), )
            return confirm("Memory usage issue", text)
        else:
            text = "Action requires %s, you do not have enough swap memory available, do you want try anyway?" % (
                filesize_format(bytes_needed), )
            return confirm("Memory usage issue", text)
    return True
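A minimal round-trip sketch for the SafeLoader/SafeDumper registrations at the top of this example:

# Runs in the same module as the registrations above.
data = yaml.safe_load("b: 2\na: 1\nc: 3\n")
assert isinstance(data, collections.OrderedDict)
assert list(data) == ["b", "a", "c"]  # document order preserved
print(yaml.safe_dump(data, default_flow_style=False))
# the keys come back in the same order because the representer is fed data.items()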
Example #34
import yaml

from .vault import vault_constructor
from .kms import kms_simple_constructor

yaml.add_constructor('!vault', vault_constructor)
yaml.add_constructor('!kms', kms_simple_constructor)
Example #35
def yaml_join(loader, node):
    """
    defines custom YAML join function.
    see http://stackoverflow.com/questions/5484016/how-can-i-do-string-concatenation-or-string-replacement-in-yaml/23212524#23212524
    @param loader: the YAML Loader
    @param node: the YAML (sequence) node
    """
    seq = loader.construct_sequence(node)
    return ''.join([str(i) for i in seq])


try:
    import yaml
    # register the tag handlers
    yaml.add_constructor('!join', yaml_join)
except ImportError:
    pass


class FormatYeb(EasyConfigFormat):
    """Support for easyconfig YAML format"""
    USABLE = True

    def __init__(self):
        """FormatYeb constructor"""
        super(FormatYeb, self).__init__()
        self.log.experimental("Parsing .yeb easyconfigs")

    def validate(self):
        """Format validation"""
Example #36
class Loader(yaml.SafeLoader):
    """YAML Loader with `!include` constructor."""
    def __init__(self, stream: IO) -> None:
        """Initialise Loader."""

        try:
            self._root = os.path.split(stream.name)[0]
        except AttributeError:
            self._root = os.path.curdir

        super().__init__(stream)


def construct_include(loader: Loader, node: yaml.Node) -> Any:
    """Include file referenced at node."""

    filename = os.path.abspath(
        os.path.join(loader._root, loader.construct_scalar(node)))
    extension = os.path.splitext(filename)[1].lstrip('.')

    with open(filename, 'r') as f:
        if extension in ('yaml', 'yml'):
            return yaml.load(f, Loader)
        elif extension in ('json', ):
            return json.load(f)
        else:
            return ''.join(f.readlines())


yaml.add_constructor('!include', construct_include, Loader)
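A minimal usage sketch of the Loader defined above; the file names and contents are hypothetical:

# main.yaml (hypothetical):  db: !include db.yaml
# db.yaml   (hypothetical):  host: localhost
with open("main.yaml") as f:
    config = yaml.load(f, Loader)
# expected: {'db': {'host': 'localhost'}}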
Example #37




class Data(yaml.YAMLObject):
    yaml_tag = '!Data'
    @classmethod
    def from_yaml(cls, loader, node):
        data = loader.construct_mapping(node)
        data['__class'] = True
        return data



yaml.add_constructor('tag:yaml.org,2002:map', lambda l, n: collections.OrderedDict(l.construct_pairs(n)))
# class RPGLoader(yaml.Loader):
#     typelist = []
#     def __init__(self, stream):
#         super().__init__(stream)
#         self.inheritance_helper = RPGLoader.Stack([object])

#     class Stack(list):
#         def push(self, obj):
#             self.append(obj)
#         @property
#         def top(self):
#             return self[-1]

#     def construct_mapping(self, node):
#         parents = self.inheritance_helper
Example #38
            >>> yaml.safe_dump(b, default_flow_style=True)
            '{foo: [bar, {lol: true}], hello: 42}\\n'
        """
        return dumper.represent_dict(data)

    def to_yaml(dumper, data):
        """ Converts Bunch to a representation node.
            
            >>> b = Bunch(foo=['bar', Bunch(lol=True)], hello=42)
            >>> import yaml
            >>> yaml.dump(b, default_flow_style=True)
            '!bunch.Bunch {foo: [bar, !bunch.Bunch {lol: true}], hello: 42}\\n'
        """
        return dumper.represent_mapping(u'!bunch.Bunch', data)

    yaml.add_constructor(u'!bunch', from_yaml)
    yaml.add_constructor(u'!bunch.Bunch', from_yaml)

    SafeRepresenter.add_representer(Bunch, to_yaml_safe)
    SafeRepresenter.add_multi_representer(Bunch, to_yaml_safe)

    Representer.add_representer(Bunch, to_yaml)
    Representer.add_multi_representer(Bunch, to_yaml)

    # Instance methods for YAML conversion
    def toYAML(self, **options):
        """ Serializes this Bunch to YAML, using `yaml.safe_dump()` if 
            no `Dumper` is provided. See the PyYAML documentation for more info.
            
            >>> b = Bunch(foo=['bar', Bunch(lol=True)], hello=42)
            >>> import yaml
Example #39
def load_widget(dash, includes, name, css_vars, global_parameters):
    instantiated_widget = None
    #
    # Check if we have already encountered a definition
    #
    for include in includes:
        if name in include:
            instantiated_widget = include[name]
    #
    # If not, go find it elsewhere
    #
    if instantiated_widget is None:
        # Try to find in in a yaml file
        yaml_path = os.path.join(conf.dashboard_dir, "{}.yaml".format(name))
        if os.path.isfile(yaml_path):
            with open(yaml_path, 'r') as yamlfd:
                widget = yamlfd.read()
            try:
                yaml.add_constructor('!secret', ha._secret_yaml)
                instantiated_widget = yaml.load(widget)
            except yaml.YAMLError as exc:
                log_error(
                    dash, name,
                    "Error while parsing dashboard '{}':".format(yaml_path))
                if hasattr(exc, 'problem_mark'):
                    if exc.context is not None:
                        log_error(dash, name, "parser says")
                        log_error(dash, name, str(exc.problem_mark))
                        log_error(dash, name,
                                  str(exc.problem) + " " + str(exc.context))
                    else:
                        log_error(dash, name, "parser says")
                        log_error(dash, name, str(exc.problem_mark))
                        log_error(dash, name, str(exc.problem))
                return {"widget_type": "text", "title": "Error loading widget"}

        elif name.find(".") != -1:
            #
            # No file, check if it is implicitly defined via an entity id
            #
            parts = name.split(".")
            instantiated_widget = {
                "widget_type": parts[0],
                "entity": name,
                "title_is_friendly_name": 1
            }
        else:
            ha.log(conf.dash, "WARNING",
                   "Unable to find widget definition for '{}'".format(name))
            # Return some valid data so the browser will render a blank widget
            return {
                "widget_type": "text",
                "title": "Widget definition not found"
            }

    widget_type = None
    try:
        if "widget_type" not in instantiated_widget:
            return {
                "widget_type": "text",
                "title": "Widget type not specified"
            }

        #
        # One way or another we now have the widget definition
        #
        widget_type = instantiated_widget["widget_type"]

        if widget_type == "text_sensor":
            ha.log(
                conf.dash, "WARNING",
                "'text_sensor' widget is deprecated, please use 'sensor' instead for widget '{}'"
                .format(name))

        # Check for custom base widgets first
        if os.path.isdir(
                os.path.join(conf.config_dir, "custom_widgets", widget_type)):
            # This is a custom base widget so return it in full
            return expand_vars(instantiated_widget, css_vars)

        # Now regular base widgets
        if os.path.isdir(os.path.join(conf.dash_dir, "widgets", widget_type)):
            # This is a base widget so return it in full
            return expand_vars(instantiated_widget, css_vars)

        # We are working with a derived widget so we need to do some merges and substitutions

        # first check for custom widget

        yaml_path = os.path.join(conf.config_dir, "custom_widgets",
                                 "{}.yaml".format(widget_type))
        if not os.path.isfile(yaml_path):
            yaml_path = os.path.join(conf.dash_dir, "widgets",
                                     "{}.yaml".format(widget_type))

        #
        # Variable substitutions
        #
        yaml_file, templates = do_subs(yaml_path, instantiated_widget, '""')

        try:
            #
            # Parse the substituted YAML file - this is a derived widget definition
            #
            yaml.add_constructor('!secret', ha._secret_yaml)
            final_widget = yaml.load(yaml_file)
        except yaml.YAMLError as exc:
            log_error(dash, name,
                      "Error in widget definition '{}':".format(widget_type))
            if hasattr(exc, 'problem_mark'):
                if exc.context is not None:
                    log_error(dash, name, "parser says")
                    log_error(dash, name, str(exc.problem_mark))
                    log_error(dash, name,
                              str(exc.problem) + " " + str(exc.context))
                else:
                    log_error(dash, name, "parser says")
                    log_error(dash, name, str(exc.problem_mark))
                    log_error(dash, name, str(exc.problem))
            return {
                "widget_type": "text",
                "title": "Error loading widget definition"
            }

        #
        # Add in global params
        #
        if global_parameters is not None:
            for key in global_parameters:
                final_widget[key] = global_parameters[key]

        #
        # Override defaults with parameters in users definition
        #
        for key in instantiated_widget:
            if key != "widget_type" and key not in templates:
                # if it is an existing key and it is a style attribute, prepend, don't overwrite
                if key in final_widget and key.find("style") != -1:
                    final_widget[key] = final_widget[
                        key] + ";" + instantiated_widget[key]
                else:
                    final_widget[key] = instantiated_widget[key]
                if "css" in final_widget and key in final_widget["css"]:
                    final_widget["css"][key] = final_widget["css"][
                        key] + ";" + instantiated_widget[key]
                if "static_css" in final_widget and key in final_widget[
                        "static_css"]:
                    final_widget["static_css"][key] = final_widget[
                        "static_css"][key] + ";" + instantiated_widget[key]
                if "icons" in final_widget and key in final_widget["icons"]:
                    final_widget["icons"][key] = instantiated_widget[key]
                if "static_icons" in final_widget and key in final_widget[
                        "static_icons"]:
                    final_widget["static_icons"][key] = instantiated_widget[
                        key]

        #
        # Process variables from skin
        #
        final_widget = expand_vars(final_widget, css_vars)
        #
        # Merge styles
        #
        final_widget = merge_styles(final_widget, name)

        return final_widget
    except FileNotFoundError:
        ha.log(conf.dash, "WARNING",
               "Unable to find widget type '{}'".format(widget_type))
        # Return some valid data so the browser will render a blank widget
        return {"widget_type": "text", "title": "Widget type not found"}
Example #40
from marmot.evaluation.evaluation_metrics import weighted_fmeasure

logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s',
                    level=logging.INFO)
logger = logging.getLogger('testlogger')


# define custom tag handler to join paths with the path of the word_level module
def join_with_module_path(loader, node):
    module_path = os.path.dirname(marmot.__file__)
    resolved = loader.construct_scalar(node)
    return os.path.join(module_path, resolved)


## register the tag handler
yaml.add_constructor('!join', join_with_module_path)


def main(config):
    # load ContextCreators from config file, run their input functions, and pass the result into the initialization function
    # init() all context creators specified by the user with their arguments
    # import them according to their fully-specified class names in the config file
    # it's up to the user to specify context creators which extract both negative and positive examples (if that's what they want)

    # Chris - working - we want to hit every token
    interesting_tokens = experiment_utils.import_and_call_function(
        config['interesting_tokens'])
    print "INTERESTING TOKENS: ", interesting_tokens
    logger.info('The number of interesting tokens is: ' +
                str(len(interesting_tokens)))
    workers = config['workers']
Example #41
from snapcraft import shell_utils  # noqa
from snapcraft.internal import repo  # noqa

# Setup yaml module globally
# yaml OrderedDict loading and dumping
# from http://stackoverflow.com/a/21048064 Wed Jun 22 16:05:34 UTC 2016
_mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG


def dict_representer(dumper, data):
    return dumper.represent_dict(data.items())


def dict_constructor(loader, node):
    # Necessary in order to make yaml merge tags work
    loader.flatten_mapping(node)
    return OrderedDict(loader.construct_pairs(node))


def str_presenter(dumper, data):
    if len(data.splitlines()) > 1:  # check for multiline string
        return dumper.represent_scalar('tag:yaml.org,2002:str',
                                       data,
                                       style='|')
    return dumper.represent_scalar('tag:yaml.org,2002:str', data)


yaml.add_representer(str, str_presenter)
yaml.add_representer(OrderedDict, dict_representer)
yaml.add_constructor(_mapping_tag, dict_constructor)
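A minimal sketch of the effect of these registrations: mapping order is preserved and multiline strings are emitted in block style (the data below is hypothetical):

from collections import OrderedDict

doc = OrderedDict([("name", "demo"),
                   ("description", "first line\nsecond line\n")])
print(yaml.dump(doc, default_flow_style=False))
# 'name' stays first, and the multiline 'description' should be emitted with '|' block style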
Example #42
def _load_dash(name, extension, layout, occupied, includes, level, css_vars,
               global_parameters):
    if extension == "dash":
        dash = {
            "title": "HADashboard",
            "widget_dimensions": [120, 120],
            "widget_margins": [5, 5],
            "columns": 8
        }
    else:
        dash = {}

    dash["widgets"] = []
    dash["errors"] = []
    valid_params = [
        "title", "widget_dimensions", "widget_margins", "columns",
        "widget_size", "rows"
    ]
    layouts = []

    if level > conf.max_include_depth:
        log_error(
            dash, name, "Maximum include level reached ({})".format(
                conf.max_include_depth))
        return dash, layout, occupied, includes

    dashfile = os.path.join(conf.dashboard_dir,
                            "{}.{}".format(name, extension))
    page = "default"

    try:
        with open(dashfile, 'r') as yamlfd:
            defs = yamlfd.read()
    except:
        log_error(dash, name,
                  "Error opening dashboard file '{}'".format(dashfile))
        return dash, layout, occupied, includes

    try:
        yaml.add_constructor('!secret', ha._secret_yaml)
        dash_params = yaml.load(defs)
    except yaml.YAMLError as exc:
        log_error(dash, name,
                  "Error while parsing dashboard '{}':".format(dashfile))
        if hasattr(exc, 'problem_mark'):
            if exc.context is not None:
                log_error(dash, name, "parser says")
                log_error(dash, name, str(exc.problem_mark))
                log_error(dash, name,
                          str(exc.problem) + " " + str(exc.context))
            else:
                log_error(dash, name, "parser says")
                log_error(dash, name, str(exc.problem_mark))
                log_error(dash, name, str(exc.problem))
        else:
            log_error(dash, name,
                      "Something went wrong while parsing dashboard file")

        return dash, layout, occupied, includes
    if dash_params is not None:
        if "global_parameters" in dash_params:
            if extension == "dash":
                global_parameters = dash_params["global_parameters"]
            else:
                ha.log(
                    conf.dash, "WARNING",
                    "global_parameters dashboard directive illegal in imported dashboard '{}.{}'"
                    .format(name, extension))

        for param in dash_params:
            if param == "layout" and dash_params[param] is not None:
                for lay in dash_params[param]:
                    layouts.append(lay)
            elif param in valid_params:
                if extension == "dash":
                    dash[param] = dash_params[param]
                else:
                    ha.log(
                        conf.dash, "WARNING",
                        "Top level dashboard directive illegal in imported dashboard '{}.{}': {}: {}"
                        .format(name, extension, param, dash_params[param]))
            else:
                includes.append({param: dash_params[param]})

        for lay in layouts:
            if isinstance(lay, dict):
                if "include" in lay:
                    new_dash, layout, occupied, includes = _load_dash(
                        os.path.join(conf.dashboard_dir,
                                     lay["include"]), "yaml", layout, occupied,
                        includes, level + 1, css_vars, global_parameters)
                    if new_dash is not None:
                        merge_dashes(dash, new_dash)
                elif "empty" in lay:
                    layout += lay["empty"]
                else:
                    log_error(
                        dash, name,
                        "Incorrect directive, should be 'include or empty': {}"
                        .format(lay))
            else:
                layout += 1
                add_layout(lay, layout, occupied, dash, page, includes,
                           css_vars, global_parameters)

    return dash, layout, occupied, includes
Example #43
def __setup_yaml_module():
    # preserve order while loading the YAML file by using an OrderedDict instead of a Dict
    # http://stackoverflow.com/a/21048064/3034356
    _mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG
    yaml.add_constructor(_mapping_tag, __yaml_override_constructor)

    # pyyaml has problems with the CloudFormation functions (such as !Sub)
    # we 'remove' these functions by overriding the constructor again for these functions
    # http://stackoverflow.com/questions/7224033/default-constructor-parameters-in-pyyaml
    yaml.add_constructor('!And', __yaml_override_cloudformation_function)
    yaml.add_constructor('!Base64', __yaml_override_cloudformation_function)
    yaml.add_constructor('!GetAtt', __yaml_override_cloudformation_function)
    yaml.add_constructor('!GetAZs', __yaml_override_cloudformation_function)
    yaml.add_constructor('!FindInMap', __yaml_override_cloudformation_function)
    yaml.add_constructor('!If', __yaml_override_cloudformation_function)
    yaml.add_constructor('!ImportValue',
                         __yaml_override_cloudformation_function)
    yaml.add_constructor('!Join', __yaml_override_cloudformation_function)
    yaml.add_constructor('!Equals', __yaml_override_cloudformation_function)
    yaml.add_constructor('!Ref', __yaml_override_cloudformation_function)
    yaml.add_constructor('!Select', __yaml_override_cloudformation_function)
    yaml.add_constructor('!Split', __yaml_override_cloudformation_function)
    yaml.add_constructor('!Sub', __yaml_override_cloudformation_function)
    yaml.add_constructor('!Not', __yaml_override_cloudformation_function)
    yaml.add_constructor('!Or', __yaml_override_cloudformation_function)
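The override constructors are not shown in this example; a hypothetical sketch that simply keeps the raw node content so the CloudFormation tags do not break parsing:

def __yaml_override_cloudformation_function(loader, node):
    # Hypothetical: return the underlying value untouched instead of interpreting the tag.
    if isinstance(node, yaml.ScalarNode):
        return loader.construct_scalar(node)
    if isinstance(node, yaml.SequenceNode):
        return loader.construct_sequence(node)
    return loader.construct_mapping(node)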
 def load(self, para_file):
     yaml.add_constructor('!join', self._concat)
     fin = open(para_file, 'r')
     # using default dict: if the key is not specified, the values is None
     return defaultdict(lambda: None, yaml.load(fin))
Example #45
  !resource [<path>/]<filename>
    Copies the file lovelace/<path><filename> to www/lovelace/<filename> and is replaced with /local/lovelace/<filename>
"""


def include_statement(loader, node):
    global indir, states
    filename = loader.construct_scalar(node)
    with open("{}/{}".format(indir, filename), 'r') as fp:
        data = fp.read()
    template = jinja2.Template(data)
    retval = yaml.load(template.render(states=states))
    return retval


yaml.add_constructor('!include', include_statement)


def secret_statement(loader, node):
    with open(secretsfile, 'r') as fp:
        data = fp.read()
    data = yaml.load(data)
    if not node.value in data:
        raise yaml.scanner.ScannerError('Could not find secret {}'.format(
            node.value))
    return data[node.value]


yaml.add_constructor('!secret', secret_statement)

Example #46

def pathex_constructor(loader, node):
    precedingPath = loader.construct_scalar(node)
    output_strings = []
    while ('${' in precedingPath) and ('}' in precedingPath):
        precedingPath, envVar, remainingPath = pattern.match(
            precedingPath).groups()
        output_strings.append(remainingPath)
        output_strings.append(os.environ[envVar])
    output_strings.append(precedingPath)
    return ''.join(output_strings[::-1])


add_implicit_resolver("!pathex", pattern)
add_constructor('!pathex', pathex_constructor)


def load_yaml(filename_or_string_input,
              env_vars=False,
              complicated_input=False,
              string_input=False):

    if string_input:
        input_string = filename_or_string_input
    else:
        with open(filename_or_string_input) as descr:
            input_string = descr.read()
    if env_vars or complicated_input:
        dico = load(input_string, Loader=Loader)
    else:
Example #47
import re


def extend_constructor(loader, node):
    """
    Return an ExtendObject. This will be used in a post-processing step
    to extend the base object.
    """
    return ExtendObject(node)


# Register the '<<<' syntax with our custom constructor
yaml.add_implicit_resolver('tag:opengis.ch,2016:extend',
                           re.compile(r'^(?:<<<)$'), ['<'])

yaml.add_constructor('tag:opengis.ch,2016:extend', extend_constructor)


class ExtendObject(object):
    """
    The ExtendObject will be used as a placeholder for the loading time
    """
    def __init__(self, data):
        self.data = data


class YamlReaderError(RuntimeError):
    pass


class InheritanceLoader(yaml.Loader):
Exemple #48
0
def include_constructor(self, node):
    """
    constructor:
      parses the !include relative_file_path
      loads the file from relative_file_path and insert the values into the original file
    """
    filepath = self.construct_scalar(node)
    if filepath[-1] == ',':
        filepath = filepath[:-1]
    filename = os.path.join(self._root, filepath)
    with open(filename, 'r') as f:
        return yaml.load(f, accelergy_loader)


yaml.add_constructor('!include', include_constructor, accelergy_loader)


def includedir_constructor(self, node):
    """
    constructor:
      parses the !includedir relative_file_path
      loads the file from relative_file_path and insert the values into the original file
    """
    filepath = self.construct_scalar(node)
    if filepath[-1] == ',':
        filepath = filepath[:-1]
    dirname = os.path.join(self._root, filepath)
    yamllist = []
    for filename in glob.glob(dirname + "/*.yaml"):
        with open(filename, 'r') as f:
Exemple #49
0
# Author Ken Conley/[email protected]
"""
Base API for loading rosdep information by package or stack name.
This API is decoupled from the ROS packaging system to enable multiple
implementations of rosdep, including ones that don't rely on the ROS
packaging system.  This is necessary, for example, to implement a
version of rosdep that works against tarballs of released stacks.
"""

import yaml

from .core import InvalidData

ROSDEP_YAML = 'rosdep.yaml'

yaml.add_constructor(u'tag:yaml.org,2002:float',
                     yaml.constructor.Constructor.construct_yaml_str)
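# Effect of the override above: version-like numbers keep their exact text,
# e.g. yaml.load("pkg: 1.10") returns {'pkg': '1.10'} instead of {'pkg': 1.1}.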


class RosdepLoader:
    """
    Base API for loading rosdep information by package or stack name.  
    """
    def load_rosdep_yaml(self, yaml_contents, origin):
        """
        Utility routine for unmarshalling rosdep data encoded as YAML.

        :param origin: origin of yaml contents (for error messages)
        :raises: :exc:`yaml.YAMLError`
        """
        try:
            return yaml.load(yaml_contents)
Exemple #50
0
# pass a variable from python manage.py to here
env = os.environ.get('APP_ENV', '')

CONFIG_DIR = BASE_DIR + "/config/"

pattern = re.compile(r'^\<%= ENV\[\'(.*)\'\] %\>(.*)$')
yaml.add_implicit_resolver("!pathex", pattern)


def pathex_constructor(loader, node):
    value = loader.construct_scalar(node)
    env_var, remaining_path = pattern.match(value).groups()
    return os.environ[env_var] + remaining_path


yaml.add_constructor('!pathex', pathex_constructor)
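# Quick, hedged check of the resolver/constructor pair above (hypothetical
# variable name; assumes it is present in the environment):
#
#   os.environ['DEMO_DATA_DIR'] = '/var/data'
#   yaml.load("path: <%= ENV['DEMO_DATA_DIR'] %>/cache")
#   -> {'path': '/var/data/cache'}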

# Load Default Config File
with open(CONFIG_DIR + "config.yml", 'r') as ymlfile:
    default_cfg = yaml.load(ymlfile)

# Attempt a custom Config File
try:
    with open(CONFIG_DIR + "config-" + env + ".yml", 'r') as ymlfile:
        custom_cfg = yaml.load(ymlfile)
except IOError:
    custom_cfg = {}

default_cfg.update(custom_cfg)

print(default_cfg)
Exemple #51
0
def register_tag(tag, classpath):
    yaml.add_constructor('!' + tag, metaloader(classpath))
    yaml.add_constructor('tag:nltk.org,2011:' + tag, metaloader(classpath))
Exemple #52
0
def setup_yaml_parser():
    yaml.add_constructor('!env_var', env_var_constructor)
    yaml.add_constructor('!raw_env_var', partial(env_var_constructor,
                                                 raw=True))
    yaml.add_implicit_resolver('!env_var', IMPLICIT_ENV_VAR_MATCHER)
Exemple #53
0
DEFAULT_CONFIG_PATH = os.path.join(
    os.path.dirname(os.path.abspath(__file__)),
    'config.yaml')


def load_deb_base_url(loader, node):
    base_url, comp = node.value.rsplit(' ', 1)
    return deb_base_url(base_url, comp)


def load_rpm_base_url(loader, node):
    return rpm_base_url(node.value)


def load_regex(loader, node):
    return re.compile(node.value)


yaml.add_constructor(
    u'!deb_base_url', load_deb_base_url, Loader=yaml.SafeLoader)
yaml.add_constructor(
    u'!rpm_base_url', load_rpm_base_url, Loader=yaml.SafeLoader)
yaml.add_constructor(
    u'!regular_expression', load_regex, Loader=yaml.SafeLoader)


def load_config(path=None):
    with open(path or DEFAULT_CONFIG_PATH) as f:
        return yaml.safe_load(f)
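# Hedged illustration (hypothetical values) of a config.yaml these SafeLoader
# constructors can parse via load_config; deb_base_url and rpm_base_url are
# assumed to be factories defined elsewhere in this module:
#
#   mirror: !deb_base_url http://deb.debian.org/debian main
#   repo: !rpm_base_url https://mirror.example.org/centos/7/os/x86_64/
#   kernel_package: !regular_expression ^linux-image-.*$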
Exemple #54
0
reference_names = (
    u"Parution au JO",
    u"Références BOI",
    u"Références législatives",
    u"Références législatives - définition des ressources et plafonds",
    u"Références législatives - revalorisation des plafonds",
    u"Références législatives des règles de calcul et du paramètre Po",
    u"Références législatives de tous les autres paramètres",
)


def dict_constructor(loader, node):
    return collections.OrderedDict(loader.construct_pairs(node))


yaml.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                     dict_constructor)
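# With the constructor above registered for the default mapping tag, plain
# yaml.load calls preserve key order, e.g.
#   yaml.load("b: 1\na: 2")  ->  OrderedDict([('b', 1), ('a', 2)])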


def build_tree_from_yaml_clean(yaml_dir):
    tree = collections.OrderedDict()
    for yaml_dir_encoded, _, filenames_encoded in os.walk(yaml_dir):
        for filename_encoded in sorted(filenames_encoded):
            if not filename_encoded.endswith('.yaml'):
                continue
            filename = filename_encoded.decode(file_system_encoding)
            sheet_name = os.path.splitext(filename)[0]
            yaml_file_path_encoded = os.path.join(yaml_dir_encoded,
                                                  filename_encoded)
            relative_file_path_encoded = yaml_file_path_encoded[len(yaml_dir):].lstrip(os.sep)
Exemple #55
0
    def __init__(self, yaml_file, release="current"):
        # First add the include constructor to be able to share config values
        yaml.add_constructor('!include', construct_include, Loader)
        yaml_config = yaml.load(yaml_file, Loader)
        if "v1" in yaml_config:
            # v1 config
            config = yaml_config["v1"]
            for key in config["cwt"]:
                config[key] = config["cwt"][key]
        else:
            # v0 config, no changes needed
            config = yaml_config
        for key in config[release]:
            self[key] = config[release][key]

        urls = config["urls"]
        images = config["images"]
        self["layers"] = config["layer_ordering"]
        self["packager_util"] = config["packager_utils"]
        self["hostname_url"] = config.get("hostname_url", "")
        self["product"] = config.get("product", "")
        self["image_names"] = config.get("image_names", "")
        self["rebuild_reason"] = config.get("rebuild_reason", "")
        self["ignore_files"] = config["ignore_files"]
        self["groups"] = config.get("groups", {})
        self["mails"] = config.get("mails", {})
        self["df_ext"] = config.get("df_ext", ".fedora")
        self["raw"] = config
        commands = config.get("commands", {})
        # Parse the image layers
        for (layer_id, image_list) in self["image_sets"].items():
            result = []
            if not image_list:
                # Empty image list (possibly redefined); just store an empty list
                self[layer_id] = result
                continue

            for i in image_list:
                t = "build_tag"
                image = images[i]
                image["name"] = i
                image["git_url"] = urls[image["git_url"]]
                b = image["git_branch"]
                # Fall back to the release branch if no future branch is provided
                fb = image["git_future"] if "git_future" in image else b
                # Use the global commands if none are defined for this image
                image_commands = image.get("commands", {})
                image["commands"] = commands.copy()
                image["commands"].update(image_commands)
                # Use the global build tag if no image-specific one is provided
                tag = image[t] if t in image else self[t]
                if "releases" in self:
                    for r in self["releases"].values():
                        # Replace release IDs in branches
                        if r["id"] in b:
                            b = b.replace(r["id"], r["current"])
                        if r["id"] in fb:
                            # TODO: What if there are multiple future releases?
                            fb = fb.replace(r["id"], r["future"][0])
                        # Create build tag from release
                        if r["id"] in tag:
                            image[t] = tag.replace(r["id"], r["current"])

                image["git_branch"] = b
                image["git_future"] = fb
                if "namespace" not in image:
                    image["namespace"] = self.get("namespace", "")
                result.append(image)

            self[layer_id] = result
Exemple #56
0
    return node.value


def dict_no_duplicate_constructor(loader, node, deep=False):
    keys = [key.value for key, value in node.value]

    if len(keys) != len(set(keys)):
        duplicate = next((key for key in keys if keys.count(key) > 1))
        raise yaml.parser.ParserError('', node.start_mark,
                                      f"Found duplicate key '{duplicate}'")

    return loader.construct_mapping(node, deep)


yaml.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                     dict_no_duplicate_constructor,
                     Loader=Loader)
yaml.add_constructor('tag:yaml.org,2002:timestamp',
                     date_constructor,
                     Loader=Loader)
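# With the guard above registered, loading a document that repeats a key fails
# loudly instead of silently keeping the last value, e.g.
#   yaml.load("a: 1\na: 2", Loader=Loader)  # raises "Found duplicate key 'a'"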


class ParameterNotFound(AttributeError):
    """
        Exception raised when a parameter is not found in the parameters.
    """
    def __init__(self, name, instant_str, variable_name=None):
        """
        :param name: Name of the parameter
        :param instant_str: Instant where the parameter does not exist, in the format `YYYY-MM-DD`.
        :param variable_name: If the parameter was queried during the computation of a variable, name of that variable.
Exemple #57
0
def add_constructors():
    yaml.add_constructor("!assert", assert_constructor)
    yaml.add_constructor("!record", record_constructor)
    yaml.add_constructor("!python", python_constructor)
    yaml.add_constructor("!menuitem", menuitem_constructor)
    yaml.add_constructor("!workflow", workflow_constructor)
    yaml.add_constructor("!act_window", act_window_constructor)
    yaml.add_constructor("!function", function_constructor)
    yaml.add_constructor("!report", report_constructor)
    yaml.add_constructor("!context", context_constructor)
    yaml.add_constructor("!delete", delete_constructor)
    yaml.add_constructor("!url", url_constructor)
    yaml.add_constructor("!eval", eval_constructor)
    yaml.add_multi_constructor("!ref", ref_constructor)
    yaml.add_constructor("!ir_set", ir_set_constructor)
Exemple #58
0
        filename_with_path (str): the YAML filename with an absolute path
    """
    try:
        with open(filename_with_path) as config_file:
            Module.temp_path = filename_with_path
            this_module = yaml.load(config_file, Loader=Loader)
            Module.temp_path = ""
            return this_module
    except IOError:
        raise ModulePathError(filename_with_path)
    except yaml.scanner.ScannerError:
        raise ModuleConstraintParseError(
            "Parsing of module {} failed. This is likely caused by a typo in the file."
            "".format(filename_with_path))


# Add the YAML Module constructor so that YAML knows to use it in situations where the tag matches.
yaml.add_constructor("!ec2rlcore.module.Module",
                     module_constructor,
                     Loader=Loader)


class SkipReason:
    NOT_AN_EC2_INSTANCE = "NOT_AN_EC2_INSTANCE"
    NOT_APPLICABLE_TO_DISTRO = "NOT_APPLICABLE_TO_DISTRO"
    PERFORMANCE_IMPACT = "PERFORMANCE_IMPACT"
    REQUIRES_SUDO = "REQUIRES_SUDO"
    NOT_SELECTED = "NOT_SELECTED"
    MISSING_SOFTWARE = "MISSING_SOFTWARE"
    MISSING_ARGUMENT = "MISSING_ARGUMENT"
Exemple #59
0
            best_style = False
        if not (isinstance(node_value, yaml.ScalarNode)
                and not node_value.style):
            best_style = False
        value.append((node_key, node_value))
    if flow_style is None:
        if dump.default_flow_style is not None:
            node.flow_style = dump.default_flow_style
        else:
            node.flow_style = best_style
    return node


##### Register

yaml.add_constructor(u'tag:yaml.org,2002:omap', _construct_odict)
yaml.constructor.SafeConstructor.add_constructor(u'tag:yaml.org,2002:omap',
                                                 _construct_odict)
yaml.add_constructor(u'tag:yaml.org,2002:map', _construct_mapping)
yaml.constructor.SafeConstructor.add_constructor(u'tag:yaml.org,2002:map',
                                                 _construct_mapping)

# Note: Register these two if standard-conforming behavior is desired, where
#       OrderedDict gets mapped to omap.
#yaml.representer.SafeRepresenter.add_representer(collections.OrderedDict, _repr_odict)
#yaml.add_representer(collections.OrderedDict, _repr_odict)

# Note: Register these two to map OrderedDict to a plain YAML mapping. Note that
#       standard-conforming implementations are not required to preserve the
#       ordering when they read the file.
yaml.representer.SafeRepresenter.add_representer(collections.OrderedDict,
Exemple #60
0
import json
import os
import re
import shutil
import subprocess
import yaml


# This is here because of a bug that causes yaml
# to incorrectly handle timezone info on timestamps
def timestamp_constructor(_, node):
    '''return timestamps as strings'''
    return str(node.value)


yaml.add_constructor(u'tag:yaml.org,2002:timestamp', timestamp_constructor)
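# Quick check of the override above: timestamps now load as their original
# strings (offset preserved) rather than datetime objects, e.g.
#   yaml.load("ts: 2018-01-01T10:00:00+02:00")
#   -> {'ts': '2018-01-01T10:00:00+02:00'}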


class OpenShiftCLIError(Exception):
    '''Exception class for openshiftcli'''
    pass


# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
    ''' Class to wrap the command line tools '''
    def __init__(self,
                 namespace,
                 kubeconfig='/etc/origin/master/admin.kubeconfig',
                 verbose=False,
                 all_namespaces=False):