def read_callback():
    """Collectd read callback: parse the newest Puppet report for each host
    directory and dispatch one gauge metric per computed value."""
    yaml.add_multi_constructor("!", identity)
    logger('verb', "starting run")
    for report_dir in os.listdir(PuppetReportsConfig.reports_dir):
        logger('verb', "parsing: %s" % report_dir)
        reports = os.listdir(PuppetReportsConfig.reports_dir + '/' + report_dir)
        # BUG FIX: the original wrote `reports_dir.sort` without calling it,
        # so the listing was never sorted and "last report" was whatever
        # os.listdir happened to return last.
        reports.sort()
        last_report = reports[-1]
        last_report_file = (PuppetReportsConfig.reports_dir + '/' + report_dir
                            + '/' + last_report)
        with open(last_report_file, "r") as stream:
            data = yaml.load(stream)
        data = map_value(data)
        results = compute_metrics(data)
        logger('verb', "ready to send")
        for k in results:
            logger('verb', ("pushing value for %s => %s = %s"
                            % (report_dir, k, results[k])))
            val = collectd.Values(plugin=NAME, type='gauge')
            val.plugin_instance = report_dir
            val.type_instance = k
            try:
                val.values = [float(results[k])]
            except (TypeError, ValueError):
                # BUG FIX: narrowed from a bare `except:` which would also
                # swallow KeyboardInterrupt/SystemExit.
                logger('warn', ("value %s => %s for %s cannot be parsed to float"
                                % (k, results[k], report_dir)))
                val.values = [0.0]
            val.dispatch()
def get_yaml_or_json_file(cls, url, working_dir):
    """
    Load yaml or json from filesystem or s3
    :param url: str
    :param working_dir: str
    :return: dict
    :raise CfnSphereException: on parse failure or unknown file suffix
    """
    file_content = cls.get_file(url, working_dir)
    try:
        lowered = url.lower()
        if lowered.endswith(".json") or lowered.endswith(".template"):
            # BUG FIX: json.loads(..., encoding='utf-8') was deprecated since
            # Python 3.1 and removed in 3.9 (raises TypeError); json decodes
            # str / UTF-8 bytes without the keyword.
            return json.loads(file_content)
        elif lowered.endswith(".yml") or lowered.endswith(".yaml"):
            # Prefer FullLoader when available (PyYAML >= 5.1); fall back to
            # the classic Loader on older versions.
            if hasattr(yaml, 'FullLoader'):
                loader = yaml.FullLoader
            else:
                loader = yaml.Loader
            # Catch-all constructor so CloudFormation short-form tags parse.
            yaml.add_multi_constructor(u"", cls.handle_yaml_constructors,
                                       Loader=loader)
            return yaml.load(file_content, Loader=loader)
        else:
            raise CfnSphereException(
                "Invalid suffix, use [json|template|yml|yaml]")
    except Exception as e:
        raise CfnSphereException(e)
def load(contents):
    """Parse Puppet/Ruby-flavored YAML text into Python data structures."""
    # Map Ruby object tags onto plain dicts/strings so PyYAML accepts them.
    yaml.add_multi_constructor(u"!ruby/object:", construct_ruby_object)
    yaml.add_constructor(u"!ruby/sym", construct_ruby_sym)
    return yaml.load(contents)
def read_callback(self):
    """Collectd read callback: re-parse the Puppet report only when its
    mtime changed, then dispatch one gauge per computed metric."""
    yaml.add_multi_constructor("!", identity)
    self.logger('verb', "parsing: %s" % self.report_file)
    time = os.path.getmtime(self.report_file)
    if time != self.last_report_file_mtime:
        with open(self.report_file, "r") as stream:
            self.last_report_file_mtime = time
            data = yaml.load(stream)
        data = map_value(data)
        results = compute_metrics(data)
        self.logger('verb', "ready to send")
        for k in results:
            self.logger('verb', ("pushing value for %s => %s = %s"
                                 % (self.report_file, k, results[k])))
            val = collectd.Values(plugin=NAME, type='gauge')
            val.plugin_instance = 'last_run'
            val.type_instance = k
            # metric time is the mtime of the file which should match when
            # puppet ran last
            val.time = time
            try:
                val.values = [float(results[k])]
            except (TypeError, ValueError):
                # BUG FIX: narrowed from a bare `except:` so unrelated errors
                # (including KeyboardInterrupt) are no longer swallowed.
                self.logger('warn', (
                    "value %s => %s for %s cannot be parsed to float"
                    % (k, results[k], self.report_file)))
                val.values = [0.0]
            val.dispatch()
def _initialize():
    """
    Notes
    -----
    Taken (with minor changes) from `Pylearn2`_.

    .. _Pylearn2: \
    http://github.com/lisa-lab/pylearn2/blob/master/pylearn2/config/yaml_parse.py
    """
    global IS_INITIALIZED
    # Register every custom tag handler on the SafeLoader.
    for prefix, handler in (("!obj:", _multi_constructor_obj),
                            ("!import:", _multi_constructor_import)):
        yaml.add_multi_constructor(prefix, handler, Loader=yaml.SafeLoader)
    for tag, handler in (("!import", _constructor_import),
                         ("!float", _constructor_float)):
        yaml.add_constructor(tag, handler, Loader=yaml.SafeLoader)
    # Treat bare scientific-notation scalars as !float.
    pattern = re.compile(SCIENTIFIC_NOTATION_REGEXP)
    yaml.add_implicit_resolver("!float", pattern)
    IS_INITIALIZED = True
def load(contents):
    """Deserialize Ruby-flavored YAML (e.g. Puppet output) into Python objects."""
    # example usage: load(open(path).read())
    yaml.add_multi_constructor(u"!ruby/object:", construct_ruby_object)
    yaml.add_constructor(u"!ruby/sym", construct_ruby_sym)
    parsed = yaml.load(contents)
    return parsed
def read_callback():
    """Collectd read callback: for every host directory, load the most recent
    Puppet report and push its metrics as gauges."""
    yaml.add_multi_constructor("!", identity)
    logger('verb', "starting run")
    for report_dir in os.listdir(PuppetReportsConfig.reports_dir):
        logger('verb', "parsing: %s" % report_dir)
        report_files = os.listdir(PuppetReportsConfig.reports_dir + '/' + report_dir)
        # BUG FIX: original `reports_dir.sort` referenced the bound method
        # without calling it, so the list stayed unsorted and [-1] did not
        # reliably select the newest report.
        report_files.sort()
        last_report = report_files[-1]
        last_report_file = (PuppetReportsConfig.reports_dir + '/' + report_dir
                            + '/' + last_report)
        with open(last_report_file, "r") as stream:
            data = yaml.load(stream)
        data = map_value(data)
        results = compute_metrics(data)
        logger('verb', "ready to send")
        for k in results:
            logger('verb', ("pushing value for %s => %s = %s"
                            % (report_dir, k, results[k])))
            val = collectd.Values(plugin=NAME, type='gauge')
            val.plugin_instance = report_dir
            val.type_instance = k
            try:
                val.values = [float(results[k])]
            except (TypeError, ValueError):
                # BUG FIX: replaced bare `except:` with conversion errors only.
                logger('warn', ("value %s => %s for %s cannot be parsed to float"
                                % (k, results[k], report_dir)))
                val.values = [0.0]
            val.dispatch()
def add_parameterized_validator(param_validator, base_tag, tag_prefix=None):
    """
    Register a parameterized validator under the given YAML tag prefix.

    When tag_prefix is None it is derived as u'!~%s(' % name of the
    validator function. A parameterized validator receives a document node,
    a schema node, and extra int/string parameters parsed out of the full
    YAML tag in the schema; it returns True when the document node satisfies
    the schema node. Recursion into sub-nodes is handled by XYS itself.
    """
    # pylint: disable-msg=C0111,W0621
    if not tag_prefix:
        tag_prefix = u'!~%s(' % param_validator.__name__

    def multi_constructor(loader, tag_suffix, node):
        def temp_validator(node, schema):
            extra = _split_params(tag_prefix, tag_suffix)
            return param_validator(node, schema, *extra)
        temp_validator.__name__ = str(tag_prefix + tag_suffix)
        wrapped = ContructorValidatorNode(base_tag, base_tag, temp_validator)
        return wrapped(loader, node)

    yaml.add_multi_constructor(tag_prefix, multi_constructor)
def configure():
    """
    Register the YAML representer/constructor pair that (de)serializes
    Step objects. Called in drain/__init__.py.
    """
    yaml.add_multi_constructor('!step', step_multi_constructor)
    yaml.add_multi_representer(Step, step_multi_representer)
def get_all_host_facter_message():
    """Collect processed facter data for every host YAML file in ``yaml_dir``.

    Puppet writes Ruby-flavored YAML, so Ruby object/symbol tags are mapped
    to plain dicts/strings before parsing.

    :return: dict mapping host file basename -> processed facter result dict
    """
    yaml.add_multi_constructor(u"!ruby/object:", construct_ruby_object)
    yaml.add_constructor(u"!ruby/sym", construct_ruby_sym)
    # Find every host file that carries facter information.
    for dirpath, dirnames, filenames in os.walk(yaml_dir):
        # Only process files directly inside yaml_dir (skip subdirectories).
        if dirpath == yaml_dir:
            for file in filenames:
                file_name, file_ext = os.path.splitext(file)
                if file_ext == '.yaml':
                    host_yaml_path = yaml_dir + '/' + file
                    # Parsed YAML content as a dict (or falsy on failure).
                    host_yaml_result_dict = yaml_file_handle(host_yaml_path)
                    if host_yaml_result_dict:
                        # BUG FIX: dict.has_key() was removed in Python 3;
                        # the `in` operator is the portable equivalent.
                        if 'facts' in host_yaml_result_dict:
                            data_dict = host_yaml_result_dict['facts']['values']
                        else:
                            # Older layout stores values at the top level.
                            data_dict = host_yaml_result_dict['values']
                        result_dict = handle_facter_message(data_dict)
                        all_host_facter_message[file_name] = result_dict
    return all_host_facter_message
def modules_list():
    """Flask endpoint: return the puppet server's installed module list as YAML.

    Requires a valid X-Auth-Token header; responds 401 on a missing/incorrect
    token and 500 when `puppet module list` fails or emits invalid YAML.
    """
    if 'X-Auth-Token' not in request.headers:
        syslog.syslog("modules request failed because X-Auth-Token was missing from the request")
        abort(401)
    if request.headers['X-Auth-Token'] != app.config['AUTH_TOKEN']:
        app.logger.warn('modules request failed because the X-Auth-Token was incorrect')
        abort(401)
    ## ask the puppet server for a list of modules
    (rcode, stdout, stderr) = sysexec([app.config['PUPPET_BINARY'], "module",
                                       "--confdir", app.config['PUPPET_CONFDIR'],
                                       "list", "--render-as", "yaml"])
    if rcode != 0:
        syslog.syslog("puppet module list failed")
        syslog.syslog("stdout: " + str(stdout))
        syslog.syslog("stderr: " + str(stderr))
        abort(500)
    else:
        ## Try to validate the YAML
        try:
            # Map Puppet's Ruby object tags onto plain dicts so PyYAML
            # can parse the output.
            yaml.add_multi_constructor(u"!ruby/object:", construct_ruby_object)
            modules = yaml.load(stdout)
        except yaml.YAMLError as ex:
            syslog.syslog("puppet module list returned invalid YAML")
            syslog.syslog("invalid YAML: " + stdout)
            abort(500)
        # Re-serialize the parsed structure as clean YAML for the response.
        r = make_response(yaml.dump(modules))
        r.headers['Content-Type'] = "application/x-yaml"
        return r
def pairs(self, filepath: Path):
    """Yield traversal pairs from an IaC-flavored YAML file.

    Converts custom YAML into parsable YAML first:
    - quote unquoted values such as {{ placeholder }}
    - remove text between <% %> and {% %}
    - remove comments that start with #

    :param filepath: path to the YAML file to parse
    """
    def _constructor(loader, tag_suffix, node):
        """This is needed to parse IaC syntax"""
        if isinstance(node, yaml.MappingNode):
            return loader.construct_mapping(node)
        if isinstance(node, yaml.SequenceNode):
            return loader.construct_sequence(node)
        ret = loader.construct_scalar(node)
        return f"{tag_suffix} {ret}"

    document = ""
    # BUG FIX: the file handle was opened without being closed; `with`
    # guarantees release.
    with filepath.open("r") as handle:
        for line in handle.readlines():
            if re.match(r".+(\[)?\{\{.*\}\}(\])?", line):
                # Quote template placeholders so they parse as scalars.
                line = line.replace('"', "'")
                line = line.replace("{{", '"{{').replace("}}", '}}"')
            document += line
    document = re.sub(r"[<{]%.*?%[}>]", "", document,
                      flags=re.MULTILINE | re.DOTALL)
    # BUG FIX: without re.MULTILINE, ^#.*$ only matches a comment on the very
    # first character of the document; with it, every comment line is removed
    # as documented above.
    document = re.sub(r"^#.*$", "", document, flags=re.MULTILINE)
    # Load converted YAML; unknown tags are handled by the catch-all.
    yaml.add_multi_constructor("", _constructor, Loader=yaml.SafeLoader)
    try:
        code = yaml.safe_load(document)
        yield from self.traverse(code)
    except Exception as e:
        debug(f"{type(e)} in {filepath}")
def get_yaml_or_json_file(cls, url, working_dir):
    """
    Load yaml or json from filesystem or s3
    :param url: str
    :param working_dir: str
    :return: dict
    """
    file_content = cls.get_file(url, working_dir)

    def _parse_yaml(content):
        # Accept any unknown tag via the class-provided constructor hook.
        yaml.add_multi_constructor(u"", cls.handle_yaml_constructors)
        return yaml.load(content)

    suffix = url.lower()
    try:
        if suffix.endswith(".json"):
            return json.loads(file_content)
        elif suffix.endswith(".template"):
            # .template may hold JSON or YAML: try JSON first, then YAML.
            try:
                return json.loads(file_content)
            except Exception as e:
                return _parse_yaml(file_content)
        elif suffix.endswith(".yml") or suffix.endswith(".yaml"):
            return _parse_yaml(file_content)
        else:
            raise CfnSphereException(
                "Invalid suffix, use [json|template|yml|yaml]")
    except Exception as e:
        raise CfnSphereException(e)
def get_content(self):
    """Return a single document from YAML"""
    def multi_constructor(loader, tag_suffix, node):
        """Stores all unknown tags content into a dict

        Original yaml:
        !unknown_tag
        - some content

        Python object:
        {"!unknown_tag": ["some content", ]}
        """
        if type(node.value) is list:
            # A list of tuples means a mapping node; a plain list is a sequence.
            if type(node.value[0]) is tuple:
                return {node.tag: loader.construct_mapping(node)}
            else:
                return {node.tag: loader.construct_sequence(node)}
        else:
            return {node.tag: loader.construct_scalar(node)}

    yaml.add_multi_constructor("!", multi_constructor)
    # "a+" creates the file when it does not exist; seek(0) rewinds so the
    # whole file is read from the top.
    with self.__get_file(mode="a+") as file_obj:
        file_obj.seek(0)
        # Fall back to one empty document when the file holds no documents.
        self.__documents = [x for x in yaml.load_all(file_obj)] or [
            {},
        ]
    return self.__documents[self.__document_id]
def main():
    """CLI entry point: configure logging, load config.yml, verify the AWS
    account matches the configured lambda account, then run cleanup."""
    logger.setLevel(logging.INFO)
    formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
    console = logging.StreamHandler()
    console.setFormatter(formatter)
    logger.addHandler(console)
    args = parse_args()
    if args.verbose:
        logger.setLevel(logging.DEBUG)
    # config.yml lives next to this script.
    base_path = os.path.dirname(__file__)
    config_path = os.path.join(base_path, 'config.yml')

    def default_ctor(_dummy, tag_suffix, node):
        # Preserve unknown tags as "<tag> <value>" strings instead of failing.
        return tag_suffix + ' ' + node.value

    yaml.add_multi_constructor('', default_ctor)
    with open(config_path) as config_fd:
        # BaseLoader keeps every scalar as a plain string.
        config = yaml.load(config_fd, Loader=yaml.BaseLoader)
    test_account_id = config['test_account_id']
    api_name = config['api_name']
    # Guard: the terminator may only run from the configured lambda account.
    account_id = boto3.client('sts').get_caller_identity().get('Account')
    if account_id != config['lambda_account_id']:
        sys.exit(f'The terminator must be run from the lambda account: {config["lambda_account_id"]}')
    cleanup(args.stage, check=args.check, force=args.force, api_name=api_name,
            test_account_id=test_account_id, targets=args.target)
def get_all_host_facter_message():
    """Walk ``yaml_dir`` and build a dict of facter results per host file.

    Registers Ruby-tag constructors first because Puppet serializes its YAML
    with Ruby object/symbol tags.

    :return: dict mapping host file basename -> processed facter dict
    """
    yaml.add_multi_constructor(u"!ruby/object:", construct_ruby_object)
    yaml.add_constructor(u"!ruby/sym", construct_ruby_sym)
    for dirpath, dirnames, filenames in os.walk(yaml_dir):
        # Restrict to files immediately under yaml_dir.
        if dirpath == yaml_dir:
            for file in filenames:
                file_name, file_ext = os.path.splitext(file)
                if file_ext == '.yaml':
                    host_yaml_path = yaml_dir + '/' + file
                    host_yaml_result_dict = yaml_file_handle(host_yaml_path)
                    if host_yaml_result_dict:
                        # BUG FIX: has_key() no longer exists in Python 3 —
                        # membership test replaces it.
                        if 'facts' in host_yaml_result_dict:
                            data_dict = host_yaml_result_dict['facts']['values']
                        else:
                            data_dict = host_yaml_result_dict['values']
                        result_dict = handle_facter_message(data_dict)
                        all_host_facter_message[file_name] = result_dict
    return all_host_facter_message
def configure():
    """
    Configures YAML parser for Step serialization and deserialization.
    Called in drain/__init__.py
    """
    def _never_alias(*_args):
        # Disable anchors/aliases so repeated objects serialize inline.
        return True

    yaml.add_multi_representer(Step, step_multi_representer)
    yaml.add_multi_constructor('!step', step_multi_constructor)
    yaml.Dumper.ignore_aliases = _never_alias
def __init__(self):
    """Prompt for the vault password and register a '!vault' YAML tag that
    transparently decrypts tagged values."""
    self.vault_pass = CLI.ask_vault_passwords()
    self.vault_editor = VaultEditor(self.vault_pass)

    def _decrypt_vault_tag(loader, tag_suffix, node):
        # Decrypt the raw scalar stored in the node.
        return self.vault_editor.vault.decrypt(node.value)

    yaml.add_multi_constructor('!vault', _decrypt_vault_tag)
def doactualizalogpuppet(filetext):
    """Parse a Puppet report (Ruby-flavored YAML), render its logs and
    resource statuses as an HTML fragment, and store it on the matching
    machine record together with an overall OK/ERROR status.

    :param filetext: raw YAML text of the puppet report
    :return: the string "OK"
    """
    ahora = datetime.datetime.today()
    # Puppet YAML carries Ruby tags; map them to plain dicts/strings.
    yaml.add_multi_constructor(u"!ruby/object:", construct_ruby_object)
    yaml.add_constructor(u"!ruby/sym", construct_ruby_sym)
    mydata = yaml.load(filetext)
    host = mydata["host"].upper()
    # Keep only the short hostname (strip the domain part).
    host = host[:host.index('.')]
    estadoglobal = "OK"
    output = StringIO.StringIO()
    logs = mydata["logs"]
    output.write("<center><br><b>Logs de puppet</b><br><br>")
    for item in logs:
        output.write("<table style='border: solid 1px #000000;width:95%;'>")
        if 'file' in item:
            output.write("<tr style='border: solid 1px #000000;'><td width='10%'>File</td><td>"+item['file']+":"+str(item['line'])+"</td></tr>")
        output.write("<tr style='border: solid 1px #000000;'>")
        # An 'err' level entry flips the global state to ERROR.
        if item['level'] == 'err' :
            estadoglobal = "ERROR"
            output.write("<td width='10%'><font color='red'>Level</font></td>")
            output.write("<td><font color='red'>"+item['level']+"</font></td>")
        else:
            output.write("<td width='10%'>Level</td>")
            output.write("<td>"+item['level']+"</td>")
        output.write("</tr>")
        output.write("<tr><td width='10%'>Message</td><td>"+item['message']+"</td></tr>")
        output.write("</table><br>")
    output.write("<br><b>Clases y recursos aplicados</b><br><br>")
    recursos = mydata["resource_statuses"]
    output.write("<table style='border: solid 1px #000000;width:95%'>")
    for item in recursos:
        eventos = recursos[item]['events']
        descripcion = recursos[item]['source_description']
        estado = "OK"
        # Any failed event marks both the resource and the run as ERROR.
        for evento in eventos:
            valor = evento['status']
            if valor == "failure" :
                estado = "ERROR"
                estadoglobal = "ERROR"
                break
        if estado == "OK":
            output.write("<tr style='border: solid 1px #000000;'><td width='90%'>"+ descripcion +"</td><td>"+estado+"</td></tr>")
        else:
            output.write("<tr style='border: solid 1px #000000;'><td width='90%'><font color='red'>"+ descripcion +"</font></td><td><font color='red'>"+estado+"</font></td></tr>")
    output.write("</table><br><br></center>")
    # Look up the (non-Windows) machine row for this host.
    fila = cdb((cdb.maquinas.host.upper()==host) & (cdb.maquinas.tipohost!='WINDOWS')).select().last()
    if fila == None:
        pass
    else:
        # Move the file pointer back to the beginning; otherwise nothing is
        # written into the table. (translated from the original Spanish)
        output.seek(0)
        fila.update_record(ultimopuppet=ahora, estadopuppet=estadoglobal, logpuppet=cdb.maquinas.logpuppet.store(output, filename=host))
        output.close()
    return "OK"
def parse_template(template):
    """Parse a CloudFormation template supplied as JSON or YAML text."""
    try:
        return json.loads(template)
    except Exception:
        # Not JSON: register CFN short-form tag handling and parse as YAML.
        yaml.add_multi_constructor('', moto.cloudformation.utils.yaml_tag_constructor,
                                   Loader=NoDatesSafeLoader)
        try:
            return yaml.safe_load(template)
        except Exception:
            # Last resort: the date-preserving loader with full tag support.
            return yaml.load(template, Loader=NoDatesSafeLoader)
def init_yaml_ruby_parsers():
    """Teach PyYAML to read Ruby-serialized YAML (as emitted by Puppet)."""
    def _ruby_object_to_map(loader, suffix, node):
        # Any !ruby/object:<Class> becomes a plain dict.
        return loader.construct_yaml_map(node)

    def _ruby_sym_to_str(loader, node):
        # !ruby/sym scalars become plain strings.
        return loader.construct_yaml_str(node)

    yaml.add_multi_constructor(u"!ruby/object:", _ruby_object_to_map)
    yaml.add_constructor(u"!ruby/sym", _ruby_sym_to_str)
def configure_callback(conf):
    """Collectd configuration callback: pick up the ReportsDir setting."""
    yaml.add_multi_constructor("!", identity)
    logger('verb', "configuring")
    for node in conf.children:
        if node.key != 'ReportsDir':
            logger('verb', "unknown config key in puppet module: %s" % node.key)
            continue
        PuppetReportsConfig.reports_dir = node.values[0]
def parase_yaml(path='/home/201602250326.yaml'):
    """Parse a Puppet (Ruby-flavored) YAML file.

    :param path: YAML file to read; defaults to the previously hard-coded
        path for backward compatibility.
    :return: the parsed Python object
    """
    yaml.add_multi_constructor(u"!ruby/object:", construct_ruby_object)
    yaml.add_constructor(u"!ruby/sym", construct_ruby_sym)
    # BUG FIX: the builtin file() was removed in Python 3, and the handle was
    # never closed; open() inside a `with` block fixes both problems.
    with open(path, 'r') as stream:
        mydata = yaml.load(stream)
    return mydata
def parase_yaml(path='/home/201602250326.yaml'):
    """Load one Puppet YAML report into Python data.

    :param path: report file path; keeps the historical default so existing
        zero-argument callers are unaffected.
    :return: parsed YAML content
    """
    yaml.add_multi_constructor(u"!ruby/object:", construct_ruby_object)
    yaml.add_constructor(u"!ruby/sym", construct_ruby_sym)
    # BUG FIX: replaced the Python-2-only file() builtin (and the leaked
    # handle) with a context-managed open().
    with open(path, 'r') as stream:
        return yaml.load(stream)
def _init_parser(self):
    """Install YAML constructors for the Ruby tags found in puppet files."""
    def _ruby_object(loader, suffix, node):
        # !ruby/object:* -> dict
        return loader.construct_yaml_map(node)

    def _ruby_sym(loader, suffix, node):
        # !ruby/sym -> str
        return loader.construct_yaml_str(node)

    for tag, ctor in ((u"!ruby/object:", _ruby_object),
                      (u"!ruby/sym", _ruby_sym)):
        yaml.add_multi_constructor(tag, ctor)
def _register_default_constructors(self):
    """Register YAML constructors for every factory mapping/sequence plus
    the special Figure, Document and self tags."""
    for name in factory.mappings:
        yaml.add_constructor("!%s:" % name, self._mapping_factory(name))
    for name in factory.sequences:
        yaml.add_constructor("!%s:" % name, self._sequence_factory(name))
    yaml.add_constructor("!Figure:", self._figure_constructor)
    yaml.add_constructor("!Document:", self._document_constructor)
    # !self takes a suffix, so it needs a multi-constructor.
    yaml.add_multi_constructor(u"!self", self._self_constructor)
def _init_parser(self):
    """Map Puppet's Ruby YAML tags onto native Python types."""
    def _as_map(loader, suffix, node):
        # !ruby/object:* parses as an ordinary dict.
        return loader.construct_yaml_map(node)

    def _as_str(loader, suffix, node):
        # !ruby/sym parses as an ordinary string.
        return loader.construct_yaml_str(node)

    yaml.add_multi_constructor(u"!ruby/sym", _as_str)
    yaml.add_multi_constructor(u"!ruby/object:", _as_map)
def initialize():
    """
    Initialize the configuration system by installing YAML handlers.
    Automatically done on first call to load() specified in this file.
    """
    global is_initialized
    # Install the custom multi-constructors for object and pickle tags.
    for prefix, ctor in (('!obj:', multi_constructor),
                         ('!pkl:', multi_constructor_pkl)):
        yaml.add_multi_constructor(prefix, ctor)
    is_initialized = True
def __init__(self, file_path: str):
    """Load device/simulator definitions from ``file_path``.

    A catch-all constructor is registered first so vendor files containing
    custom YAML tags still load with the SafeLoader.
    """
    yaml.add_multi_constructor(
        '',
        self.__any_constructor,
        Loader=yaml.SafeLoader,
    )
    self.file_path = file_path
    # Parsed results, populated by __read_file().
    self.devices: List[device.DeviceAbstract] = []
    self.simulators: List[device.Simulator] = []
    self.__read_file()
def _load_yaml(path):
    """Load a (possibly multi-document) YAML file and return the LAST document."""
    yaml.add_multi_constructor('', default_ctor, Loader=Loader)
    last_doc = None
    with open(path, 'r') as handle:
        for doc in yaml.load_all(handle, Loader=Loader):
            last_doc = doc
    return last_doc
def load_serverless_yml():
    """Read ./serverless.yml and return it as a dict.

    Unknown '!' tags map to None — only a handful of plain fields are
    needed, not full tag semantics. Returns None (after printing a hint)
    when the file is not present.
    """
    yaml.add_multi_constructor('!', lambda loader, suffix, node: None)
    try:
        with open("serverless.yml", "r+") as f:
            return yaml.load(f, Loader=yaml.Loader)
    except IOError:
        print("This command can only be run in a Serverless service directory")
def main(yaml_dir, output):
    """
    Walk *yaml_dir* for .yml files, collect every label/body/description
    string, and merge them into the JSON translation dict at *output*.

    :param yaml_dir: directory tree to scan
    :type yaml_dir: str
    :param output: writable file object for the merged JSON
    :type output: file
    :rtype: None
    """
    tl = []

    def default_ctor(loader, tag_suffix, node):
        # Java collections serialize as sequences; everything else as mappings.
        if tag_suffix == 'java.util.ArrayList':
            return loader.construct_sequence(node, deep=True)
        else:
            return loader.construct_mapping(node, deep=True)

    yaml.add_multi_constructor('!', default_ctor)

    def find_labels(d):
        # Yield the displayable strings of this node, then recurse into
        # any list-valued children.
        if 'label' in d:
            yield d['label']
        if 'body' in d:
            yield d['body']
        if 'description' in d:
            yield d['description']
        for k in d:
            # BUG FIX: the original tested isinstance(d, str), which is
            # loop-invariant; the intent is to skip plain-string values
            # before recursing into list values.
            if isinstance(d[k], str):
                continue
            if isinstance(d[k], list):
                for i in d[k]:
                    for j in find_labels(i):
                        yield j

    # Renamed `dir` -> `dirs`: the original shadowed the builtin.
    for root, dirs, files in os.walk(yaml_dir):
        for fpath in filter(lambda x: x.endswith('.yml'), files):
            with open(os.path.join(root, fpath)) as f:
                tl += list(find_labels(yaml.load(f)))
    try:
        with open(output.name) as existing_f:
            tl_dict = json.loads(existing_f.read())
    except (ValueError, IOError):
        tl_dict = {}
    # Add any new strings with an empty translation.
    for s in tl:
        if s not in tl_dict:
            tl_dict[s] = ""
    # BUG FIX: unicode() does not exist in Python 3; json.dumps already
    # returns str.
    output.write(json.dumps(tl_dict, indent=4, sort_keys=True))
def initialize():
    """Add constructors to yaml parser.

    Registers the YAML constructors (parse direction) and representers
    (dump direction) used for meter/model serialization.
    """
    from eemeter.meter.base import MeterBase
    from eemeter.models.temperature_sensitivity import Model
    yaml.add_multi_constructor('!obj:', multi_constructor_obj)
    yaml.add_constructor('!setting', constructor_setting)
    yaml.add_multi_representer(MeterBase, multi_representer_obj)
    yaml.add_multi_representer(Model, multi_representer_obj)
    # BUG FIX: without `global`, the assignment created a dead local and the
    # module-level flag was never set (compare the sibling initialize()
    # functions in this codebase, which declare it global).
    global is_initialized
    is_initialized = True
def create(cont):
    """Parse a Puppet report (Ruby-flavored YAML) and persist it as
    PPREPORT / PPMETRICS / PPREPORTLOG rows linked to the matching Host.

    :param cont: raw YAML text of the puppet report
    """
    def construct_ruby_object(loader, suffix, node):
        # !ruby/object:* -> plain dict
        return loader.construct_yaml_map(node)

    def construct_ruby_sym(loader, node):
        # !ruby/sym -> plain string
        return loader.construct_yaml_str(node)

    def timestamp_constructor(loader, node):
        # Delegate timestamp parsing to dateutil (keeps timezone info).
        return dateutil.parser.parse(node.value)

    yaml.add_multi_constructor(u"!ruby/object:", construct_ruby_object)
    yaml.add_constructor(u"!ruby/sym", construct_ruby_sym)
    yaml.add_constructor(u'tag:yaml.org,2002:timestamp', timestamp_constructor)
    d = yaml.load(cont)
    certname = d['host']
    # NOTE(review): assumes a fixed 5-character domain suffix on the
    # certname — confirm against actual certnames.
    hostname = certname[0:-5]
    hosts = Host.objects.filter(name__iexact=hostname)
    if hosts:
        host = hosts.first()
        # One PPREPORT row per report; metrics and logs reference it.
        ppreport = PPREPORT()
        ppreport.status = d['status']
        ppreport.time = d['time']
        ppreport.version = d['puppet_version']
        ppreport.save()
        for m1 in d['metrics']:
            for m2 in d['metrics'][m1]['values']:
                ppmetrics = PPMETRICS()
                ppmetrics.report = ppreport
                ppmetrics.category = d['metrics'][m1]['name']
                ppmetrics.name = m2[0]
                ppmetrics.value = m2[2]
                ppmetrics.save()
        for rel in d['logs']:
            ppreportlog = PPREPORTLOG()
            ppreportlog.report = ppreport
            ppreportlog.level = rel['level']
            ppreportlog.message = rel['message']
            ppreportlog.time = rel['time']
            ppreportlog.save()
        # Generic Resource row pointing back at the report.
        res = Resource()
        res.host = host
        res.name = ""
        res.type = "ppreport"
        res.resource_id = ppreport.id
        res.save()
def create(cont):
    """Store one Puppet report: parse the Ruby-flavored YAML in *cont* and
    write PPREPORT, PPMETRICS, PPREPORTLOG and Resource rows for the host.

    :param cont: raw YAML report text
    """
    def construct_ruby_object(loader, suffix, node):
        # Ruby object tags become dicts.
        return loader.construct_yaml_map(node)

    def construct_ruby_sym(loader, node):
        # Ruby symbols become strings.
        return loader.construct_yaml_str(node)

    def timestamp_constructor(loader, node):
        # Use dateutil so report timestamps keep their timezone.
        return dateutil.parser.parse(node.value)

    yaml.add_multi_constructor(u"!ruby/object:", construct_ruby_object)
    yaml.add_constructor(u"!ruby/sym", construct_ruby_sym)
    yaml.add_constructor(u'tag:yaml.org,2002:timestamp', timestamp_constructor)
    d = yaml.load(cont)
    certname = d['host']
    # NOTE(review): strips a fixed 5-character suffix from the certname to
    # get the hostname — confirm all certnames share that suffix length.
    hostname = certname[0:-5]
    hosts = Host.objects.filter(name__iexact=hostname)
    if hosts:
        host = hosts.first()
        ppreport = PPREPORT()
        ppreport.status = d['status']
        ppreport.time = d['time']
        ppreport.version = d['puppet_version']
        ppreport.save()
        # Flatten the nested metrics structure into individual rows.
        for m1 in d['metrics']:
            for m2 in d['metrics'][m1]['values']:
                ppmetrics = PPMETRICS()
                ppmetrics.report = ppreport
                ppmetrics.category = d['metrics'][m1]['name']
                ppmetrics.name = m2[0]
                ppmetrics.value = m2[2]
                ppmetrics.save()
        for rel in d['logs']:
            ppreportlog = PPREPORTLOG()
            ppreportlog.report = ppreport
            ppreportlog.level = rel['level']
            ppreportlog.message = rel['message']
            ppreportlog.time = rel['time']
            ppreportlog.save()
        res = Resource()
        res.host = host
        res.name = ""
        res.type = "ppreport"
        res.resource_id = ppreport.id
        res.save()
def configure_callback(self, conf):
    """Collectd config callback: read the LastReportFile and Verbose options."""
    yaml.add_multi_constructor("!", identity)
    self.logger('verb', "configuring")
    # Map config keys onto the instance attributes they populate.
    option_attr = {'LastReportFile': 'report_file', 'Verbose': 'verbose'}
    for node in conf.children:
        attr = option_attr.get(node.key)
        if attr is None:
            self.logger(
                'verb',
                "unknown config key in puppet module: %s" % node.key)
        else:
            setattr(self, attr, node.values[0])
def __init__(self, repoOrder, repoSortField, startTime, pageSize):
    """Set up query defaults, GitHub credentials and the catch-all YAML
    constructor used when parsing repository data files."""
    # Query parameters, with fallbacks for falsy arguments.
    self.repoOrder = repoOrder or 'DESC'
    self.repoSortField = repoSortField or 'PUSHED_AT'
    self.startTime = startTime or None
    self.pageSize = pageSize or 100
    # GitHub credentials come from the environment (KeyError if unset).
    self.githubAPIKey = os.environ['GITHUB_API_KEY']
    self.githubAPIRoot = os.environ['GITHUB_API_ROOT']
    # Pagination / result state.
    self.cursor = None
    self.repos = []
    self.dataFiles = []
    # Ignore unknown '!' tags when parsing repository YAML.
    yaml.add_multi_constructor('!', GutenbergManager.default_ctor)
def _get_all_facts_by_hostnames(self, hostnames):
    """Return the facter 'values' dict for the first hostname that has data.

    NOTE(review): despite the "all" in the name, the `return` sits inside
    the loop, so only the FIRST hostname with data is parsed and returned
    (None when no hostname yields data) — confirm whether aggregation over
    all hostnames was intended.
    """
    def construct_ruby_object(loader, suffix, node):
        # !ruby/object:* -> dict
        return loader.construct_yaml_map(node)

    def construct_ruby_sym(loader, node):
        # !ruby/sym -> str
        return loader.construct_yaml_str(node)

    yaml.add_multi_constructor(u"!ruby/object:", construct_ruby_object)
    yaml.add_constructor(u"!ruby/sym", construct_ruby_sym)
    for hostname in hostnames:
        data = self._get_data_for_hostname(hostname)
        if data:
            return yaml.load(data)['values']
def add_constructors():
    """Register every custom YAML tag used by the data files."""
    tag_handlers = (
        (u"!assert", assert_constructor),
        (u"!record", record_constructor),
        (u"!python", python_constructor),
        (u"!menuitem", menuitem_constructor),
        (u"!act_window", act_window_constructor),
        (u"!function", function_constructor),
        (u"!report", report_constructor),
        (u"!context", context_constructor),
        (u"!delete", delete_constructor),
        (u"!url", url_constructor),
        (u"!eval", eval_constructor),
    )
    for tag, handler in tag_handlers:
        yaml.add_constructor(tag, handler)
    # !ref carries a suffix, so it needs a multi-constructor.
    yaml.add_multi_constructor(u"!ref", ref_constructor)
def add_constructors():
    """Install the YAML tag constructors for data-file directives."""
    for tag, handler in ((u"!assert", assert_constructor),
                         (u"!record", record_constructor),
                         (u"!python", python_constructor),
                         (u"!menuitem", menuitem_constructor),
                         (u"!act_window", act_window_constructor),
                         (u"!function", function_constructor),
                         (u"!report", report_constructor),
                         (u"!context", context_constructor),
                         (u"!delete", delete_constructor),
                         (u"!url", url_constructor),
                         (u"!eval", eval_constructor),
                         (u"!ir_set", ir_set_constructor)):
        yaml.add_constructor(tag, handler)
    # !ref accepts a suffix after the tag, hence the multi-constructor.
    yaml.add_multi_constructor(u"!ref", ref_constructor)
def configure_callback(conf):
    """Collectd config callback: handle ReportsDir / Verbose / Interval."""
    yaml.add_multi_constructor("!", identity)
    logger('verb', "configuring")

    def _set_reports_dir(values):
        PuppetReportsConfig.reports_dir = values[0]

    def _set_verbose(values):
        PuppetReportsConfig.verbose = True

    def _set_interval(values):
        PuppetReportsConfig.interval = float(values[0])
        logger('verb', "set interval to: %s" % PuppetReportsConfig.interval)

    handlers = {
        'ReportsDir': _set_reports_dir,
        'Verbose': _set_verbose,
        'Interval': _set_interval,
    }
    for node in conf.children:
        handler = handlers.get(node.key)
        if handler is None:
            logger('verb', "unknown config key in puppet module: %s" % node.key)
        else:
            handler(node.values)
def load_yaml(stream):
    """Parse Puppet YAML into Python objects"""
    # Puppet emits Ruby-specific tags that stock PyYAML does not know.
    def _ruby_object(loader, suffix, node):
        return loader.construct_yaml_map(node)

    def _ruby_sym(loader, node):
        return loader.construct_yaml_str(node)

    yaml.add_multi_constructor('!ruby/object:', _ruby_object)
    yaml.add_constructor('!ruby/sym', _ruby_sym)
    try:
        return yaml.load(stream)
    except yaml.loader.ConstructorError as e:
        # PyYAML cannot construct mappings with list-valued keys; surface
        # the known upstream limitation explicitly.
        if "unhashable type: 'list'" in str(e):
            raise NotImplementedError("http://pyyaml.org/ticket/169")
        raise
def readMonitoringConfig():
    """Run Puppet's own Ruby parser over manifests/monitoringserver.pp and
    return the parse tree as Python data (via ZAML/YAML).

    NOTE(review): the ruby argv is ['ruby', '', manifest] while the parse
    script is fed via stdin — confirm the empty-string script argument is
    intentional (vs. an explicit '-' read-script-from-stdin invocation).
    """
    # Use Puppet's parser to convert monitoringserver.pp into YAML
    manifest = os.path.join(os.path.dirname(__file__), 'manifests', 'monitoringserver.pp')
    parseScript = '''
    require 'puppet/parser'
    parser = Puppet::Parser::Parser.new(Puppet[:environment])
    parser.file = ARGV[0]
    print ZAML.dump(parser.parse)
    '''
    # stderr is captured but discarded (dummy).
    data, dummy = subprocess.Popen(['ruby', '', manifest],
                                   stdin=subprocess.PIPE,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE).communicate(parseScript)
    # See http://stackoverflow.com/q/8357650/785541 on parsing Puppet's YAML
    yaml.add_multi_constructor(u"!ruby/object:",
                               lambda loader, suffix, node: loader.construct_yaml_map(node))
    yaml.add_constructor(u"!ruby/sym",
                         lambda loader, node: loader.construct_yaml_str(node))
    return yaml.load(data)
def buildMetadata(self):
    """Extract gemspec metadata from a Ruby GEM archive.

    A .gem file is a tar containing metadata.gz (a gzipped YAML gemspec);
    locate it, parse the Ruby-flavored YAML and delegate to
    makeMetadataFromGemspec. Skips analysis when yaml/gzip are unavailable.
    """
    TARFile.buildMetadata(self)
    try:
        import yaml
    except ImportError:
        # BUG FIX: narrowed from a bare `except:`; only a missing module is
        # the expected failure here.
        print("Skipping GEM analysis of %s because YAML is not supported by this python" % self.filename)
        return
    try:
        import gzip
    except ImportError:
        print("Skipping GEM analysis of %s because GZip is not supported by this python" % self.filename)
        return
    # BUG FIX: was a Python-2-only `print` statement, inconsistent with the
    # print() calls above; normalized to the function form.
    print("Doing GEM analysis...")
    tf = None
    try:
        tf = tarfile.open(self.filename, "r")
        for tarEntry in tf.getmembers():
            if not tarEntry.isfile():
                continue
            if not tarEntry.name == 'metadata.gz':
                continue
            handle = tf.extractfile(tarEntry)
            gzipHandle = gzip.GzipFile(fileobj=handle)
            # Puppet-style Ruby tags appear in gemspec YAML too.
            yaml.add_multi_constructor(u"!ruby/object:", construct_ruby_object)
            yaml.add_constructor(u"!ruby/sym", construct_ruby_sym)
            gemSpec = yaml.load(gzipHandle)
            self.makeMetadataFromGemspec(gemSpec)
            files.filetype.qClose(gzipHandle)
            files.filetype.qClose(handle)
    finally:
        # BUG FIX: guard against tf being unbound when tarfile.open raises.
        if tf is not None:
            files.filetype.qClose(tf)
def test_short_form_func_in_yaml_teamplate():
    """Each CloudFormation short-form intrinsic tag (!Ref, !GetAtt, ...)
    should expand to its long Fn:: form when parsed with the tag
    constructor. (The "teamplate" typo is preserved in the function name
    because external references may depend on it.)"""
    template = """---
KeyB64: !Base64 valueToEncode
KeyRef: !Ref foo
KeyAnd: !And
- A
- B
KeyEquals: !Equals [A, B]
KeyIf: !If [A, B, C]
KeyNot: !Not [A]
KeyOr: !Or [A, B]
KeyFindInMap: !FindInMap [A, B, C]
KeyGetAtt: !GetAtt A.B
KeyGetAZs: !GetAZs A
KeyImportValue: !ImportValue A
KeyJoin: !Join [ ":", [A, B, C] ]
KeySelect: !Select [A, B]
KeySplit: !Split [A, B]
KeySub: !Sub A
"""
    yaml.add_multi_constructor('', yaml_tag_constructor)
    template_dict = yaml.load(template)
    # Pairs of (key, expected long-form expansion).
    key_and_expects = [
        ['KeyRef', {'Ref': 'foo'}],
        ['KeyB64', {'Fn::Base64': 'valueToEncode'}],
        ['KeyAnd', {'Fn::And': ['A', 'B']}],
        ['KeyEquals', {'Fn::Equals': ['A', 'B']}],
        ['KeyIf', {'Fn::If': ['A', 'B', 'C']}],
        ['KeyNot', {'Fn::Not': ['A']}],
        ['KeyOr', {'Fn::Or': ['A', 'B']}],
        ['KeyFindInMap', {'Fn::FindInMap': ['A', 'B', 'C']}],
        ['KeyGetAtt', {'Fn::GetAtt': ['A', 'B']}],
        ['KeyGetAZs', {'Fn::GetAZs': 'A'}],
        ['KeyImportValue', {'Fn::ImportValue': 'A'}],
        ['KeyJoin', {'Fn::Join': [ ":", [ 'A', 'B', 'C' ] ]}],
        ['KeySelect', {'Fn::Select': ['A', 'B']}],
        ['KeySplit', {'Fn::Split': ['A', 'B']}],
        ['KeySub', {'Fn::Sub': 'A'}],
    ]
    for k, v in key_and_expects:
        template_dict.should.have.key(k).which.should.be.equal(v)
def load_yaml(filepath, safe=True, omit_tags=False):
    '''
    Load the given yaml filepath into a python object.

    safe: bool. When True, use yaml's SafeLoader.

    omit_tags: bool. When True, skip every yaml tag in the file, giving each
        a value of u'<TAG OMITTED>'. Useful when reading a file without
        having all of its tag constructors available.

    A throwaway Loader subclass is created because yaml.add_multi_constructor
    permanently embeds the constructor in the given Loader class for the
    rest of the python session — disastrous for callers sharing the session
    (e.g. inside Maya). PyYaml offers no "remove_multi_constructor", so we
    register on a disposable subclass instead of the shared classes.
    '''
    chosen_loader = yaml.SafeLoader if safe else yaml.Loader

    if omit_tags:
        class TmpLoader(chosen_loader):
            # Disposable subclass: keeps the catch-all constructor out of
            # PyYaml's global Loader classes.
            pass

        def ommitter_constructor(loader, tag_suffix, node):
            # Replace the tagged value with a marker string instead of
            # constructing an object.
            return tag_suffix + u' <TAG OMITTED>'

        yaml.add_multi_constructor(u'', ommitter_constructor, Loader=TmpLoader)
        chosen_loader = TmpLoader

    logger.debug("Using yaml loader: %s", chosen_loader.__name__)
    with open(filepath) as f:
        return yaml.load(f, chosen_loader)  # nosec (ignore bandit static analysis warning for not using safe_load [B506:yaml_load] )
def get_yaml_or_json_file(cls, url, working_dir):
    """
    Load yaml or json from filesystem or s3
    :param url: str
    :param working_dir: str
    :return: dict
    """
    content = cls.get_file(url, working_dir)
    suffix = url.lower()
    try:
        # .json and .template are both plain JSON.
        if suffix.endswith(".json") or suffix.endswith(".template"):
            return json.loads(content)
        if suffix.endswith(".yml") or suffix.endswith(".yaml"):
            # Accept unknown tags via the class-provided hook.
            yaml.add_multi_constructor(u"", cls.handle_yaml_constructors)
            return yaml.load(content)
        raise CfnSphereException("Invalid suffix, use [json|template|yml|yaml]")
    except Exception as e:
        raise CfnSphereException(e)
def add_parameterized_validator(param_validator, base_tag, tag_prefix=None):
    """
    Register a parameterized validator for the given tag prefix.

    When tag_prefix is None (or empty) it is built automatically as
    u'!~%s(' % param_validator.__name__

    A parameterized validator is a function accepting a document node (as a
    python object), a schema node (also a python object), and further
    integer/string parameters taken directly from its complete YAML name in
    the schema.  It returns True when the document node is valid according
    to the schema node.  The validator need not recurse into sub-nodes:
    XYS already does that.
    """
    # pylint: disable-msg=C0111,W0621
    if not tag_prefix:
        tag_prefix = u'!~%s(' % param_validator.__name__

    def _make_validator_node(loader, tag_suffix, node):
        # Bind the parameters parsed from the tag into a validator closure.
        def _bound_validator(node, schema):
            return param_validator(node, schema,
                                   *_split_params(tag_prefix, tag_suffix))
        _bound_validator.__name__ = str(tag_prefix + tag_suffix)
        return ValidatorNode(_construct_node(loader, node, base_tag),
                             _bound_validator)

    yaml.add_multi_constructor(tag_prefix, _make_validator_node)
def initialize():
    """
    Install the YAML handlers used by the configuration system.

    Done automatically on the first call to load() in this file.
    """
    global is_initialized
    # Custom multi-constructors, keyed by their tag prefix.
    for prefix, handler in (('!obj:', multi_constructor_obj),
                            ('!pkl:', multi_constructor_pkl),
                            ('!import:', multi_constructor_import),
                            ('!value:', multi_constructor_value),
                            ('!multiseq:', multi_constructor_multiseq)):
        yaml.add_multi_constructor(prefix, handler)
    # Exact-tag constructors.
    for tag, handler in (('!import', constructor_import),
                         ("!float", constructor_float),
                         ("!int", constructor_int),
                         ("!range", constructor_range)):
        yaml.add_constructor(tag, handler)
    # Bare scalars in scientific notation resolve implicitly to !float.
    yaml.add_implicit_resolver('!float', re.compile(SCIENTIFIC_NOTATION_REGEXP))
    is_initialized = True
def get_content(self):
    """Return a single document from YAML"""
    def multi_constructor(loader, tag_suffix, node):
        """Stores all unknown tags content into a dict

        Original yaml:
        !unknown_tag
        - some content

        Python object:
        {"!unknown_tag": ["some content", ]}
        """
        # Guard clauses: scalar first, then mapping vs sequence.
        if type(node.value) is not list:
            return {node.tag: loader.construct_scalar(node)}
        if type(node.value[0]) is tuple:
            return {node.tag: loader.construct_mapping(node)}
        return {node.tag: loader.construct_sequence(node)}

    yaml.add_multi_constructor("!", multi_constructor)
    with self.__get_file() as file_obj:
        self.__documents = list(yaml.load_all(file_obj))
    return self.__documents[self.__document_id]
def initialize():
    """
    Install the YAML handlers used by the configuration system.

    Done automatically on the first call to load() in this file.
    """
    global is_initialized
    # Custom multi-constructors, keyed by their tag prefix.
    for prefix, handler in (('!obj:', multi_constructor),
                            ('!pkl:', multi_constructor_pkl),
                            ('!import:', multi_constructor_import),
                            ('!include:', multi_constructor_include)):
        yaml.add_multi_constructor(prefix, handler)

    def import_constructor(loader, node):
        # The scalar holds a dotted path; resolve it to the python object.
        return try_to_import(loader.construct_scalar(node))

    yaml.add_constructor('!import', import_constructor)
    # Bare dotted names (a.b.c) resolve implicitly as !import.
    yaml.add_implicit_resolver(
        '!import',
        re.compile(r'(?:[a-zA-Z_][\w_]+\.)+[a-zA-Z_][\w_]+')
    )
    is_initialized = True
def initialize():
    """
    Install the YAML handlers used by the configuration system.

    Done automatically on the first call to load() in this file.
    """
    global is_initialized
    # Custom multi-constructors, keyed by their tag prefix.
    for prefix, handler in (("!obj:", multi_constructor_obj),
                            ("!pkl:", multi_constructor_pkl),
                            ("!import:", multi_constructor_import)):
        yaml.add_multi_constructor(prefix, handler)
    # !import: explicit tag plus implicit resolution of bare dotted names.
    yaml.add_constructor("!import", constructor_import)
    yaml.add_implicit_resolver("!import", re.compile(r"(?:[a-zA-Z_][\w_]+\.)+[a-zA-Z_][\w_]+"))
    # !float: explicit tag plus implicit resolution of numeric scalars.
    # NOTE(review): the leading space in this pattern means it only matches
    # scalars beginning with a blank — looks suspicious, confirm intended.
    yaml.add_constructor("!float", constructor_float)
    yaml.add_implicit_resolver("!float", re.compile(r" [-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?"))
    is_initialized = True
def _parse_template(self):
    """Parse self.template into self.template_dict: YAML first, falling
    back to JSON when YAML parsing fails.

    The catch-all multi-constructor lets unknown short-hand tags in the
    template pass through yaml_tag_constructor instead of aborting the load.
    """
    yaml.add_multi_constructor('', yaml_tag_constructor)
    try:
        self.template_dict = yaml.load(self.template)
    except (yaml.parser.ParserError, yaml.scanner.ScannerError):
        # A JSON template can fail YAML parsing at either stage: the parser
        # (structure errors) or the scanner (e.g. tab characters raise
        # ScannerError).  Catch both so the JSON fallback always runs.
        self.template_dict = json.loads(self.template)
#!/usr/bin/env python import socket import yaml from subprocess import Popen, PIPE import subprocess def construct_ruby_object(loader, suffix, node): return loader.construct_yaml_map(node) def construct_ruby_sym(loader, node): return loader.construct_yaml_str(node) # for the correct loader for the Ruby object, so PyYAML can read the data after that yaml.add_multi_constructor(u"!ruby/object:", construct_ruby_object) yaml.add_constructor(u"!ruby/sym:", construct_ruby_sym) certout = subprocess.check_output(['puppet', 'ca', 'list', '--render-as', 'yaml']) list = yaml.load(certout) for i in list: oprnt = "unknown host" cname = i['name'] pingc = Popen(['/bin/ping', '-c2', cname], stdout = PIPE, stderr = PIPE) #.stdout.read() output, error_output = pingc.communicate() if pingc.returncode: #print (error_output) print output else:
def generic_object(loader, suffix, node):
    """Construct any unknown !-tagged node as its plain python equivalent
    (scalar, sequence or mapping)."""
    if isinstance(node, yaml.ScalarNode):
        return loader.construct_scalar(node)
    if isinstance(node, yaml.SequenceNode):
        return loader.construct_sequence(node)
    if isinstance(node, yaml.MappingNode):
        return loader.construct_mapping(node)
    # TODO(tailhook) wrap into some object?
    raise ValueError(node)


yaml.add_multi_constructor('!', generic_object, Loader=FancyLoader)


def load(f):
    """Parse a YAML stream with the FancyLoader (unknown tags tolerated)."""
    return yaml.load(f, Loader=FancyLoader)


def find_config():
    """Walk upward from the current directory looking for vagga.yaml.

    Returns (directory, path to vagga.yaml, suffix path relative to that
    directory).  Raises RuntimeError when no vagga.yaml exists anywhere on
    the way up to the filesystem root.
    """
    path = pathlib.Path(os.getcwd())
    suffix = pathlib.Path("")
    while str(path) != path.root:
        candidate = path / 'vagga.yaml'
        if candidate.exists():
            return path, candidate, suffix
        # Accumulate the directory name we are leaving onto the suffix.
        suffix = path.parts[-1] / suffix
        path = path.parent
    raise RuntimeError("No vagga.yaml found in path {!r}".format(path))