def unicode_representer(dumper, data):
    """
    Add a custom unicode representer to use block literals for multiline
    strings in unicode.
    """
    if len(data.splitlines()) > 1:
        return yaml.ScalarNode(tag='tag:yaml.org,2002:str', value=data, style='|')
    return yaml.ScalarNode(tag='tag:yaml.org,2002:str', value=data)
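# Usage sketch (an assumption, not from the original source): register the
# representer above for plain `str` on the default Dumper, so multiline
# strings are emitted as block literals.
import yaml

yaml.add_representer(str, unicode_representer)

print(yaml.dump({'motd': 'first line\nsecond line\n'}))
# motd: |
#   first line
#   second line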
def aws_cloudformation_intrinsic_function(loader, node):
    fn = f"Fn::{node.tag[1:]}"
    if node.tag in ('!Ref', '!Condition'):
        fn = 'Ref'
    elif node.tag == '!GetAtt' and isinstance(node.value, str):
        path = node.value.split(".", maxsplit=2)
        node = yaml.SequenceNode(tag="tag:yaml.org,2002:seq",
                                 value=[str_node(p) for p in path])
    sub_node: yaml.Node
    if isinstance(node, yaml.SequenceNode):
        sub_node = seq_node(node.value)
    elif isinstance(node, yaml.MappingNode):
        sub_node = map_node(node.value)
    elif node.value is None:
        sub_node = yaml.ScalarNode('tag:yaml.org,2002:null', node.value)
    elif isinstance(node.value, str):
        sub_node = yaml.ScalarNode('tag:yaml.org,2002:str', node.value)
    elif isinstance(node.value, bool):
        sub_node = yaml.ScalarNode('tag:yaml.org,2002:bool', node.value)
    elif isinstance(node.value, int):
        sub_node = yaml.ScalarNode('tag:yaml.org,2002:int', node.value)
    elif isinstance(node.value, float):
        sub_node = yaml.ScalarNode('tag:yaml.org,2002:float', node.value)
    else:
        raise ValueError(node)
    new_node = map_node([(str_node(fn), sub_node)])
    return loader.construct_object(new_node)
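# A registration sketch for the constructor above. The `str_node`, `seq_node`,
# and `map_node` helpers below are illustrative stand-ins (the originals are
# not shown here), and the tag list is an assumed subset of CloudFormation's
# short-form intrinsic functions.
import yaml

def str_node(value):
    return yaml.ScalarNode('tag:yaml.org,2002:str', value)

def seq_node(value):
    return yaml.SequenceNode('tag:yaml.org,2002:seq', value)

def map_node(value):
    return yaml.MappingNode('tag:yaml.org,2002:map', value)

for tag in ('!Ref', '!Condition', '!GetAtt', '!Sub', '!Join', '!ImportValue'):
    yaml.add_constructor(tag, aws_cloudformation_intrinsic_function,
                         Loader=yaml.SafeLoader)

print(yaml.safe_load("BucketArn: !GetAtt MyBucket.Arn"))
# {'BucketArn': {'Fn::GetAtt': ['MyBucket', 'Arn']}}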
def handle_tag(self, node_name, node):
    # I just *know* there are gonna be problems with simply returning a
    # Scalar, but I don't give a f**k at this point.
    if node_name == "vault":
        new_node = yaml.ScalarNode(node_name, 'ENCRYPTED CONTENTS REDACTED')
    else:
        new_node = yaml.ScalarNode(node_name, node.value)
    return self.construct_scalar(new_node)
def _node(self):
    """ Create a yaml node object. """
    node = yaml.SequenceNode(
        tag='tag:yaml.org,2002:seq',
        value=[
            yaml.ScalarNode(tag='tag:yaml.org,2002:str', value='a'),
            yaml.ScalarNode(tag='tag:yaml.org,2002:str', value='b'),
            yaml.ScalarNode(tag='tag:yaml.org,2002:str', value='c'),
        ])
    return node
def test_parse_yaml_nodes(self):
    obj = {'test': yaml.ScalarNode('tag:yaml.org,2002:int', '123')}
    result = common.clone_safe(obj)
    self.assertEqual(result, {'test': 123})

    obj = {'foo': [
        yaml.ScalarNode('tag:yaml.org,2002:str', 'value'),
        yaml.ScalarNode('tag:yaml.org,2002:int', '123'),
        yaml.ScalarNode('tag:yaml.org,2002:float', '1.23'),
        yaml.ScalarNode('tag:yaml.org,2002:bool', 'true')
    ]}
    result = common.clone_safe(obj)
    self.assertEqual(result, {'foo': ['value', 123, 1.23, True]})
def test_parse_yaml_nodes(self): obj = {"test": yaml.ScalarNode("tag:yaml.org,2002:int", "123")} result = common.clone_safe(obj) assert result == {"test": 123} obj = { "foo": [ yaml.ScalarNode("tag:yaml.org,2002:str", "value"), yaml.ScalarNode("tag:yaml.org,2002:int", "123"), yaml.ScalarNode("tag:yaml.org,2002:float", "1.23"), yaml.ScalarNode("tag:yaml.org,2002:bool", "true"), ] } result = common.clone_safe(obj) assert result == {"foo": ["value", 123, 1.23, True]}
def unicode_representer(_, data):
    has_wide_lines = False
    for line in data.splitlines():
        if len(line) > 80:
            has_wide_lines = True
            break
    if has_wide_lines:
        return yaml.ScalarNode(u'tag:yaml.org,2002:str', data, style='>')
    if "\n" in data:
        return yaml.ScalarNode(u'tag:yaml.org,2002:str', data, style='|')
    return yaml.ScalarNode(u'tag:yaml.org,2002:str', data, style='')
def python_object_string_representer(dumper, data):
    full_class_name = data.__class__.__name__
    if data.__module__:
        full_class_name = data.__module__ + '.' + full_class_name
    return yaml.ScalarNode(
        "tag:yaml.org,2002:python/object:%s" % full_class_name,
        str(data), style=None)
def represent_time_period(dumper, data):
    dictionary = data.as_dict()
    if len(dictionary) == 1:
        unit, value = dictionary.popitem()
        out = '{}{}'.format(value, TIME_PERIOD_UNIT_MAP[unit])
        return yaml.ScalarNode(tag=u'tag:yaml.org,2002:str', value=out)
    return represent_odict(dumper, 'tag:yaml.org,2002:map', dictionary)
def run(tree: fdt.FdtParser, hardware: rule.HardwareYaml, config: config.Config,
        args: argparse.Namespace):
    if not args.yaml_out:
        raise ValueError(
            'you need to provide a yaml-out to use the yaml output method')

    phys_mem, reserved, _ = memory.get_physical_memory(tree, config)
    kernel_devs = get_kernel_devices(tree, hardware)
    dev_mem = memory.get_addrspace_exclude(
        list(reserved) + phys_mem + kernel_devs, config)

    yaml.add_representer(
        int,
        lambda dumper, data: yaml.ScalarNode('tag:yaml.org,2002:int', hex(data)))
    yaml_obj = {
        'devices': [{
            'start': r.base,
            'end': r.base + r.size
        } for r in dev_mem if r.size > 0],
        'memory': [{
            'start': r.base,
            'end': r.base + r.size
        } for r in phys_mem if r.size > 0]
    }

    yaml.dump(yaml_obj, args.yaml_out)
    args.yaml_out.close()
def from_yaml(cls, loader, node):
    logging.debug('{}:from_yaml(loader={})'.format(cls.__name__, loader))
    default, select, value_desc = None, list(), None

    # find value description
    for elem in node.value:
        for key, value in elem.value:
            if key.value == 'VALUE':
                assert value_desc is None, "VALUE key already set"
                value_desc = value
            if key.value == 'SELECT':
                select.append(loader.construct_mapping(value))
            if key.value == 'DEFAULT':
                assert default is None, "DEFAULT key already set"
                default = loader.construct_object(value)

    # if VALUE key isn't given, use default VALUE key
    # format: `VALUE: !Number '{vl.value}'`
    if value_desc is None:
        value_desc = yaml.ScalarNode(tag=u'!Number', value=u'{vl.value}')

    # select collectd metric based on SELECT condition
    metrics = loader.collector.items(select)
    assert len(metrics) < 2, \
        'Wrong SELECT condition {}, selected {} metrics'.format(
            select, len(metrics))
    if len(metrics) > 0:
        item = cls.format_node(value_desc, {
            'vl': metrics[0],
            'system': loader.system
        })
        return loader.construct_object(item)

    # nothing has been found by SELECT condition, set to DEFAULT value.
    assert default is not None, "No metrics selected by SELECT condition" \
        " {} and DEFAULT key isn't set".format(select)
    return default
def main() -> None:
    parser = argparse.ArgumentParser(
        description="Shows AI classes with non-trivial class hierarchies.")
    parser.add_argument("--type", help="AI class type to visualise",
                        choices=["Action", "AI", "Behavior", "Query"],
                        required=True)
    parser.add_argument(
        "--out-names",
        help="Path to which a vtable -> name map will be written",
        required=True)
    args = parser.parse_args()

    all_vtables = ai_common.get_vtables()

    graph = Graph()
    reverse_graph = Graph()
    build_graph(all_vtables, args.type, graph, reverse_graph)

    interesting_nodes = set()
    node_colors = dict()
    colors = [
        "#c7dcff", "#ffc7c7", "#ceffc7", "#dcc7ff", "#fffdc9", "#c9fff3",
        "#ffe0cc", "#ffcffe", "#96a8ff"
    ]
    components = graph.find_connected_components()
    num_nontrivial_cc = 0
    for i, comp in enumerate(components):
        if len(comp) == 2:
            continue
        for node in comp:
            node_colors[node] = colors[i % len(colors)]
        num_nontrivial_cc += 1
        interesting_nodes |= set(comp)

    print("digraph {")
    print("node [shape=rectangle]")
    for u in graph.nodes:
        if u not in interesting_nodes:
            continue
        for v in graph.nodes[u]:
            shape_u = "shape=component," if "[V]" not in u else ""
            shape_v = "shape=component," if "[V]" not in v else ""
            print(
                f'"{u}" [{shape_u}style=filled, fillcolor="{node_colors[u]}"]')
            print(
                f'"{v}" [{shape_v}style=filled, fillcolor="{node_colors[v]}"]')
            print(f'"{u}" -> "{v}"')
    print("}")
    print(f"# {len(components)} connected components")
    print(f"# {num_nontrivial_cc} non-trivial connected components")

    yaml.add_representer(int, lambda dumper, data: yaml.ScalarNode(
        'tag:yaml.org,2002:int', f"{data:#x}"), Dumper=yaml.CSafeDumper)
    with Path(args.out_names).open("w") as f:
        yaml.dump(_known_vtables, f, Dumper=yaml.CSafeDumper)
def float_representer(dumper, data):
    value = (u'%.6f' % data).rstrip("0")
    if len(value) == 0 or value.endswith("."):
        value += "0"
    # ret = dumper.represent_scalar('!float', value)
    ret = yaml.ScalarNode(u'tag:yaml.org,2002:float', value)
    # print ret, dir(ret), repr(ret)
    return ret
def _env_var_constructor(loader, node):
    raw_value = loader.construct_scalar(node)
    value = ENV_VAR_MATCHER.sub(_replace_env_var, raw_value)
    use_implicit_scalar_resolver = True
    # PyYAML requires tuple/list value for `implicit` arg in `resolve` method
    # containing two items. Second one is not used so passing `None` here.
    new_tag = loader.resolve(yaml.ScalarNode, value,
                             (use_implicit_scalar_resolver, None))
    new_node = yaml.ScalarNode(new_tag, value)
    return loader.yaml_constructors[new_tag](loader, new_node)
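# A minimal registration sketch for the constructor above. The `!env_var` tag
# name and the ENV_VAR_MATCHER / _replace_env_var definitions below are
# illustrative assumptions (the originals are not shown here): the pattern
# matches ${VAR} placeholders and substitutes values from os.environ.
import os
import re
import yaml

ENV_VAR_MATCHER = re.compile(r'\$\{([^}^{]+)\}')

def _replace_env_var(match):
    return os.environ.get(match.group(1), '')

# Resolve the tag implicitly for any plain scalar matching ${...}, then expand it.
yaml.add_implicit_resolver('!env_var', ENV_VAR_MATCHER, Loader=yaml.SafeLoader)
yaml.add_constructor('!env_var', _env_var_constructor, Loader=yaml.SafeLoader)

os.environ['PORT'] = '8080'
print(yaml.safe_load('port: ${PORT}'))   # {'port': 8080}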
def represent_unicode(_, data):
    """
    Represent unicode as regular string

    Source: https://stackoverflow.com/questions/1950306/pyyaml-dumping-without-tags
    """
    return yaml.ScalarNode(tag="tag:yaml.org,2002:str",
                           value=data.encode("utf-8"))
def convert(value):
    if isinstance(value, yaml.Node):
        return value
    if isinstance(value, FuncRef):
        return value
    if isinstance(value, int):
        return IntScalar(value)
    if isinstance(value, float):
        return FloatScalar(value)
    if isinstance(value, str):
        return yaml.ScalarNode('tag:yaml.org,2002:str', value)
    raise ValueError("cannot transform {} to node".format(value))
def format_node(cls, mapping, metric):
    if mapping.tag in [
            'tag:yaml.org,2002:str', Bytes2Kibibytes.yaml_tag,
            Number.yaml_tag, StripExtraDash.yaml_tag
    ]:
        return yaml.ScalarNode(mapping.tag, mapping.value.format(**metric))
    elif mapping.tag == 'tag:yaml.org,2002:map':
        values = []
        for key, value in mapping.value:
            values.append((yaml.ScalarNode(key.tag, key.value),
                           cls.format_node(value, metric)))
        return yaml.MappingNode(mapping.tag, values)
    elif mapping.tag in [ArrayItem.yaml_tag, ValueItem.yaml_tag]:
        values = []
        for seq in mapping.value:
            map_values = list()
            for key, value in seq.value:
                if key.value == 'SELECT':
                    map_values.append((yaml.ScalarNode(key.tag, key.value),
                                       cls.format_node(value, metric)))
                else:
                    map_values.append((yaml.ScalarNode(key.tag, key.value),
                                       value))
            values.append(yaml.MappingNode(seq.tag, map_values))
        return yaml.SequenceNode(mapping.tag, values)
    elif mapping.tag in [MapValue.yaml_tag]:
        values = []
        for key, value in mapping.value:
            if key.value == 'VALUE':
                values.append((yaml.ScalarNode(key.tag, key.value),
                               cls.format_node(value, metric)))
            else:
                values.append((yaml.ScalarNode(key.tag, key.value), value))
        return yaml.MappingNode(mapping.tag, values)
    return mapping
def create_yaml_file(dev_mem, phys_mem, outputStream):
    yaml.add_representer(
        int,
        lambda dumper, data: yaml.ScalarNode('tag:yaml.org,2002:int', hex(data)))
    yaml_obj = {
        'devices': make_yaml_list_of_regions(dev_mem),
        'memory': make_yaml_list_of_regions(phys_mem)
    }
    with outputStream:
        yaml.dump(yaml_obj, outputStream)
def set_env(self, var_name, var_value):
    self._lazy_load()
    var_value = var_value.strip()
    env_var = None
    for member in self._env_vars:
        if member[0].value == var_name:
            env_var = member
            break
    if var_value:
        if not env_var:
            env_var_name = yaml.ScalarNode('tag:yaml.org,2002:str', var_name)
            env_var_value = yaml.ScalarNode('tag:yaml.org,2002:str', var_value)
            env_var = (env_var_name, env_var_value)
            self._env_vars.append(env_var)
        else:
            env_var[1].value = var_value
    else:
        if env_var:
            self._env_vars.remove(env_var)
def to_yaml(cls, dumper, data):
    return yaml.MappingNode(cls.yaml_tag, value=[
        (yaml.ScalarNode(tag='tag:yaml.org,2002:str', value='start'),
         yaml.ScalarNode(tag='tag:yaml.org,2002:str',
                         value=TimeStamp.format_datetime(data.start))),
        (yaml.ScalarNode(tag='tag:yaml.org,2002:str', value='run'),
         yaml.ScalarNode(tag='tag:yaml.org,2002:str',
                         value=TimeStamp.format_duration(data.run))),
        (yaml.ScalarNode(tag='tag:yaml.org,2002:str', value='end'),
         yaml.ScalarNode(tag='tag:yaml.org,2002:str',
                         value=TimeStamp.format_datetime(data.end)))
    ])
def extract_note(note_node, defaults):
    note = yaml.load(yaml.serialize(note_node))
    if 'id' not in note:
        note['id'] = 0
        # We want to update the YAML file with a new note ID, but modifying a
        # YAML file is complicated. One way to do it is via a lower-level
        # interface. Here we use the nodes representation, which we modify
        # directly. The nodes representation can then be converted into a YAML
        # file that fairly faithfully resembles the original, but with the
        # newly added note ID.
        note_node.value.insert(0, (
            yaml.ScalarNode(tag='tag:yaml.org,2002:str', value='id'),
            yaml.ScalarNode(tag='tag:yaml.org,2002:int',
                            value=str(note['id'])),
        ))
    note.setdefault('deckName', defaults.get("deckName", "Default"))
    note.setdefault('modelName', defaults.get("modelName", "BasicMathJax"))
    note.setdefault('useMarkdown', defaults.get("useMarkdown", True))
    note.setdefault('markdownStyle', defaults.get("markdownStyle", "tango"))
    note.setdefault('markdownLineNums', defaults.get("markdownLineNums", False))
    note.setdefault('markdownTabLength', defaults.get("markdownTabLength", 4))
    note.setdefault('useMarkdownMathExt', defaults.get("useMarkdownMathExt", True))
    tags = defaults.get("extraTags", list()).copy()
    tags.extend(note.get('tags', list()))
    note['tags'] = ',{},'.format(','.join(sorted(tags)))
    fields = dict(defaults.get("fields", dict()))
    fields.update(note.get("fields", dict()))
    note['fields'] = fields
    note['node'] = note_node
    return note
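# A minimal sketch of the node-level round trip the comments above describe:
# compose a document to nodes, splice in a new key/value pair, and serialize
# the modified tree back to YAML. The input document and the injected 'id'
# value are illustrative.
import yaml

note_node = yaml.compose("fields:\n  Front: question\n  Back: answer\n")
note_node.value.insert(0, (
    yaml.ScalarNode(tag='tag:yaml.org,2002:str', value='id'),
    yaml.ScalarNode(tag='tag:yaml.org,2002:int', value='42'),
))
print(yaml.serialize(note_node))
# id: 42
# fields:
#   Front: question
#   Back: answer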
def DiveBits_generate_templates(component_objects: list, hosttime_id: str,
                                template_path: str):
    template_file_yaml = template_path + "/db_template.yaml"
    template_file_json = template_path + "/db_template.json"

    db_template_components = []
    for comp in component_objects:
        db_template_components.append(comp.generate_component_template())

    # write template file in YAML
    yaml.add_representer(
        HexInt,
        lambda dumper, repdata: yaml.ScalarNode('tag:yaml.org,2002:int',
                                                hex(repdata)))
    template_file = open(template_file_yaml, 'w')
    template_file.write(
        "# READONLY branches can be dropped in bitstream config files;\n")
    template_file.write(
        "# the corresponding data is matched through the BLOCK_PATH\n")
    yaml.dump(
        {
            "Hosttime_ID": hosttime_id,
            "db_components": db_template_components
        },
        template_file, sort_keys=False)
    template_file.close()

    # write template file in JSON
    template_file = open(template_file_json, 'w')
    json.dump(
        {
            "Hosttime_ID": hosttime_id,
            "db_components": db_template_components
        },
        template_file, sort_keys=False, indent=2, separators=(',\n', ':'))
    template_file.close()
def compose_sequence_node(self, anchor):
    state = {}
    if self.is_object_path:
        state['in_object_path'] = True
    with override(self, state):
        node = super().compose_sequence_node(anchor)

    # Any sequences inside object paths are actually slice notation.
    # e.g. [1] is not list([1]), but slice(1)
    if self.in_object_path:
        assert isinstance(node.value, list)
        assert len(node.value) == 1
        return yaml.ScalarNode(
            DEFAULT_SLICE_TAG,
            f'[{node.value[0].value}]',
            node.start_mark,
            node.end_mark,
        )
    return node
def construct_mapping(self, node, deep=False):
    """Return a mapping between key and value nodes

    This overrides the default construct_mapping to handle the case when
    we're parsing Jinja template replacement fields that are for integers,
    which are bare brackets, e.g., {{ blah }} versus "{{ blah }}".

    When pyyaml tries to read {{ blah }} with no quotes, it reads that as a
    mapping, then tries to use the mapping as a key in yet another mapping.
    Since {{ blah }} isn't hashable, this fails. However, since we don't
    really want that and instead we want this as a string, convert any
    unhashable keys to ScalarNodes and try it again. We want the string
    version anyway since we're only looking for what the variable name is.
    """
    if not isinstance(node, yaml.MappingNode):
        raise ConstructorError(
            None, None,
            "expected a mapping node, but found %s" % node.id,
            node.start_mark)
    mapping = {}
    for key_node, value_node in node.value:
        key = self.construct_object(key_node, deep=deep)
        if not isinstance(key, collections.Hashable):
            # If we can't hash it, take the value we need out of the
            # mapping and convert it to a ScalarNode, then retry.
            old_value = key_node.value[0][0].value
            new_node = yaml.ScalarNode(tag="tag:yaml.org,2002:str",
                                       value='"{{ %s }}"' % old_value)
            key = self.construct_object(new_node, deep=deep)
        value = self.construct_object(value_node, deep=deep)
        mapping[key] = value
    return mapping
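# A sketch of wiring the override above into a loader. Assumptions: the
# function is available at module scope, `collections.Hashable` and
# `ConstructorError` are importable where it is defined (on Python 3.10+ the
# former lives at `collections.abc.Hashable`), and the loader name below is
# illustrative.
import yaml


class JinjaAwareLoader(yaml.SafeLoader):
    pass


# Reuse the override above as this loader's construct_mapping.
JinjaAwareLoader.construct_mapping = construct_mapping

doc = yaml.load("package: {{ name }}", Loader=JinjaAwareLoader)
# Instead of failing on an unhashable mapping key, the bare Jinja field
# survives as the string key '"{{ name }}"' inside doc['package'].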
def format(self, *args, **kwargs):
    node = yaml.ScalarNode(tag=self._node.tag,
                           value=self._node.value.format(*args, **kwargs))
    return self._cls.from_yaml(self._loader, node)
raise Exception(
    r"Failed to assemble code: Could not find devkitPPC. devkitPPC should be installed to: C:\devkitPro\devkitPPC"
)

# Allow yaml to dump OrderedDicts for the diffs.
yaml.CDumper.add_representer(
    OrderedDict,
    lambda dumper, data: dumper.represent_dict(data.items()))

# Change how yaml dumps lists so each element isn't on a separate line.
yaml.CDumper.add_representer(
    list,
    lambda dumper, data: dumper.represent_sequence(
        u'tag:yaml.org,2002:seq', data, flow_style=True))

# Output integers as hexadecimal.
yaml.CDumper.add_representer(
    int,
    lambda dumper, data: yaml.ScalarNode('tag:yaml.org,2002:int', "0x%02X" % data))

temp_dir = tempfile.mkdtemp()
print(temp_dir)
print()

if not os.path.isdir("./asm_patches/patch_diffs"):
    os.mkdir("./asm_patches/patch_diffs")

custom_symbols = OrderedDict()
custom_symbols["sys/main.dol"] = OrderedDict()

with open("./asm_api/free_space_start_offsets.txt", "r") as f:
    free_space_start_offsets = yaml.safe_load(f)

next_free_space_offsets = {}
def unicode_representer(dumper, uni):
    node = yaml.ScalarNode(tag=u'tag:yaml.org,2002:str', value=str(uni))
    return node
import yaml
import json


def default_ctor(loader, tag_suffix, node):
    print(loader)
    print(tag_suffix)
    print(node)
    return tag_suffix + " " + node.value


yaml.add_multi_constructor("", default_ctor)

with open('MARKOV_submission_metadata.yml', 'r') as stream:
    y = yaml.load(stream)

yaml.ScalarNode(tag=u'!binary', value=u'')

with open('MARKOV_submission_metadata.json', 'w') as outfile:
    json.dump(y, outfile, default=str)
def _represent_unicode(_, uni):
    return yaml.ScalarNode(tag=u'tag:yaml.org,2002:str', value=uni)
def represent_id(_, data):
    if is_secret(data.id):
        return represent_secret(data.id)
    return yaml.ScalarNode(tag=u'tag:yaml.org,2002:str', value=data.id)