def test_path_resolver_loader(data_filename, path_filename, verbose=False):
    """Verify the path-resolver loader composes the same node structure
    as the default loader.

    data_filename -- YAML file composed with MyLoader (path resolver).
    path_filename -- reference YAML file composed with the default loader.
    verbose -- when true, dump the composed nodes after the comparison.
    """
    _make_path_loader_and_dumper()
    # Context managers close the handles deterministically; the original
    # left the open() results to the garbage collector.
    with open(data_filename, "rb") as data_file:
        nodes1 = list(yaml.compose_all(data_file.read(), Loader=MyLoader))
    with open(path_filename, "rb") as path_file:
        nodes2 = list(yaml.compose_all(path_file.read()))
    try:
        # zip() silently truncates; check the document counts explicitly
        # (matching the sibling test_composer).
        assert len(nodes1) == len(nodes2), (len(nodes1), len(nodes2))
        for node1, node2 in zip(nodes1, nodes2):
            data1 = _convert_node(node1)
            data2 = _convert_node(node2)
            assert data1 == data2, (data1, data2)
    finally:
        if verbose:
            # print(x) with one argument is valid on Python 2 and 3,
            # unlike the original "print x" statement.
            print(yaml.serialize_all(nodes1))
def test_path_resolver_dumper(data_filename, path_filename, verbose=False):
    """Serialize both input files through MyDumper and verify the output
    composes back to the same node structure as the data file."""
    _make_path_loader_and_dumper()
    for filename in [data_filename, path_filename]:
        # serialize_all consumes the compose_all generator eagerly, so the
        # file may be closed as soon as the call returns.
        with open(filename, "rb") as input_file:
            output = yaml.serialize_all(
                yaml.compose_all(input_file), Dumper=MyDumper)
        if verbose:
            print(output)  # single-argument print() works on Python 2 and 3
        nodes1 = yaml.compose_all(output)
        # compose_all is lazy: materialize before the file is closed.
        with open(data_filename, "rb") as data_file:
            nodes2 = list(yaml.compose_all(data_file))
        for node1, node2 in zip(nodes1, nodes2):
            data1 = _convert_node(node1)
            data2 = _convert_node(node2)
            assert data1 == data2, (data1, data2)
def test_path_resolver_loader(data_filename, path_filename, verbose=False):
    """Verify the path-resolver loader composes the same node structure
    as the default loader.

    data_filename -- YAML file composed with MyLoader (path resolver).
    path_filename -- reference YAML file composed with the default loader.
    verbose -- when true, dump the composed nodes after the comparison.
    """
    _make_path_loader_and_dumper()
    # Context managers close the handles deterministically; the original
    # left the open() results to the garbage collector.
    with open(data_filename, 'rb') as data_file:
        nodes1 = list(yaml.compose_all(data_file.read(), Loader=MyLoader))
    with open(path_filename, 'rb') as path_file:
        nodes2 = list(yaml.compose_all(path_file.read()))
    try:
        # zip() silently truncates; check the document counts explicitly
        # (matching the sibling test_composer).
        assert len(nodes1) == len(nodes2), (len(nodes1), len(nodes2))
        for node1, node2 in zip(nodes1, nodes2):
            data1 = _convert_node(node1)
            data2 = _convert_node(node2)
            assert data1 == data2, (data1, data2)
    finally:
        if verbose:
            print(yaml.serialize_all(nodes1))
def test_path_resolver_dumper(data_filename, path_filename, verbose=False):
    """Serialize both input files through MyDumper and verify the output
    composes back to the same node structure as the data file."""
    _make_path_loader_and_dumper()
    for filename in [data_filename, path_filename]:
        # serialize_all consumes the compose_all generator eagerly, so the
        # file may be closed as soon as the call returns.
        with open(filename, 'rb') as input_file:
            output = yaml.serialize_all(
                yaml.compose_all(input_file), Dumper=MyDumper)
        if verbose:
            print(output)
        nodes1 = yaml.compose_all(output)
        # compose_all is lazy: materialize before the file is closed.
        with open(data_filename, 'rb') as data_file:
            nodes2 = list(yaml.compose_all(data_file))
        for node1, node2 in zip(nodes1, nodes2):
            data1 = _convert_node(node1)
            data2 = _convert_node(node2)
            assert data1 == data2, (data1, data2)
def index(request):
    """Render the isityaml page.

    GET shows an empty form; POST composes the submitted text with the
    YAML 1.2 loader and reports either its canonical form or the error.
    """
    # Template context defaults (a GET request needs nothing more).
    ourmap = {
        "yamlstate": STATE_GET,   # the state of the page
        "yamlcanon": u"",         # YAML in canonical form
        "yamlerror": u"",         # YAML error messages
        "yamloriginal": u"",      # original text, shown only on errors
    }
    if request.method != 'GET':  # POST request
        # Fetch the form field outside the try so the except branch can
        # always echo it back (it used to be unbound if the very first
        # statement raised).
        ourtarget = request.POST.get("yamlarea")
        try:
            composition = compose_all(ourtarget, Loader12)
            yamlcanon = serialize_all(
                composition, canonical=True, allow_unicode=True)
            ourmap["yamlstate"] = STATE_POST_YES
            # An empty serialization means the input was only comments.
            ourmap["yamlcanon"] = yamlcanon if yamlcanon else COMMENTSTR
        # PKM2014 - AttributeErrors are now caught.
        except (YAMLError, AttributeError) as e:
            ourmap["yamlstate"] = STATE_POST_NO
            ourmap["yamlerror"] = str(e)
            ourmap["yamloriginal"] = ourtarget
    return render(request, 'isityaml/isityaml.html', ourmap)
def test_sequence_of_sequence_of_scalar_iteration(self):
    """Compose a sequence-of-sequences document and iterate two levels deep."""
    yaml_text = """
- - a
  - aa
  - aaa
- - b
  - bb
  - bbb
- - c
  - cc
  - ccc
"""
    stream = StringIO.StringIO(yaml_text)
    doc_count = 0
    for top_node in yaml.compose_all(stream):
        self.assertIsInstance(top_node, yaml.nodes.SequenceNode)
        doc_count += 1
        inner_count = 0
        collected = []
        for inner_seq in top_node:
            self.assertIsInstance(inner_seq, yaml.nodes.SequenceNode)
            inner_count += 1
            row = []
            for leaf in inner_seq:
                self.assertIsInstance(leaf, yaml.nodes.ScalarNode)
                self.assertIsInstance(leaf.value, str)
                row.append(leaf.value)
            collected.append(row)
        self.assertEqual(collected,
                         [["a", "aa", "aaa"],
                          ["b", "bb", "bbb"],
                          ["c", "cc", "ccc"]])
        self.assertEqual(inner_count, 3)
    self.assertEqual(doc_count, 1)
def index(request):
    """Render the isityaml page (template-response variant).

    GET shows an empty form; POST composes the submitted text with the
    YAML 1.2 loader and reports either its canonical form or the error.
    """
    # Template context defaults (a GET request needs nothing more).
    ourmap = {
        "yamlstate": STATE_GET,   # the state of the page
        "yamlcanon": u"",         # YAML in canonical form
        "yamlerror": u"",         # YAML error messages
        "yamloriginal": u"",      # original text, shown only on errors
    }
    if request.method != 'GET':  # POST request
        # Fetch the field outside the try so the except branch can always
        # echo it back (it used to be unbound if the first statement raised).
        ourtarget = request.POST.get("yamlarea")
        try:
            composition = yaml.compose_all(ourtarget, Loader12)
            yamlcanon = yaml.serialize_all(
                composition, canonical=True, allow_unicode=True)
            ourmap["yamlstate"] = STATE_POST_YES
            # An empty serialization means the input was only comments.
            ourmap["yamlcanon"] = yamlcanon if yamlcanon else COMMENTSTR
        # PKM2014 - AttributeErrors are now caught.
        except (YAMLError, AttributeError) as e:
            ourmap["yamlstate"] = STATE_POST_NO
            ourmap["yamlerror"] = str(e)
            ourmap["yamloriginal"] = ourtarget
    # Both branches ended with identical CSRF/context boilerplate; do it once.
    ourmap.update(csrf(request))
    context = RequestContext(request, ourmap)
    return TemplateResponse(request, 'isityaml/isityaml.html', context=context)
def __init__(self, fname, i_options=None, dptype=None, debug=False):
    """Load a model description from a YAML file or URL.

    fname -- path or URL of the YAML source; the stream is expected to
             hold exactly two documents (model and h-model).
    i_options -- discretization options dict (default: a fresh empty
             dict; the previous `i_options={}` mutable default was
             shared across all calls).
    dptype -- discretization type, stored as-is.
    debug -- enable debug mode.
    """
    txt = read_file_or_url(fname)
    try:
        # Unpacking enforces that the stream contains exactly two documents.
        model_data, hmodel_data = yaml.compose_all(txt, Loader=yaml.BaseLoader)
    except Exception:
        print(
            "Error while parsing YAML file. Probable YAML syntax error in file : ",
            fname,
        )
        # Bare raise preserves the original traceback.
        raise
    self.data = hmodel_data
    self.__model__ = Model(model_data)
    self.discretization_options = i_options if i_options is not None else {}
    # cache for functions
    self.__symbols__ = None
    self.__transition__ = None
    self.__equilibrium__ = None
    self.__projection__ = None
    self.__features__ = None
    self.debug = debug
    self.__set_changed__()
    # NOTE(review): neither imported name is used below -- presumably kept
    # for import side effects; confirm before removing.
    from dolo.numeric.processes import IIDProcess, ProductProcess
    self.check()
    self.dptype = dptype
def read_help_file(self, help_file_path):
    """Read every mapping document in a help file and register its items."""
    with utils.utf8_open_for_read(help_file_path) as open_file:
        for doc_node in yaml.compose_all(open_file):
            if not doc_node.isMapping():
                continue  # only mapping documents carry help topics
            for topic_name, items_node in doc_node.items():
                for item_name, value_node in items_node.items():
                    self.add_item(HelpItemYaml(item_name, value_node),
                                  topic_name)
def record_assemblies(stream, assemblies, local_tags=True):
    """Collect assembly definitions from every document in *stream*."""
    for document in compose_all(stream, Loader=SafeLoader):
        # Wrap the document in a sequence node so get_assemblies() can be
        # applied to an assembly sitting at the top level.
        assemble(SequenceNode(YAML_SEQ_TAG, [document]),
                 assemblies, local_tags)
def read_from_yaml(self, rfd):
    """Compose every YAML document in *rfd* and feed it to read_yaml_node.

    Raises InstlException wrapping the underlying YAMLError or IOError.
    """
    # The previous code interpolated an undefined name `file_path` into
    # the error messages, so any failure raised NameError instead of
    # InstlException.  Identify the stream by its name attribute.
    file_path = getattr(rfd, 'name', "<unknown>")
    try:
        for a_node in yaml.compose_all(rfd):
            self.read_yaml_node(a_node)
    except yaml.YAMLError as ye:
        raise InstlException(" ".join(
            ("YAML error while reading file", "'"+file_path+"':\n", str(ye))), ye)
    except IOError as ioe:
        raise InstlException(" ".join(
            ("Failed to read file", "'"+file_path+"'", ":")), ioe)
def load(self, name, input):
    """Parse *input* as exactly one YAML document composed under *name*."""
    stream = StringIO.StringIO(input)
    stream.name = name
    documents = list(compose_all(stream))
    if len(documents) != 1:
        raise SchemaError("%s: expected a single yaml document, found %s documents"
                          % (name, len(documents)))
    # NOTE(review): this re-invokes load() with a node instead of
    # (name, input); as written that drops a required argument unless
    # load() is overloaded/dispatched elsewhere -- verify call sites.
    return self.load(documents[0])
def read_help_file(self, help_file_path):
    """Read help topics from a file or URL and index the items by name."""
    with open_for_read_file_or_url(help_file_path, None) as file_fd:
        for doc_node in yaml.compose_all(file_fd):
            if not doc_node.isMapping():
                continue  # only mapping documents carry help topics
            for topic_name, topic_node in doc_node:
                for item_name, value_node in topic_node:
                    item = HelpItem(topic_name, item_name)
                    item.read_from_yaml(value_node)
                    self.help_items[item_name] = item
def load(name: str, value: Any, *allowed: Tag) -> SequenceView:
    """Compose *value* (string or stream) into a sequence view, enforcing
    that every top-level document carries one of the *allowed* tags."""
    if isinstance(value, str):
        value = StringIO(value)
        value.name = name
    nodes = list(compose_all(value))
    result = view(SequenceNode(Tag.SEQUENCE.value, nodes), ViewMode.PYTHON)
    for item in view(result, ViewMode.NODE):
        if item.tag not in allowed:
            expected = ", ".join(t.name for t in allowed)
            raise ValueError("expecting %s, got %s" % (expected, item.node.tag))
    return result
def _split_metadata(self, path):
    """Split a post file into (front-matter metadata, body content).

    The first YAML document is the metadata; everything after its last
    line is the content.
    """
    with codecs.open(path, encoding='utf-8') as fd:
        post_content = fd.read()
    # NOTE(review): yaml.load_all runs the full loader on the file text;
    # prefer yaml.safe_load_all if posts can come from untrusted sources.
    metadata = next(yaml.load_all(post_content))
    # Compose the same text to find where the first document ends.
    first_doc = next(yaml.compose_all(post_content))
    body_start = first_doc.end_mark.line + 1
    content = '\n'.join(post_content.splitlines()[body_start:])
    return metadata, content
def load(self, name, input):
    "Load data from json or yaml input. The supplied name will appear as the filename in error messages."
    stream = StringIO.StringIO(input)
    stream.name = name
    documents = list(compose_all(stream))
    if len(documents) != 1:
        raise SchemaError(
            "%s: expected a single yaml document, found %s documents"
            % (name, len(documents)))
    # NOTE(review): recursing into load() with a node drops the required
    # second argument; presumably an overloaded/dispatched load -- verify.
    return self.load(documents[0])
def test_map_iteration(self):
    """Compose a mapping document and exercise the node's dict-like API."""
    yaml_text = """
A:
  - a
  - aa
  - aaa
B: b
C: c
"""
    stream = StringIO.StringIO(yaml_text)
    doc_count = 0
    for map_node in yaml.compose_all(stream):
        self.assertIsInstance(map_node, yaml.nodes.MappingNode)
        doc_count += 1
        # membership tests on the mapping node
        self.assertTrue("A" in map_node)
        self.assertTrue("B" in map_node)
        self.assertTrue("C" in map_node)
        self.assertFalse("D" in map_node)
        # iterate with key/value pairs
        pair_count = 0
        values_via_pairs = []
        for key, value_node in map_node:
            self.assertIsInstance(key, str)
            self.assertIsInstance(value_node, yaml.nodes.Node)
            pair_count += 1
            for leaf in value_node:
                values_via_pairs.append(leaf.value)
        self.assertEqual(sorted(values_via_pairs),
                         sorted(["a", "aa", "aaa", "b", "c"]))
        # iterate via keys() plus subscription
        values_via_keys = []
        for key in map_node.keys():
            for leaf in map_node[key]:
                values_via_keys.append(leaf.value)
        self.assertEqual(sorted(values_via_pairs), sorted(values_via_keys))
        # "in" + subscription, including a key that is absent
        values_via_membership = []
        for key in ("A", "B", "D", "C"):
            if key in map_node:
                for leaf in map_node[key]:
                    values_via_membership.append(leaf.value)
        self.assertEqual(sorted(values_via_pairs),
                         sorted(values_via_membership))
        self.assertEqual(pair_count, 3)
    self.assertEqual(doc_count, 1)
def read_yaml_file(self, file_path):
    """Read a YAML file (or URL) and dispatch each document by its tag.

    Recognized document tags: !define_const, !define, !index; anything
    else is logged as an error.  The file is recorded in the
    __READ_YAML_FILES__ config variable afterwards.
    """
    logging.info("%s", file_path)
    with open_for_read_file_or_url(file_path, self.path_searcher) as file_fd:
        for a_node in yaml.compose_all(file_fd):
            if not self.is_acceptable_yaml_doc(a_node):
                continue
            # Order matters: '!define_const' also startswith('!define').
            if a_node.tag.startswith('!define_const'):
                self.read_const_defines(a_node)
            elif a_node.tag.startswith('!define'):
                self.read_defines(a_node)
            elif a_node.tag.startswith('!index'):
                self.read_index(a_node)
            else:
                # The old message omitted !define_const and carried a
                # stray trailing quote.
                logging.error("Unknown document tag '%s' while reading file %s; Tag should be one of: !define, !define_const, !index",
                              a_node.tag, file_path)
    var_list.get_configVar_obj("__READ_YAML_FILES__").append(file_path)
def test_composer(data_filename, canonical_filename, verbose=False):
    """Compose a data file and its canonical twin and compare node-by-node."""
    nodes1 = None
    nodes2 = None
    try:
        # Context managers close the handles even when composing fails;
        # the original left the open() results to the garbage collector.
        with open(data_filename, 'rb') as data_file:
            nodes1 = list(yaml.compose_all(data_file))
        with open(canonical_filename, 'rb') as canonical_file:
            nodes2 = list(yaml.canonical_compose_all(canonical_file))
        assert len(nodes1) == len(nodes2), (len(nodes1), len(nodes2))
        for node1, node2 in zip(nodes1, nodes2):
            _compare_nodes(node1, node2)
    finally:
        if verbose:
            # single-argument print() is valid on Python 2 and 3, unlike
            # the original "print ..." statements.
            print("NODES1:")
            pprint.pprint(nodes1)
            print("NODES2:")
            pprint.pprint(nodes2)
def test_composer(data_filename, canonical_filename, verbose=False):
    """Compose a data file and its canonical twin and compare node-by-node."""
    nodes1 = None
    nodes2 = None
    try:
        # Context managers close the handles even when composing fails;
        # the original left the open() results to the garbage collector.
        with open(data_filename, 'rb') as data_file:
            nodes1 = list(yaml.compose_all(data_file))
        with open(canonical_filename, 'rb') as canonical_file:
            nodes2 = list(yaml.canonical_compose_all(canonical_file))
        assert len(nodes1) == len(nodes2), (len(nodes1), len(nodes2))
        for node1, node2 in zip(nodes1, nodes2):
            _compare_nodes(node1, node2)
    finally:
        if verbose:
            print("NODES1:")
            pprint.pprint(nodes1)
            print("NODES2:")
            pprint.pprint(nodes2)
def read_yaml_from_stream(self, the_stream):
    """Dispatch every YAML document in *the_stream* by its tag, then
    enforce the minimal-instl-version requirement.

    Raises ValueError when INSTL_MINIMAL_VERSION exceeds the current version.
    """
    for a_node in yaml.compose_all(the_stream):
        if not self.is_acceptable_yaml_doc(a_node):
            continue
        # Order matters: '!define_const' also startswith('!define').
        if a_node.tag.startswith('!define_const'):
            self.read_const_defines(a_node)
        elif a_node.tag.startswith('!define'):
            self.read_defines(a_node)
        elif a_node.tag.startswith('!index'):
            self.read_index(a_node)
        elif a_node.tag.startswith('!require'):
            self.read_require(a_node)
        else:
            # The previous message interpolated an undefined `file_path`,
            # raising NameError instead of logging; identify the stream
            # by its name attribute when it has one.  The tag list was
            # also stale (missing !define_const and !require).
            logging.error(
                "Unknown document tag '%s' while reading stream %s; Tag should be one of: !define, !define_const, !index, !require",
                a_node.tag, getattr(the_stream, 'name', the_stream))
    if not self.check_version_compatibility():
        raise ValueError(
            var_stack.resolve("Minimal instl version $(INSTL_MINIMAL_VERSION) > current version $(__INSTL_VERSION__); ")
            + var_stack.get_configVar_obj("INSTL_MINIMAL_VERSION").description)
def test_single_scalar_iteration(self):
    """Compose a single-scalar document and iterate it like a sequence."""
    yaml_text = """
a
"""
    stream = StringIO.StringIO(yaml_text)
    doc_count = 0
    for scalar_node in yaml.compose_all(stream):
        self.assertIsInstance(scalar_node, yaml.nodes.ScalarNode)
        doc_count += 1
        seen = 0
        # iterating a scalar node yields the scalar itself exactly once
        for item in scalar_node:
            self.assertIsInstance(item, yaml.nodes.ScalarNode)
            self.assertIsInstance(item.value, str)
            seen += 1
            self.assertEqual(item.value, "a")
        self.assertEqual(seen, 1)
    self.assertEqual(doc_count, 1)
def load_YAML(stream):
    """Load YAML content, returning one value per document.

    Args:
        stream (Stream): the stream object, str or file.

    Returns:
        list: the value produced by read_node() for each document in the
        stream; values are nested according to each document's structure.
    """
    # The old docstring promised (line_number, name, value) tuples and a
    # per-document line number was computed, but only the value was ever
    # collected -- the unused computation is dropped and the docstring
    # now matches the actual return shape.
    return [read_node(document) for document in compose_all(stream)]
def transclude_template(stream, assemblies, local_tags=True):
    """Assemble and transclude every document in *stream*; return the
    processed document nodes."""
    results = []
    for document in compose_all(stream):
        # Wrapping in a sequence node lets get_assemblies() and
        # transclude() operate on an assembly/transclusion at the top level.
        wrapped = SequenceNode(YAML_SEQ_TAG, [document])
        # Assemblies found inside this document stay local to it.
        doc_assemblies = assemblies.copy()
        log.debug("Before transclude: wrapper=%s", wrapped)
        wrapped = assemble(wrapped, doc_assemblies, local_tags)
        wrapped = transclude(wrapped, doc_assemblies, local_tags)
        log.debug("After transclude: wrapper=%s", wrapped)
        results.append(wrapped.value[0])
    return results
def test_sequence_of_scalar_iteration(self):
    """Compose a flat sequence document and iterate its scalars."""
    yaml_text = """
- a
- b
- c
"""
    stream = StringIO.StringIO(yaml_text)
    doc_count = 0
    for seq_node in yaml.compose_all(stream):
        self.assertIsInstance(seq_node, yaml.nodes.SequenceNode)
        doc_count += 1
        collected = []
        for leaf in seq_node:
            self.assertIsInstance(leaf, yaml.nodes.ScalarNode)
            self.assertIsInstance(leaf.value, str)
            collected.append(leaf.value)
        self.assertEqual(collected, ["a", "b", "c"])
        self.assertEqual(len(collected), 3)
    self.assertEqual(doc_count, 1)
def test_sequence_of_sequence_of_scalar_iteration(self):
    """Compose nested sequences and check iteration at both levels."""
    nested_yaml = """
- - a
  - aa
  - aaa
- - b
  - bb
  - bbb
- - c
  - cc
  - ccc
"""
    source = StringIO.StringIO(nested_yaml)
    documents_seen = 0
    for outer in yaml.compose_all(source):
        self.assertIsInstance(outer, yaml.nodes.SequenceNode)
        documents_seen += 1
        sub_sequences = 0
        rows = []
        for inner in outer:
            self.assertIsInstance(inner, yaml.nodes.SequenceNode)
            sub_sequences += 1
            current = []
            for scalar in inner:
                self.assertIsInstance(scalar, yaml.nodes.ScalarNode)
                self.assertIsInstance(scalar.value, str)
                current.append(scalar.value)
            rows.append(current)
        self.assertEqual(
            rows,
            [["a", "aa", "aaa"], ["b", "bb", "bbb"], ["c", "cc", "ccc"]])
        self.assertEqual(sub_sequences, 3)
    self.assertEqual(documents_seen, 1)
result.write('--- ' + line.split(' ')[2] + '\n') # remove the tag, but keep file ID print('\rGameObject ' + str(count)) count += 1 else: # Just copy the contents... result.write(line) sourceFile.close() return result.getvalue() if len(sys.argv) != 2: print('sceneconverter.py <inputfile>') sys.exit(2) gameobject_tag = 'tag:unity3d.com,2011:29' gameobject2_tag = 'tag:unity3d.com,2011:104' def gameobject_constructor(loader, node): pass print('Importing scene from ' + sys.argv[1]) scene_file = sys.argv[1] yaml.add_constructor(gameobject_tag, gameobject_constructor) yaml.add_constructor(gameobject2_tag, gameobject_constructor) scene_data = removeUnityTagAlias(scene_file) scene_yaml = yaml.compose_all(scene_data) component_dict = {} for entity in scene_yaml: #if 'GameObject' in entity: print(entity)
def canonical_compose_all(stream):
    """Compose every document in *stream* with the canonical loader."""
    composed = yaml.compose_all(stream, Loader=CanonicalLoader)
    return composed
def read_yaml_from_stream(self, the_stream, *args, **kwargs):
    """Compose each document in *the_stream* and hand it to the node reader."""
    for document_node in yaml.compose_all(the_stream):
        self.read_yaml_from_node(document_node, *args, **kwargs)
def _load(stream):
    """Compose every document in *stream* and return leaf-as-Python views."""
    # comprehension replaces the manual append loop
    return [view(node, LEAF_AS_PYTHON) for node in compose_all(stream)]
def read_yaml_from_stream(self, the_stream, *args, **kwargs):
    """Compose each document and read it while it sits on the node stack."""
    node_stack = kwargs['node-stack']
    for document_node in yaml.compose_all(the_stream):
        # keep the node on the stack for the duration of its own processing
        with node_stack(document_node):
            self.read_yaml_from_node(document_node, *args, **kwargs)
def document(self):
    """Return the text that follows the first YAML document in the content."""
    first_doc = next(yaml.compose_all(self._content))
    body_start = first_doc.end_mark.line + 1  # line right after the document
    return '\n'.join(self._content.splitlines()[body_start:])
    # --- tail of nodeToPy(a_node): the mapping branch; the function's
    # start is above this excerpt ---
    retVal = {str(_key.value): nodeToPy(_val) for (_key, _val) in a_node.value}
    return retVal

def nodeToYamlDumpWrap(a_node):
    # Recursively wrap a composed YAML node tree in YamlDumpWrap objects;
    # scalars are stringified, sequences/mappings are rebuilt with wrapped
    # children.  Returns None for unrecognized node kinds.
    retVal = None
    if a_node.isScalar():
        retVal = YamlDumpWrap(str(a_node.value))
    elif a_node.isSequence():
        seq = [nodeToYamlDumpWrap(item) for item in a_node.value]
        retVal = YamlDumpWrap(seq)
    elif a_node.isMapping():
        amap = {str(_key.value): nodeToYamlDumpWrap(_val) for (_key, _val) in a_node.value}
        retVal = YamlDumpWrap(amap)
    return retVal

if __name__ == "__main__":
    # Script mode: re-dump every YAML file/URL named on the command line.
    try:
        import pyinstl.utils
        for afile in sys.argv[1:]:
            with pyinstl.utils.open_for_read_file_or_url(afile) as fd:
                for a_node in yaml.compose_all(fd):
                    a_node_as_tdw = nodeToYamlDumpWrap(a_node)
                    docWrap = YamlDumpDocWrap(a_node_as_tdw)
                    writeAsYaml(docWrap)
    except Exception as ex:
        import traceback
        tb = traceback.format_exc()
        print(tb)
def compose(self):
    """Compose every YAML document in self.data with the module's Loader
    and return the corresponding representation trees.

    Note: compose_all() yields one node tree per document (lazily); the
    previous docstring incorrectly said only the first document is parsed.
    """
    return yaml.compose_all(self.data, Loader=Loader)
def nodeToYamlDumpWrap(a_node):
    """Recursively wrap a composed YAML node tree in YamlDumpWrap objects.

    Scalars become YamlDumpWrap(str(value)); sequences and mappings are
    rebuilt with wrapped children.  Returns None for unrecognized nodes.
    """
    retVal = None
    if a_node.isScalar():
        retVal = YamlDumpWrap(str(a_node.value))
    elif a_node.isSequence():
        retVal = YamlDumpWrap(
            [nodeToYamlDumpWrap(item) for item in a_node.value])
    elif a_node.isMapping():
        retVal = YamlDumpWrap({
            str(_key.value): nodeToYamlDumpWrap(_val)
            for (_key, _val) in a_node.value
        })
    return retVal

if __name__ == "__main__":
    # Script mode: re-dump every YAML file/URL named on the command line.
    try:
        import utils
        for afile in sys.argv[1:]:
            with utils.open_for_read_file_or_url(
                    afile, config_vars=None) as open_file:
                for a_node in yaml.compose_all(open_file.fd):
                    docWrap = YamlDumpDocWrap(nodeToYamlDumpWrap(a_node))
                    writeAsYaml(docWrap)
    except Exception:  # the bound name `ex` was never used
        import traceback
        print(traceback.format_exc())
def read_yaml_from_stream(self, the_stream, *args, **kwargs):
    """Compose each document and dispatch it to its tag-specific reader."""
    for document_node in yaml.compose_all(the_stream):
        # Re-initialize the readers for every document: a previous document
        # may have changed the assigned readers (ACCEPTABLE_YAML_DOC_TAGS).
        self.init_specific_doc_readers()
        reader = self.get_read_function_for_doc(document_node)
        if reader is not None:
            reader(document_node, *args, **kwargs)