def strip_anchors(istream, ostream):
    """Rewrite a YAML stream so anchors/aliases become JSON-compatible.

    Parses *istream* and writes to *ostream* an equivalent stream in which
    YAML anchors and references are "flattened":

        key1: !tag:desc &anchor_name { attribute1: val1 },
        key2: *anchor_name,

    becomes:

        key1: !tag:desc &anchor { __anchor__: anchor_name, attribute1: val1, },
        key2: { __ref__: anchor_name},

    The output stream is rewound to position 0 afterwards.
    """
    flattened = []
    for event in yaml.parse(istream):
        event_type = type(event)
        if event_type is yaml.MappingStartEvent and event.anchor:
            # Move the anchor name into an explicit __anchor__ entry.
            name = event.anchor
            event.anchor = None
            flattened.append(event)
            flattened.extend(key_value_events('__anchor__', name))
        elif event_type is yaml.ScalarEvent and event.anchor is not None:
            flattened.extend(anchor_events(event.anchor, event.value))
        elif event_type is yaml.AliasEvent:
            flattened.extend(alias_events('__ref__', event.anchor))
        else:
            flattened.append(event)
    yaml.emit(flattened, stream=ostream)
    ostream.seek(0)
def test_emitter_error(error_filename, verbose=False):
    """Emit an event stream that is expected to fail, and require a YAMLError.

    :param error_filename: path to a fixture whose events (loaded with
        ``test_emitter.EventsLoader``) should make the emitter raise.
    :param verbose: when True, print the exception class name and message.
    :raises AssertionError: if emitting the events does not raise.
    """
    # Fixes: Python 2-only `except E, exc` / print-statement syntax, a leaked
    # file handle, and the test silently passing when no error was raised
    # (matching the sibling implementations of this function).
    with open(error_filename, 'rb') as file:
        events = list(yaml.load(file, Loader=test_emitter.EventsLoader))
    try:
        yaml.emit(events)
    except yaml.YAMLError as exc:
        if verbose:
            print("%s:" % exc.__class__.__name__, exc)
    else:
        raise AssertionError("expected an exception")
def test_emitter_error(error_filename, verbose=False):
    """Emit an event stream that is expected to fail, and require a YAMLError.

    :param error_filename: path to a fixture whose events (loaded with
        ``test_emitter.EventsLoader``) should make the emitter raise.
    :param verbose: when True, print the exception class name and message.
    :raises AssertionError: if emitting the events does not raise.
    """
    # Fixes: Python 2-only `except E, exc` / print-statement syntax, a leaked
    # file handle, and the test silently passing when no error was raised.
    with open(error_filename, 'rb') as file:
        events = list(
            yaml.load(file, Loader=test_emitter.EventsLoader))
    try:
        yaml.emit(events)
    except yaml.YAMLError as exc:
        if verbose:
            print("%s:" % exc.__class__.__name__, exc)
    else:
        raise AssertionError("expected an exception")
def test_emitter_error(error_filename, verbose=False):
    """Emit an event stream that is expected to fail, and require a YAMLError.

    :param error_filename: path to a fixture whose events (loaded with
        ``test_emitter.EventsLoader``) should make the emitter raise.
    :param verbose: when True, print the exception class name and message.
    :raises AssertionError: if emitting the events does not raise.
    """
    # Use a context manager so the fixture file is closed deterministically
    # (the original leaked the handle; the sibling version already uses with).
    with open(error_filename, 'rb') as file:
        events = list(yaml.load(file, Loader=test_emitter.EventsLoader))
    try:
        yaml.emit(events)
    except yaml.YAMLError as exc:
        if verbose:
            print("%s:" % exc.__class__.__name__, exc)
    else:
        raise AssertionError("expected an exception")
def test_emitter_error(error_filename, verbose=False):
    """Emit an event stream that is expected to fail, and require a YAMLError.

    Loads events from *error_filename* with ``test_emitter.EventsLoader``,
    feeds them to the emitter, and asserts a ``yaml.YAMLError`` is raised.
    """
    with open(error_filename, 'rb') as file:
        events = list(yaml.load(file, Loader=test_emitter.EventsLoader))
    raised = False
    try:
        yaml.emit(events)
    except yaml.YAMLError as exc:
        raised = True
        if verbose:
            print("%s:" % exc.__class__.__name__, exc)
    if not raised:
        raise AssertionError("expected an exception")
def update_compact_files(self):
    """Update compact data files from update data files.

    For each target path in ``self._updates``: if the file already exists,
    rewrite it by merging the pending updates into its current YAML event
    stream; otherwise create it from scratch with only the pending content.
    """
    # Fix: stray trailing comma in the parameter list `(self, )`.
    for file_path, updates in self._updates.items():
        if os.path.exists(file_path):
            # Read from a temporary copy while overwriting the original file.
            with open_temp_copy(file_path, binary=True) as instream, open(
                    file_path, 'wb') as outstream:
                updated_events = self._updated_compact_events(
                    yaml.parse(instream), updates)
                yaml.emit(updated_events, outstream)
        else:
            with open(file_path, 'wb') as outstream:
                yaml.emit(self._fresh_content_events(updates.items()),
                          outstream)
def _compare_emitters(data, verbose):
    """Round-trip *data* through the C emitter and compare event streams.

    Parses *data* with the pure-Python loader, re-emits it with the C dumper,
    then re-parses the result with both loaders and asserts the three event
    streams agree attribute by attribute.
    """
    # Fix: Python 2 print statements (SyntaxError on Python 3; the file's
    # other helpers already use print()).
    events = list(yaml.parse(data, Loader=yaml.PyLoader))
    c_data = yaml.emit(events, Dumper=yaml.CDumper)
    if verbose:
        print(c_data)
    py_events = list(yaml.parse(c_data, Loader=yaml.PyLoader))
    c_events = list(yaml.parse(c_data, Loader=yaml.CLoader))
    try:
        assert len(events) == len(py_events), (len(events), len(py_events))
        assert len(events) == len(c_events), (len(events), len(c_events))
        for event, py_event, c_event in zip(events, py_events, c_events):
            for attribute in [
                    '__class__', 'anchor', 'tag', 'implicit', 'value',
                    'explicit', 'version', 'tags'
            ]:
                value = getattr(event, attribute, None)
                py_value = getattr(py_event, attribute, None)
                c_value = getattr(c_event, attribute, None)
                # The non-specific tag may surface as None or u'!'.
                if attribute == 'tag' and value in [None, u'!'] \
                        and py_value in [None, u'!'] and c_value in [None, u'!']:
                    continue
                if attribute == 'explicit' and (py_value or c_value):
                    continue
                assert value == py_value, (event, py_event, attribute)
                assert value == c_value, (event, c_event, attribute)
    finally:
        # Dump all three streams on failure (or when verbose) for debugging.
        if verbose:
            print("EVENTS:")
            pprint.pprint(events)
            print("PY_EVENTS:")
            pprint.pprint(py_events)
            print("C_EVENTS:")
            pprint.pprint(c_events)
def test_emitter_styles(data_filename, canonical_filename, verbose=False):
    """Re-emit each fixture with every scalar style and flow style.

    For both fixtures, rebuilds the event stream forcing each combination of
    flow_style and scalar style, emits it, and checks the output parses back
    to an equivalent event stream.
    """
    for filename in [data_filename, canonical_filename]:
        # Fix: close the fixture file (the original leaked the handle).
        with open(filename, 'rb') as file:
            events = list(yaml.parse(file))
        for flow_style in [False, True]:
            for style in ['|', '>', '"', '\'', '']:
                styled_events = []
                for event in events:
                    # Rebuild node events so the chosen styles are applied.
                    if isinstance(event, yaml.ScalarEvent):
                        event = yaml.ScalarEvent(event.anchor, event.tag,
                                                 event.implicit, event.value,
                                                 style=style)
                    elif isinstance(event, yaml.SequenceStartEvent):
                        event = yaml.SequenceStartEvent(event.anchor,
                                                        event.tag,
                                                        event.implicit,
                                                        flow_style=flow_style)
                    elif isinstance(event, yaml.MappingStartEvent):
                        event = yaml.MappingStartEvent(event.anchor,
                                                       event.tag,
                                                       event.implicit,
                                                       flow_style=flow_style)
                    styled_events.append(event)
                output = yaml.emit(styled_events)
                if verbose:
                    print("OUTPUT (filename=%r, flow_style=%r, style=%r)"
                          % (filename, flow_style, style))
                    print(output)
                new_events = list(yaml.parse(output))
                _compare_events(events, new_events)
def _compare_emitters(data, verbose):
    """Round-trip *data* through the C emitter and compare event streams.

    Parses *data* with the pure-Python loader, re-emits it with the C dumper,
    then re-parses the result with both loaders and asserts the three event
    streams agree attribute by attribute.
    """
    # Fix: Python 2 print statements (SyntaxError on Python 3).
    events = list(yaml.parse(data, Loader=yaml.PyLoader))
    c_data = yaml.emit(events, Dumper=yaml.CDumper)
    if verbose:
        print(c_data)
    py_events = list(yaml.parse(c_data, Loader=yaml.PyLoader))
    c_events = list(yaml.parse(c_data, Loader=yaml.CLoader))
    try:
        assert len(events) == len(py_events), (len(events), len(py_events))
        assert len(events) == len(c_events), (len(events), len(c_events))
        for event, py_event, c_event in zip(events, py_events, c_events):
            for attribute in ['__class__', 'anchor', 'tag', 'implicit',
                              'value', 'explicit', 'version', 'tags']:
                value = getattr(event, attribute, None)
                py_value = getattr(py_event, attribute, None)
                c_value = getattr(c_event, attribute, None)
                # The non-specific tag may surface as None or u'!'.
                if attribute == 'tag' and value in [None, u'!'] \
                        and py_value in [None, u'!'] and c_value in [None, u'!']:
                    continue
                if attribute == 'explicit' and (py_value or c_value):
                    continue
                assert value == py_value, (event, py_event, attribute)
                assert value == c_value, (event, c_event, attribute)
    finally:
        # Dump all three streams on failure (or when verbose) for debugging.
        if verbose:
            print("EVENTS:")
            pprint.pprint(events)
            print("PY_EVENTS:")
            pprint.pprint(py_events)
            print("C_EVENTS:")
            pprint.pprint(c_events)
def _testCEmitter(self, test_name, data_filename, canonical_filename,
                  Loader=yaml.Loader):
    """Emit parsed events with the C dumper and compare against a reparse.

    Parses *data_filename* with *Loader*, re-emits the events via
    ``yaml.CDumper``, reparses that output, and asserts both event streams
    match class-by-class and attribute-by-attribute.
    """
    # Fixes: Python 2-only `file()` builtin and print statements, leaked
    # file handle, and the `failUnlessEqual` alias (deprecated since 2.7,
    # removed in Python 3.12) -> `assertEqual`.
    with open(data_filename, 'r') as stream:
        data1 = stream.read()
    events = list(yaml.parse(data1, Loader=Loader))
    data2 = yaml.emit(events, Dumper=yaml.CDumper)
    ext_events = []
    try:
        for event in yaml.parse(data2):
            ext_events.append(event)
        self.assertEqual(len(events), len(ext_events))
        for event, ext_event in zip(events, ext_events):
            self.assertEqual(event.__class__, ext_event.__class__)
            if hasattr(event, 'anchor'):
                self.assertEqual(event.anchor, ext_event.anchor)
            if hasattr(event, 'tag'):
                # The non-specific tag may surface as '!' or None.
                if not (event.tag in ['!', None]
                        and ext_event.tag in ['!', None]):
                    self.assertEqual(event.tag, ext_event.tag)
            if hasattr(event, 'implicit'):
                self.assertEqual(event.implicit, ext_event.implicit)
            if hasattr(event, 'value'):
                self.assertEqual(event.value, ext_event.value)
            if hasattr(event, 'explicit'):
                self.assertEqual(event.explicit, ext_event.explicit)
            if hasattr(event, 'version'):
                self.assertEqual(event.version, ext_event.version)
            if hasattr(event, 'tags'):
                self.assertEqual(event.tags, ext_event.tags)
    except:
        # Print the inputs and both event streams for debugging, then
        # re-raise so the test still fails.
        print()
        print("DATA1:")
        print(data1)
        print("DATA2:")
        print(data2)
        print("EVENTS:", events)
        print("EXT_EVENTS:", ext_events)
        raise
def test_emitter_on_data(data_filename, verbose=False):
    """Emit parsed events from a data fixture and verify a clean round trip.

    Parses *data_filename*, re-emits the events, reparses the output, and
    compares the two event streams with ``_compare_events``.
    """
    # Fix: close the fixture file (the original leaked the handle).
    with open(data_filename, 'rb') as file:
        events = list(yaml.parse(file))
    output = yaml.emit(events)
    if verbose:
        print("OUTPUT:")
        print(output)
    new_events = list(yaml.parse(output))
    _compare_events(events, new_events)
def test_emitter_events(events_filename, verbose=False):
    """Emit events loaded from an events fixture and verify a round trip.

    Loads the event objects with ``EventsLoader``, emits them, reparses the
    output, and compares the streams with ``_compare_events``.
    """
    # Fix: close the fixture file (the original leaked the handle; the
    # sibling version of this function already uses `with`).
    with open(events_filename, 'rb') as file:
        events = list(yaml.load(file, Loader=EventsLoader))
    output = yaml.emit(events)
    if verbose:
        print("OUTPUT:")
        print(output)
    new_events = list(yaml.parse(output))
    _compare_events(events, new_events)
def test_emitter_events(events_filename, verbose=False):
    """Emit events loaded from an events fixture and verify a round trip.

    Loads the event objects with ``EventsLoader``, emits them, reparses the
    output, and compares the streams with ``_compare_events``.
    """
    # Fixes: Python 2 print statements (SyntaxError on Python 3) and a
    # leaked file handle.
    with open(events_filename, 'rb') as file:
        events = list(yaml.load(file, Loader=EventsLoader))
    output = yaml.emit(events)
    if verbose:
        print("OUTPUT:")
        print(output)
    new_events = list(yaml.parse(output))
    _compare_events(events, new_events)
def test_emitter_on_canonical(canonical_filename, verbose=False):
    """Round-trip a canonical fixture in both normal and canonical form.

    Parses *canonical_filename*, emits the events with canonical=False and
    canonical=True, and verifies each output parses back to an equivalent
    event stream.
    """
    # Fix: close the fixture file (the original leaked the handle).
    with open(canonical_filename, 'rb') as file:
        events = list(yaml.parse(file))
    for canonical in [False, True]:
        output = yaml.emit(events, canonical=canonical)
        if verbose:
            print("OUTPUT (canonical=%s):" % canonical)
            print(output)
        new_events = list(yaml.parse(output))
        _compare_events(events, new_events)
def test_unicode_transfer(unicode_filename, verbose=False):
    """Round-trip a UTF-8 fixture through parse/emit under several encodings.

    Python 2-era variant: relies on the `unicode` builtin,
    `StringIO.StringIO`, and a `_unicode_open` wrapper (defined elsewhere
    in this file — presumably it wraps the stream with a codec writer;
    TODO confirm).
    """
    # NOTE(review): the file handle is never closed; sibling tests use
    # `with open(...)`.
    data = open(unicode_filename, 'rb').read().decode('utf-8')
    for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']:
        input = data
        if encoding is not None:
            # Prepend a BOM so the parser must detect the byte encoding.
            input = (u'\ufeff'+input).encode(encoding)
        # With no stream argument, yaml.emit returns the document as text.
        output1 = yaml.emit(yaml.parse(input), allow_unicode=True)
        stream = StringIO.StringIO()
        yaml.emit(yaml.parse(input), _unicode_open(stream, 'utf-8'),
                  allow_unicode=True)
        output2 = stream.getvalue()
        if encoding is None:
            # Unicode input with no stream -> unicode output (Python 2).
            assert isinstance(output1, unicode), (type(output1), encoding)
        else:
            # Encoded input -> byte-string output that must decode cleanly.
            assert isinstance(output1, str), (type(output1), encoding)
            output1.decode(encoding)
        # The wrapped stream always collects UTF-8 bytes.
        assert isinstance(output2, str), (type(output2), encoding)
        output2.decode('utf-8')
def test_emitter_events(events_filename, verbose=False):
    """Emit events loaded from an events fixture and verify a round trip.

    Loads the event objects with ``EventsLoader``, emits them, reparses the
    output, and compares the streams with ``_compare_events``.
    """
    with open(events_filename, 'rb') as stream:
        loaded_events = list(yaml.load(stream, Loader=EventsLoader))
    emitted = yaml.emit(loaded_events)
    if verbose:
        print("OUTPUT:")
        print(emitted)
    reparsed = list(yaml.parse(emitted))
    _compare_events(loaded_events, reparsed)
def test_emitter_events(events_filename, verbose=False):
    """Emit loaded events and compare against a reparse, ignoring comments.

    Loads the event objects with ``EventsLoader``, emits them, reparses the
    output, and compares the streams after dropping ``yaml.CommentEvent``
    instances (comments are not expected to survive emission).
    """
    # Fixes: leaked file handle; `filter(lambda ...)` replaced with a list
    # comprehension (same result, clearer and already a list).
    with open(events_filename, 'rb') as file:
        events = tuple(yaml.load(file, Loader=EventsLoader))
    output = yaml.emit(events)
    if verbose:
        print("OUTPUT:", events_filename)
        print(output)
    new_events = list(yaml.parse(output))
    no_comments = [e for e in events if not isinstance(e, yaml.CommentEvent)]
    _compare_events(no_comments, new_events)
def test_unicode_transfer(unicode_filename, verbose=False):
    """Round-trip a UTF-8 fixture through parse/emit under several encodings.

    Python 2/3 compatibility variant: relies on `basestring` and
    `text_type`, presumably provided by a six-style compat layer elsewhere
    in this file — TODO confirm where those names come from.
    """
    # NOTE(review): the file handle is never closed; sibling tests use
    # `with open(...)`.
    data = open(unicode_filename, 'rb').read().decode('utf-8')
    for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']:
        input = data
        if encoding is not None:
            # Prepend a BOM so the parser must detect the byte encoding.
            input = (u'\ufeff'+input).encode(encoding)
        # With no stream argument, yaml.emit returns the document directly.
        output1 = yaml.emit(yaml.parse(input), allow_unicode=True)
        # Text input gets a text buffer; encoded input gets a byte buffer.
        if encoding is None:
            stream = io.StringIO()
        else:
            stream = io.BytesIO()
        yaml.emit(yaml.parse(input), stream, allow_unicode=True)
        output2 = stream.getvalue()
        assert isinstance(output1, basestring)
        if encoding is None:
            assert isinstance(output2, text_type)
        else:
            # Byte output must decode cleanly in the requested encoding.
            assert isinstance(output2, bytes)
            output2.decode(encoding)
def test_unicode_transfer(unicode_filename, verbose=False):
    """Round-trip a UTF-8 fixture through parse/emit under several encodings.

    Emits once without a stream (string result) and once into a StringIO or
    BytesIO, asserting the output types match the requested encoding and
    that byte output decodes cleanly.
    """
    with open(unicode_filename, 'rb') as file:
        data = file.read().decode('utf-8')
    for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']:
        input = data
        if encoding is not None:
            # Prepend a BOM so the parser must detect the byte encoding.
            input = ('\ufeff' + input).encode(encoding)
        output1 = yaml.emit(yaml.parse(input), allow_unicode=True)
        if encoding is None:
            stream = io.StringIO()
        else:
            stream = io.BytesIO()
        yaml.emit(yaml.parse(input), stream, allow_unicode=True)
        output2 = stream.getvalue()
        assert isinstance(output1, str), (type(output1), encoding)
        if encoding is None:
            # Fix: the failure message reported type(output1) for the
            # output2 assertions, hiding the actual offending type.
            assert isinstance(output2, str), (type(output2), encoding)
        else:
            assert isinstance(output2, bytes), (type(output2), encoding)
            output2.decode(encoding)
def _StoreYAMLToStream(self, stream):
    """Write all elements to *stream* as one explicit YAML document.

    The document holds a single block-style sequence whose items are the
    events produced by each element's _GetYAMLEvents(), emitted in order.
    """
    events = [
        yaml.StreamStartEvent(encoding='ascii'),
        yaml.DocumentStartEvent(explicit=True),
        yaml.SequenceStartEvent(anchor=None, tag=None, implicit=True,
                                flow_style=False),
    ]
    # Each element contributes its own event sub-stream, in order.
    for element in self.__elements:
        events.extend(element._GetYAMLEvents())
    events.append(yaml.SequenceEndEvent())
    events.append(yaml.DocumentEndEvent(explicit=True))
    events.append(yaml.StreamEndEvent())
    yaml.emit(events, stream)
def with_current_augmentation(self, stream):
    """Append the full test case with its current augmentation data to the target file

    :param stream: A file-like object (which could be passed to
        :func:`yaml.parse`)

    The *stream* contains YAML identifying the test case in question.  The
    identifying YAML from the test case _plus_ the augmentative key/value
    pairs as currently defined in the augmenting data files will be written
    to the file :attr:`file_name`.
    """
    if stream.isatty():
        # Interactive use: prompt, then buffer lines until the '...'
        # sentinel so yaml.parse sees a finite document.
        print(
            "Input test cases from interface, ending with a line containing only '...':"
        )
        buffered_input = StringIO()
        for line in stream:
            if line.rstrip() == "...":
                break
            buffered_input.write(line)
        buffered_input.seek(0)
        stream = buffered_input
    # The reader extracts (primary-key ...) tuples from the event stream.
    id_list_reader = CaseIdListReader(
        self._case_augmenter.CASE_PRIMARY_KEYS,
        safe_loading=self.safe_loading)
    for event in yaml.parse(stream):
        # read() returns None until a complete case id has been assembled.
        test_case = id_list_reader.read(event)
        if test_case is None:
            continue
        # Look up augmentation for case_id
        case_as_currently_augmented_events = (
            self._case_augmenter.augmented_test_case_events(*test_case))
        # Append augmentation case to self.file_name
        # (opened in append-binary mode so earlier cases are preserved).
        with open(self.file_name, 'ab') as outstream:
            yaml.emit(
                self._case_yaml_events(case_as_currently_augmented_events),
                outstream,
            )
def load_stack(name, extension=".stack"):
    """Loads a stack from a file on the PYTHONPATH

    Inputs:
        name: stackname - file same name must be found on path
        extension: extension of language that the stack file is written in

    Output:
        Python Data structure representing the stack

    load_stack will recursively load any included stack files
    """
    # NOTE(review): yaml.load without an explicit Loader is unsafe on
    # untrusted input (arbitrary object construction) and warns on PyYAML
    # >= 5.1; confirm whether stack files are trusted, and consider
    # yaml.safe_load or Loader=yaml.SafeLoader.
    # The emit/load round trip converts the processed event stream from
    # _load_stack back into plain Python data.
    return yaml.load(yaml.emit(_load_stack(name, extension)))
def save():
    """Serialize the configuration according to its format type.

    Returns True when the (attempted) save succeeds or a ValueError was
    logged, False for an unknown format type or when self.correct is falsy.
    """
    if self.correct == True:
        try:
            content = None
            # NOTE(review): these comparisons use the *builtin* `type` and
            # bare `config`/constant names; the sibling implementation uses
            # `self._type` / `self.PROPERTIES` / `self.config` — confirm
            # which names are actually in scope for this closure.
            if (type == PROPERTIES) or (type == CNF):
                content = writeProperties()
            elif type == JSON:
                content = json.dumps(config)
            elif type == YAML:
                content = yaml.emit(config)
            elif type == SERIALIZED:
                content = pickle.dumps(self.config)
            elif type == ENUM:
                # NOTE(review): the joined string is discarded — probably
                # meant `content = "\r\n".join(config.keys())`.
                "\r\n".join(config.keys())
            else:
                # NOTE(review): assigns a *local* `correct`, not
                # `self.correct` — confirm intent.
                correct = False
                return False
        except ValueError:
            logger.log('error', f'Could not save Config {self.file}')
        # NOTE(review): `content` is built but never written anywhere.
        return True
    else:
        # Fix: `return false` raised NameError (lowercase `false` is not a
        # Python name); must be the False constant.
        return False
def save(self) -> bool:
    """Serialize the configuration according to its format type.

    Returns True when the (attempted) save succeeds or a ValueError was
    logged, False for an unknown format type or when self.correct is falsy.
    """
    # Fix: the annotation said `-> None` but every path returns a bool.
    if self.correct == True:
        try:
            content = None
            if (self._type == self.PROPERTIES) or (self._type == self.CNF):
                content = writeProperties()
            elif self._type == self.JSON:
                # NOTE(review): bare `config` here vs `self.config` below —
                # confirm which is intended.
                content = json.dumps(config)
            elif self._type == self.YAML:
                content = yaml.emit(config)
            elif self._type == self.SERIALIZED:
                content = pickle.dumps(self.config)
            elif self._type == self.ENUM:
                # NOTE(review): the joined string is discarded — probably
                # meant `content = "\r\n".join(config.keys())`.
                "\r\n".join(config.keys())
            else:
                # NOTE(review): assigns a *local* `correct`, not
                # `self.correct` — confirm intent.
                correct = False
                return False
        except ValueError:
            Logger.log('error', 'Could not save Config' + str(self.file))
        # NOTE(review): `content` is built but never written anywhere.
        return True
    else:
        # Fix: `return false` raised NameError (lowercase `false` is not a
        # Python name); must be the False constant.
        return False
def save_yaml(dest, data, header, str_labels=False):
    """Serialize a parsed GFF structure to the YAML file at *dest*."""
    events = gff2yamlevents(data, header, str_labels)
    with open(dest, 'w') as outstream:
        yaml.emit(events, outstream)
def _process_stack(events, extension=".stack"):
    """Process the stack implementing any stack specific features

    Is called recursively to deal with includes

    Takes an iterable of yaml events and returns a rewritten list of events.
    (NOTE(review): reconstructed formatting — nesting of a few statements was
    inferred from semantics and should be confirmed against the original.)
    """
    lines = []
    in_package = False
    in_package_manager = False
    mapping_count = 0
    last_scalar = ''
    package_manager_lines = []
    in_inline = False
    add = True
    # First Pass
    # Process the includes
    for e in events:
        if in_inline:
            # The event after an 'inline' scalar names a stack to splice in;
            # [3:-3] strips the included stream/document start+end events.
            in_inline = False
            add = False
            lines = lines + _load_stack(e.value,extension)[3:-3]
        if (e.__class__ == yaml.events.ScalarEvent and e.value == 'inline'):
            in_inline = True
            add = False
        if add:
            lines.append(e)
        add = True
    # Reset all per-pass state before the second pass.
    events = copy(lines)
    lines = []
    in_package = False
    in_package_manager = False
    mapping_count = 0
    last_scalar = ''
    package_manager_lines = []
    in_inline = False
    add = True
    # Second Pass
    # - Process Automatic Anchors for packages
    # - Capture package-mangers for use in second pass
    for e in events:
        # Stream/document boundary events also seed the side stream that is
        # later emitted to recover the package-managers mapping.
        if (e.__class__ == yaml.events.StreamStartEvent or
                e.__class__ == yaml.events.DocumentStartEvent or
                e.__class__ == yaml.events.DocumentEndEvent or
                e.__class__ == yaml.events.StreamEndEvent):
            package_manager_lines.append(deepcopy(e))
        if in_package:
            # Auto-anchor: give each package node the name of the key scalar
            # seen just before it (events without an .anchor attribute are
            # skipped via the except).
            try:
                if last_scalar and not e.anchor:
                    e.anchor = last_scalar
            except:
                pass
            #always reset last scalar
            last_scalar = ''
            if e.__class__ == yaml.events.MappingStartEvent:
                mapping_count += 1
            # Depth 2 scalars are the package names.
            if (e.__class__ == yaml.events.ScalarEvent and e.value
                    and mapping_count == 2):
                last_scalar = e.value
            if e.__class__ == yaml.events.MappingEndEvent:
                mapping_count -= 1
                if not mapping_count:
                    in_package = False
        if in_package_manager:
            if e.__class__ == yaml.events.MappingStartEvent:
                mapping_count += 1
            if e.__class__ == yaml.events.MappingEndEvent:
                mapping_count -= 1
            package_manager_lines.append(deepcopy(e))
            if not mapping_count:
                # Closing event is appended twice: once as content, once to
                # balance the artificial MappingStartEvent added below.
                in_package_manager = False
                package_manager_lines.append(deepcopy(e))
        if (e.__class__ == yaml.events.ScalarEvent
                and e.value == 'packages'):
            mapping_count = 0
            in_package = True
        if (e.__class__ == yaml.events.ScalarEvent
                and e.value == 'package-managers'):
            # Start capturing the package-managers subtree, wrapped in an
            # artificial mapping so it can be emitted as its own document.
            mapping_count = 0
            in_package_manager = True
            package_manager_lines.append(
                yaml.events.MappingStartEvent(None,None,True))
            package_manager_lines.append(deepcopy(e))
        if add:
            lines.append(e)
        add = True
    # Third Pass
    # - Process auto fill of package names
    # Only boundary events were captured (no package-managers section):
    # keep just stream start/end so the emit below yields an empty doc.
    if len(package_manager_lines) == 4:
        package_manager_lines = [package_manager_lines[0]] +[package_manager_lines[3]]
    try:
        # Emit the captured side stream and load it back as plain data.
        package_managers = yaml.load(
            yaml.emit(package_manager_lines)).get("package-managers",None)
    except:
        package_managers = None
    if package_managers:
        in_package = False
        mapping_count = 0
        current_package = ''
        current_managers = {}
        add = True
        events = copy(lines)
        last_scalar = ''
        current_platform = ''
        lines = []
        for e in events:
            if in_package:
                if e.__class__ == yaml.events.MappingStartEvent:
                    mapping_count += 1
                if mapping_count == 2:
                    # Entering a platform mapping: the preceding scalar was
                    # the platform name.
                    if e.__class__ == yaml.events.MappingStartEvent:
                        current_platform = last_scalar
                if (mapping_count == 2
                        and e.__class__ == yaml.events.ScalarEvent):
                    if e.value:
                        current_package = e.value
                    else:
                        # Package with an empty value: synthesize a mapping
                        # naming the package under every known manager.
                        lines.append(yaml.events.MappingStartEvent(
                            e.anchor,None,True))
                        for pm in package_managers[current_platform].keys():
                            lines.append(yaml.events.ScalarEvent(
                                None, None,(True,False),pm))
                            lines.append(yaml.events.ScalarEvent(
                                None, None,(True,False),current_package))
                        lines.append(yaml.events.MappingEndEvent())
                        add = False
                if mapping_count == 3:
                    # Inside an explicit per-manager mapping: record which
                    # managers were specified, then fill in the missing ones.
                    if (e.__class__ == yaml.events.ScalarEvent and e.value):
                        current_managers[e.value] = None
                    if e.__class__ == yaml.events.MappingEndEvent:
                        for pm in package_managers[current_platform].keys():
                            # NOTE(review): dict.has_key is Python 2-only;
                            # `pm not in current_managers` on Python 3.
                            if not current_managers.has_key(pm):
                                lines.append(yaml.events.ScalarEvent(
                                    None, None,(True,False),pm))
                                lines.append(yaml.events.ScalarEvent(
                                    None, None,(True,False),current_package))
                        current_managers = {}
                if e.__class__ == yaml.events.MappingEndEvent:
                    mapping_count -= 1
                    if mapping_count == 0:
                        in_package = False
            if (e.__class__ == yaml.events.ScalarEvent
                    and e.value == "packages"):
                in_package = True
            if e.__class__ == yaml.events.ScalarEvent:
                last_scalar = e.value
            if add:
                lines.append(e)
            add = True
    #for e in lines: #yaml.parse(yaml.emit(lines)):
        #print e
    #print yaml.emit(lines)
    #print ""
    return lines
def test_empty(self):
    """An empty document processes to an empty stack (loads as None)."""
    empty = """"""
    # Fix: pass parsed events to _process_stack like the sibling tests do;
    # passing the raw string only worked by accident (iterating "" yields
    # nothing, which happened to look like an empty event stream).
    out = yaml.load(yaml.emit(_process_stack(yaml.parse(empty))))
    self.assertEqual(out, None)
def test_single_empty_mapping(self):
    """A mapping key with no value round-trips with a None value."""
    document = """\
blah:
"""
    processed = _process_stack(yaml.parse(document))
    out = yaml.load(yaml.emit(processed))
    self.assertEqual(out["blah"], None)
def test_mapping_with_item(self):
    """A simple key/value mapping survives stack processing unchanged."""
    document = """\
blah: not-blah
"""
    processed = _process_stack(yaml.parse(document))
    out = yaml.load(yaml.emit(processed))
    self.assertEqual(out["blah"], "not-blah")
def gen_yaml(stream, emit_i):
    """Yield sub-documents of *stream*, one per node at nesting depth *emit_i*.

    Events above the target depth are kept as a reusable "frame" (start
    events plus their matching synthetic end events); events at or below it
    are buffered, and each completed node is wrapped in the frame, emitted,
    and loaded back into a Python value that is yielded.

    (NOTE(review): reconstructed formatting — nesting of a few statements
    was inferred from semantics and should be confirmed.)
    """
    parent = None  # NOTE(review): never used — candidate for removal.
    curr_i = 0          # current nesting depth
    events = {}         # depth -> (open events, matching end events)
    tmp_events = []     # buffered events at/below the emit depth
    nodes_req, nodes_left = 0, 0
    doc_ses = [yaml.StreamStartEvent(), yaml.DocumentStartEvent()]
    doc_ees = [yaml.DocumentEndEvent(), yaml.StreamEndEvent()]
    def get_events(level = 0):
        # Recursively assemble: frame openers, buffered payload, frame
        # closers; the outermost level adds stream/document wrappers.
        if level not in events:
            return tmp_events
        else:
            es, end_es = events[level]
            ret = es + get_events(level + 1) + list(end_es)
            if level == 0:
                ret = doc_ses + ret + doc_ees
            return ret
    for event in yaml.parse(stream, Loader=yaml.CLoader):
        # Stream/document boundaries are re-synthesized per emitted chunk.
        if isinstance(event, (yaml.DocumentStartEvent,
                              yaml.StreamStartEvent,
                              yaml.DocumentEndEvent,
                              yaml.StreamEndEvent)):
            continue
        if curr_i not in events:
            events[curr_i] = [], deque()
        es, end_es = events[curr_i]
        # Update the indentation
        if isinstance(event, yaml.SequenceStartEvent):
            if curr_i < emit_i:
                # Part of the frame: remember the opener and pre-queue the
                # matching closer.
                es.append(event)
                end_es.appendleft(yaml.SequenceEndEvent())
            else:
                tmp_events.append(event)
            curr_i += 1
            if curr_i == emit_i:
                # A sequence item is complete after 1 node.
                nodes_req, nodes_left = 1, 1
        elif isinstance(event, yaml.MappingStartEvent):
            if curr_i < emit_i:
                es.append(event)
                end_es.appendleft(yaml.MappingEndEvent())
            else:
                tmp_events.append(event)
            curr_i += 1
            if curr_i == emit_i:
                # A mapping entry needs 2 nodes (key + value).
                nodes_req, nodes_left = 2, 2
        elif isinstance(event, yaml.SequenceEndEvent):
            if curr_i >= emit_i:
                tmp_events.append(event)
            curr_i -= 1
            if curr_i == emit_i:
                nodes_left -= 1
            if curr_i < emit_i:
                # Left the frame level: drop its cached entry and buffer.
                del events[curr_i]
                tmp_events = []
        elif isinstance(event, yaml.MappingEndEvent):
            if curr_i >= emit_i:
                tmp_events.append(event)
            curr_i -= 1
            if curr_i == emit_i:
                nodes_left -= 1
            if curr_i < emit_i:
                del events[curr_i]
                tmp_events = []
        else:
            # Scalar (or alias) event.
            if curr_i < emit_i:
                es.append(event)
            else:
                tmp_events.append(event)
            if curr_i == emit_i:
                nodes_left -= 1
        if emit_i == curr_i and nodes_left <= 0:
            # A full node at the target depth is buffered: emit the frame +
            # payload as a standalone document and yield the loaded value.
            doc = yaml.load(yaml.emit(get_events(), Dumper=yaml.CDumper),
                            Loader=yaml.CLoader)
            yield doc
            tmp_events = []
            nodes_left = nodes_req
for evt in emit_struct(value, False): yield evt else: for evt in emit_primitive(value, False): yield evt else: if value is None: yield SE(tag='!!null', value='') else: for evt in emit_value(value, vtype, False): yield evt yield yaml.DocumentStartEvent() for evt in emit_struct(data): yield evt yield yaml.DocumentEndEvent() yield yaml.StreamEndEvent() def save_yaml(dest, data, header, str_labels=False): with open(dest, 'w') as f: yaml.emit(gff2yamlevents(data, header, str_labels), f) if __name__ == '__main__': import sys with open(sys.argv[1], 'rb') as f: data, header = read_gff4(f) yaml.emit(gff2yamlevents(data, header, True), sys.stdout)