def _compare_emitters(data, verbose):
    """Emit events with the C dumper, then check that both the pure-Python
    and C parsers reproduce the original event stream from that output."""
    # Parse the original data with the pure-Python loader as the reference.
    events = list(yaml.parse(data, Loader=yaml.PyLoader))
    c_data = yaml.emit(events, Dumper=yaml.CDumper)
    if verbose:
        print c_data
    # Re-parse the C-emitted text with both parser implementations.
    py_events = list(yaml.parse(c_data, Loader=yaml.PyLoader))
    c_events = list(yaml.parse(c_data, Loader=yaml.CLoader))
    try:
        assert len(events) == len(py_events), (len(events), len(py_events))
        assert len(events) == len(c_events), (len(events), len(c_events))
        for event, py_event, c_event in zip(events, py_events, c_events):
            for attribute in [
                    '__class__', 'anchor', 'tag', 'implicit', 'value',
                    'explicit', 'version', 'tags'
            ]:
                value = getattr(event, attribute, None)
                py_value = getattr(py_event, attribute, None)
                c_value = getattr(c_event, attribute, None)
                # None and u'!' are equivalent "no explicit tag" spellings.
                if attribute == 'tag' and value in [None, u'!'] \
                        and py_value in [None, u'!'] and c_value in [None, u'!']:
                    continue
                # The emitter may legitimately add explicit document markers.
                if attribute == 'explicit' and (py_value or c_value):
                    continue
                assert value == py_value, (event, py_event, attribute)
                assert value == c_value, (event, c_event, attribute)
    finally:
        # In verbose mode dump all three event streams for diagnosis.
        if verbose:
            print "EVENTS:"
            pprint.pprint(events)
            print "PY_EVENTS:"
            pprint.pprint(py_events)
            print "C_EVENTS:"
            pprint.pprint(c_events)
def _testCEmitter(self, test_name, data_filename, canonical_filename,
                  Loader=yaml.Loader):
    """Round-trip *data_filename* through the C emitter and verify the
    re-parsed events match the originals attribute by attribute."""
    data1 = file(data_filename, 'r').read()
    events = list(yaml.parse(data1, Loader=Loader))
    data2 = yaml.emit(events, Dumper=yaml.CDumper)
    ext_events = []
    try:
        for event in yaml.parse(data2):
            ext_events.append(event)
        self.failUnlessEqual(len(events), len(ext_events))
        for event, ext_event in zip(events, ext_events):
            self.failUnlessEqual(event.__class__, ext_event.__class__)
            if hasattr(event, 'anchor'):
                self.failUnlessEqual(event.anchor, ext_event.anchor)
            if hasattr(event, 'tag'):
                # '!' and None both mean "no explicit tag".
                if not (event.tag in ['!', None] and ext_event.tag in ['!', None]):
                    self.failUnlessEqual(event.tag, ext_event.tag)
            if hasattr(event, 'implicit'):
                self.failUnlessEqual(event.implicit, ext_event.implicit)
            if hasattr(event, 'value'):
                self.failUnlessEqual(event.value, ext_event.value)
            if hasattr(event, 'explicit'):
                self.failUnlessEqual(event.explicit, ext_event.explicit)
            if hasattr(event, 'version'):
                self.failUnlessEqual(event.version, ext_event.version)
            if hasattr(event, 'tags'):
                self.failUnlessEqual(event.tags, ext_event.tags)
    except:
        # Dump both documents and event streams before re-raising so the
        # failing case can be diagnosed from the test output.
        print
        print "DATA1:"
        print data1
        print "DATA2:"
        print data2
        print "EVENTS:", events
        print "EXT_EVENTS:", ext_events
        raise
def _compare_emitters(data, verbose):
    """Emit events with the C dumper, then check that both the pure-Python
    and C parsers reproduce the original event stream from that output."""
    # Parse the original data with the pure-Python loader as the reference.
    events = list(yaml.parse(data, Loader=yaml.PyLoader))
    c_data = yaml.emit(events, Dumper=yaml.CDumper)
    if verbose:
        print c_data
    # Re-parse the C-emitted text with both parser implementations.
    py_events = list(yaml.parse(c_data, Loader=yaml.PyLoader))
    c_events = list(yaml.parse(c_data, Loader=yaml.CLoader))
    try:
        assert len(events) == len(py_events), (len(events), len(py_events))
        assert len(events) == len(c_events), (len(events), len(c_events))
        for event, py_event, c_event in zip(events, py_events, c_events):
            for attribute in ['__class__', 'anchor', 'tag', 'implicit',
                              'value', 'explicit', 'version', 'tags']:
                value = getattr(event, attribute, None)
                py_value = getattr(py_event, attribute, None)
                c_value = getattr(c_event, attribute, None)
                # None and u'!' are equivalent "no explicit tag" spellings.
                if attribute == 'tag' and value in [None, u'!'] \
                        and py_value in [None, u'!'] and c_value in [None, u'!']:
                    continue
                # The emitter may legitimately add explicit document markers.
                if attribute == 'explicit' and (py_value or c_value):
                    continue
                assert value == py_value, (event, py_event, attribute)
                assert value == c_value, (event, c_event, attribute)
    finally:
        # In verbose mode dump all three event streams for diagnosis.
        if verbose:
            print "EVENTS:"
            pprint.pprint(events)
            print "PY_EVENTS:"
            pprint.pprint(py_events)
            print "C_EVENTS:"
            pprint.pprint(c_events)
def test_emitter_styles(data_filename, canonical_filename, verbose=False):
    """Re-emit both input files under every scalar style and both flow
    styles, then verify the output still parses into equivalent events."""
    def restyle(event, scalar_style, flow_style):
        # Rebuild node events so they carry the requested styles.
        if isinstance(event, yaml.ScalarEvent):
            return yaml.ScalarEvent(event.anchor, event.tag, event.implicit,
                                    event.value, style=scalar_style)
        if isinstance(event, yaml.SequenceStartEvent):
            return yaml.SequenceStartEvent(event.anchor, event.tag,
                                           event.implicit,
                                           flow_style=flow_style)
        if isinstance(event, yaml.MappingStartEvent):
            return yaml.MappingStartEvent(event.anchor, event.tag,
                                          event.implicit,
                                          flow_style=flow_style)
        return event

    for filename in (data_filename, canonical_filename):
        events = list(yaml.parse(open(filename, 'rb')))
        for flow_style in (False, True):
            for style in ('|', '>', '"', "'", ''):
                styled_events = [restyle(e, style, flow_style)
                                 for e in events]
                output = yaml.emit(styled_events)
                if verbose:
                    print("OUTPUT (filename=%r, flow_style=%r, style=%r)"
                          % (filename, flow_style, style))
                    print(output)
                new_events = list(yaml.parse(output))
                _compare_events(events, new_events)
def _testCParser(self, test_name, data_filename, canonical_filename,
                 Loader=yaml.Loader):
    """Parse *data_filename* with both the configured Python loader and the
    C loader, verifying the two event streams are identical."""
    data = file(data_filename, 'r').read()
    events = list(yaml.parse(data, Loader=Loader))
    ext_events = []
    try:
        for event in yaml.parse(data, Loader=yaml.CLoader):
            ext_events.append(event)
            #print "EVENT:", event
        self.failUnlessEqual(len(events), len(ext_events))
        for event, ext_event in zip(events, ext_events):
            self.failUnlessEqual(event.__class__, ext_event.__class__)
            if hasattr(event, 'anchor'):
                self.failUnlessEqual(event.anchor, ext_event.anchor)
            if hasattr(event, 'tag'):
                self.failUnlessEqual(event.tag, ext_event.tag)
            if hasattr(event, 'implicit'):
                self.failUnlessEqual(event.implicit, ext_event.implicit)
            if hasattr(event, 'value'):
                self.failUnlessEqual(event.value, ext_event.value)
            if hasattr(event, 'explicit'):
                self.failUnlessEqual(event.explicit, ext_event.explicit)
            if hasattr(event, 'version'):
                self.failUnlessEqual(event.version, ext_event.version)
            if hasattr(event, 'tags'):
                self.failUnlessEqual(event.tags, ext_event.tags)
    except:
        # Dump the raw input and both event streams before re-raising so the
        # failing case can be diagnosed from the test output.
        print
        print "DATA:"
        print file(data_filename, 'rb').read()
        print "EVENTS:", events
        print "EXT_EVENTS:", ext_events
        raise
def test_emitter_on_data(data_filename, canonical_filename, verbose=False):
    """Emit the events parsed from *data_filename* and check that the output
    parses back into an equivalent event stream."""
    events = list(yaml.parse(open(data_filename, 'rb')))
    output = yaml.emit(events)
    if verbose:
        print("OUTPUT:")
        print(output)
    reparsed = list(yaml.parse(output))
    _compare_events(events, reparsed)
def test_emitter_on_canonical(canonical_filename, verbose=False):
    """Round-trip the canonical file through the emitter in both normal and
    canonical output modes."""
    events = list(yaml.parse(open(canonical_filename, 'rb')))
    for canonical in (False, True):
        output = yaml.emit(events, canonical=canonical)
        if verbose:
            print("OUTPUT (canonical=%s):" % canonical)
            print(output)
        reparsed = list(yaml.parse(output))
        _compare_events(events, reparsed)
def extractMessages(fileobj, keywords, commentTags, options):
    """Extract Babel messages out of a YAML file"""
    # Mapping currently being collected for a translatable-string tag, or
    # None while outside such a mapping.
    currentArgs = None
    # Pending mapping key awaiting its value (keys and values arrive as
    # alternating ScalarEvents inside a mapping).
    currentKey = None
    for event in yaml.parse(fileobj):
        if isinstance(event, yaml.events.MappingStartEvent):
            if event.tag == translatableStringTag:
                currentArgs = {}
        elif isinstance(event, yaml.events.MappingEndEvent) and currentArgs:
            # End of a tagged mapping: emit the collected message.
            try:
                # Optional context is joined with '|' (pgettext-style id).
                message = currentArgs['context'] + '|' + currentArgs['message']
            except KeyError:
                message = currentArgs['message']
            try:
                comments = [currentArgs['comment']]
            except KeyError:
                comments = []
            yield event.start_mark.line, '_', message, comments
            currentArgs = None
        elif isinstance(event, yaml.events.ScalarEvent):
            if currentArgs is not None:
                # Inside a tagged mapping: scalars alternate key/value.
                if currentKey is None:
                    currentKey = event.value
                else:
                    currentArgs[currentKey] = event.value
                    currentKey = None
            elif event.tag == translatableStringTag:
                # A directly tagged scalar is itself a message.
                yield event.start_mark.line, '_', event.value, []
def index(paths, key_fields, *, safe_loading=True):
    """Build a mapping of case key -> TestCaseAugmenter for every
    augmentation entry found in *paths*.

    Raises MultipleAugmentationEntriesError when the same case key appears
    in more than one file.
    """
    result = {}
    indexer = Indexer(key_fields, safe_loading=safe_loading)
    for path in paths:
        # Per-file counter giving each case its position within the file.
        case_index = itertools.count(0)
        with open(path) as instream:
            for event in yaml.parse(instream):
                entry = indexer.read(event)
                if entry is not None:
                    case_key, offset = entry
                    new_augmenter = TestCaseAugmenter(
                        path, offset, key_fields,
                        case_index=next(case_index),
                        safe_loading=safe_loading)
                    # NOTE(review): safe_loading was already passed to the
                    # constructor above; this assignment looks redundant —
                    # confirm before removing.
                    new_augmenter.safe_loading = safe_loading
                    # A duplicate key within the same file is tolerated;
                    # across files it is an error.
                    if case_key in result and result[
                            case_key].file_path != path:
                        raise MultipleAugmentationEntriesError(
                            "case {} conflicts with case {}".format(
                                new_augmenter.case_reference,
                                result[case_key].case_reference,
                            ))
                    result[case_key] = new_augmenter
    return result
def update_invtypes(filepath: str):
    """Stream-parse an invTypes YAML (optionally bz2-compressed) export and
    merge the extracted rows into the database session."""
    # this might be better off writing a specific parser for performance, yaml is really slow
    inv_type = None        # InvType row currently being built
    att_name = None        # current attribute key at nesting level 2
    subatt_name = None     # current sub-key at nesting level 3 (e.g. 'en')
    mapping_count = 0      # current mapping nesting depth
    filename = filepath
    if not path.isfile(filename):
        return
    # Accept either a plain .yaml file or a bz2-compressed one.
    if filename.rsplit('.', 1)[1] == "yaml":
        f = open(filename, 'rb')
    elif filename.rsplit('.', 1)[1] == "bz2":
        f = BZ2File(filename)
    else:
        return
    for ev in yaml.parse(f):
        if isinstance(ev, MappingStartEvent):
            mapping_count += 1
        elif isinstance(ev, ScalarEvent):
            if mapping_count == 1:
                # Top-level mapping keys are the type IDs.
                inv_type = InvType()
                inv_type.typeID = int(ev.value)
            if mapping_count == 2:
                # Level 2 scalars alternate attribute-name / attribute-value.
                if att_name is None:
                    att_name = ev.value
                else:
                    if att_name == "groupID":
                        inv_type.groupID = int(ev.value)
                    elif att_name == "marketGroupID":
                        inv_type.marketGroupID = int(ev.value)
                    att_name = None
            if mapping_count == 3:
                # when it gets where att_name should be the value of the upper thing
                # should probably just put stuff into a list
                if subatt_name is None:
                    subatt_name = ev.value
                else:
                    # we have the value
                    if att_name == 'name' and subatt_name == 'en':
                        inv_type.typeName = ev.value
                    elif att_name == 'description' and subatt_name == 'en':
                        inv_type.description = ev.value
                    subatt_name = None
        elif isinstance(ev, MappingEndEvent):
            if mapping_count == 3:
                att_name = None
                subatt_name = None
            elif mapping_count == 2:
                att_name = None
                # A level-2 mapping just closed: the row is complete.
                db.session.merge(inv_type)
            mapping_count -= 1
    f.close()
    db.session.commit()
    db.session.close()
def send_event(_: Info, project_id: str, assistant_id: str, url: str,
               file: str, event_format: str, token: str):
    """Send an event to an assistant

    Reads the event payload either from stdin (``event_format`` selects the
    parser) or from *file* (the extension selects the parser), then posts it
    to the assistant through the Kodexa platform.
    """
    KodexaPlatform.set_access_token(token)
    KodexaPlatform.set_url(url)
    obj = None
    if file is None:
        print("Reading from stdin")
        if event_format == 'yaml':
            # BUG FIX: yaml.parse() returns low-level parser events, not the
            # loaded document; load the payload the same way the file branch
            # does so downstream code gets a dict.
            obj = yaml.full_load(sys.stdin.read())
        elif event_format == 'json':
            obj = json.loads(sys.stdin.read())
        else:
            raise Exception("You must provide a format if using stdin")
    else:
        print("Reading event from file", file)
        with open(file, 'r') as f:
            if file.lower().endswith('.json'):
                obj = json.load(f)
            elif file.lower().endswith('.yaml'):
                obj = yaml.full_load(f)
            else:
                raise Exception("Unsupported file type")
    print("Sending event")
    KodexaPlatform.send_event(project_id, assistant_id, obj)
    print("Event sent :tada:")
def rest(self, content):
    """Return the text that follows the first explicitly terminated YAML
    document in *content*, or None when no explicit '...' terminator is
    found."""
    for parse_event in yaml.parse(content):
        if not isinstance(parse_event, yaml.DocumentEndEvent):
            continue
        if parse_event.explicit:
            # Slice off everything up to and including the terminator.
            return content[parse_event.end_mark.pointer:]
    return None
def yaml2html(stream, loader=yaml.SafeLoader):
    """Convert each YAML document found on *stream* into HTML, yielding one
    HTML string per document."""
    builder = HTMLBuilder()
    for parse_event in yaml.parse(stream, loader):
        builder.process(parse_event)
        if not isinstance(parse_event, yaml.DocumentEndEvent):
            continue
        # Document finished: hand out its HTML and start a fresh builder.
        yield builder.html
        builder = HTMLBuilder()
def highlight(self):
    """Read YAML from self.input and write a syntax-highlighted version to
    self.output, wrapping token/event boundaries with the substitution
    strings defined by self.style."""
    input = str(self.input.read())
    substitutions = self.style.substitutions
    tokens = yaml.scan(input)
    events = yaml.parse(input)
    # Each marker is [index, weight1, weight2, text]: the position where a
    # substitution string must be spliced in, plus sort weights ordering
    # insertions that land on the same character index.
    markers = []
    number = 0
    for token in tokens:
        number += 1
        if token.start_mark.index != token.end_mark.index:
            cls = token.__class__
            if substitutions is not None:
                if (cls, -1) in substitutions:
                    markers.append([
                        token.start_mark.index, +2, number,
                        substitutions[cls, -1]
                    ])
                if (cls, +1) in substitutions:
                    markers.append([
                        token.end_mark.index, -2, number,
                        substitutions[cls, +1]
                    ])
    number = 0
    for event in events:
        number += 1
        cls = event.__class__
        if substitutions is not None:
            if (cls, -1) in substitutions:
                markers.append([
                    event.start_mark.index, +1, number,
                    substitutions[cls, -1]
                ])
            if (cls, +1) in substitutions:
                markers.append([
                    event.end_mark.index, -1, number,
                    substitutions[cls, +1]
                ])
    # Splice from the end of the input toward the front so earlier indices
    # stay valid while inserting.
    markers.sort()
    markers.reverse()
    chunks = []
    position = len(input)
    for index, weight1, weight2, substitution in markers:
        if index < position:
            chunk = input[index:position]
            for substring, replacement in self.style.replaces:
                chunk = chunk.replace(substring, replacement)
            chunks.append(chunk)
            position = index
        chunks.append(substitution)
    chunks.reverse()
    result = u''.join(chunks)
    if self.style.header is not None:
        self.output.write(self.style.header)
    try:
        self.output.write(result.encode('utf-8'))
    except Exception:
        # Fall back to writing text when the output stream rejects bytes.
        self.output.write(str(result))
    #self.output.write(result)
    if self.style.footer is not None:
        self.output.write(self.style.footer)
def get_yaml_syntax_error(buffer):
    """Return the parser's error message for *buffer*, or an empty string
    when it is syntactically valid YAML."""
    try:
        # Drain the event stream; BaseLoader avoids constructing objects.
        for _ in yaml.parse(buffer, Loader=yaml.BaseLoader):
            pass
    except yaml.error.MarkedYAMLError as err:
        return str(err)
    return ""
def strip_anchors(istream, ostream):
    """Rewrite the YAML on *istream* so anchors and aliases become plain,
    JSON-compatible data, emitting the result to *ostream*.

    Anchored mappings gain an ``__anchor__: <name>`` attribute, anchored
    scalars are expanded into ``{__anchor__: <name>, ...}`` event sequences,
    and alias references become ``{__ref__: <name>}`` mappings. *ostream* is
    rewound to its start afterwards.
    """
    translated = []
    for event in yaml.parse(istream):
        if type(event) is yaml.MappingStartEvent and event.anchor:
            # Record the anchor as an ordinary attribute, then drop it.
            extra = key_value_events('__anchor__', event.anchor)
            event.anchor = None
            translated.append(event)
            translated.extend(extra)
        elif type(event) is yaml.ScalarEvent and event.anchor is not None:
            translated.extend(anchor_events(event.anchor, event.value))
        elif type(event) is yaml.AliasEvent:
            translated.extend(alias_events('__ref__', event.anchor))
        else:
            translated.append(event)
    yaml.emit(translated, stream=ostream)
    ostream.seek(0)
def comp_dynamics(
    obj, mol_relaxations, linear_dynamics, scattering_function, infile, outfile
) -> None:
    """Compute dynamic properties for a single input file."""
    outfile = Path(outfile)
    infile = Path(infile)
    # Create output directory where it doesn't already exists
    outfile.parent.mkdir(parents=True, exist_ok=True)
    if linear_dynamics:
        obj["linear_steps"] = None
    if mol_relaxations is not None:
        # NOTE(review): yaml.parse() yields low-level parser events, not a
        # loaded document; if process_file expects relaxation definitions as
        # data this should probably be yaml.safe_load — confirm against
        # process_file's contract before changing.
        relaxations = yaml.parse(mol_relaxations)
    else:
        relaxations = None
    logger.debug("Processing: %s", infile)
    process_file(
        infile=infile,
        mol_relaxations=relaxations,
        outfile=outfile,
        scattering_function=scattering_function,
        **obj,
    )
def case_keys(data_file):
    """Collect and return the case keys recorded in *data_file*."""
    collector = CaseIndexer()
    with oms.open(data_file) as stream:
        for parse_event in yaml.parse(stream):
            collector.read(parse_event, stream.origin_mapper)
    return collector.case_keys
def test_emitter_events(events_filename, verbose=False):
    # Load an event list described in YAML, emit it, and check the emitted
    # text parses back into the same events.
    events = list(yaml.load(open(events_filename, 'rb'), Loader=EventsLoader))
    output = yaml.emit(events)
    if verbose:
        print "OUTPUT:"
        print output
    new_events = list(yaml.parse(output))
    _compare_events(events, new_events)
def test_emitter_events(events_filename, verbose=False):
    """Emit an event stream loaded via EventsLoader and verify the emitted
    document re-parses into equivalent events."""
    events = list(yaml.load(open(events_filename, 'rb'), Loader=EventsLoader))
    output = yaml.emit(events)
    if verbose:
        print("OUTPUT:")
        print(output)
    reparsed = list(yaml.parse(output))
    _compare_events(events, reparsed)
def case_keys(data_file):
    """Collect and return the case keys recorded in *data_file*."""
    collector = CaseIndexer()
    with open(data_file) as stream:
        for parse_event in yaml.parse(stream):
            collector.read(parse_event)
    return collector.case_keys
def input_files_from_dir(in_dir):
    """Return, for each sample id listed in sample_ids.yaml, the list of
    matching ``*_<id>.R1.fastq`` files.

    NOTE(review): *in_dir* is currently unused — both the id file and the
    glob are resolved against the working directory. Confirm whether they
    should be rooted at *in_dir*.
    """
    id_file = "sample_ids.yaml"
    with open(id_file) as in_handle:
        # BUG FIX: yaml.parse() returns parser events, which have no
        # .keys(); safe_load() returns the id mapping this code expects.
        ids = yaml.safe_load(in_handle)
    sample_names = ids.keys()
    samples = [glob.glob("*_%s.R1.fastq" % (x)) for x in sample_names]
    return samples
def test_unicode_transfer(unicode_filename, verbose=False):
    # Round-trip unicode data through parse+emit, both returning a value and
    # writing to an explicit stream, across several encodings.
    data = open(unicode_filename, 'rb').read().decode('utf-8')
    for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']:
        input = data
        if encoding is not None:
            # Encoded inputs carry a BOM so the parser can detect encoding.
            input = (u'\ufeff'+input).encode(encoding)
        output1 = yaml.emit(yaml.parse(input), allow_unicode=True)
        stream = StringIO.StringIO()
        yaml.emit(yaml.parse(input), _unicode_open(stream, 'utf-8'),
                  allow_unicode=True)
        output2 = stream.getvalue()
        if encoding is None:
            # Text input must produce a unicode result...
            assert isinstance(output1, unicode), (type(output1), encoding)
        else:
            # ...while byte input produces bytes in the same encoding.
            assert isinstance(output1, str), (type(output1), encoding)
            output1.decode(encoding)
        # The explicit stream always receives UTF-8 bytes.
        assert isinstance(output2, str), (type(output2), encoding)
        output2.decode('utf-8')
def get_syntax_error(buffer):
    """Return a LintProblem describing the first YAML syntax error found in
    *buffer*, or None when it parses cleanly."""
    try:
        # Drain the event stream; BaseLoader avoids constructing objects.
        for _ in yaml.parse(buffer, Loader=yaml.BaseLoader):
            pass
    except yaml.error.MarkedYAMLError as err:
        # Marks are zero-based; lint output is one-based.
        issue = LintProblem(err.problem_mark.line + 1,
                            err.problem_mark.column + 1,
                            'syntax error: ' + err.problem)
        issue.level = 'error'
        return issue
def test_emitter_events(events_filename, verbose=False):
    """Emit events loaded via EventsLoader and verify the emitted document
    parses back into equivalent events."""
    with open(events_filename, 'rb') as handle:
        events = list(yaml.load(handle, Loader=EventsLoader))
    output = yaml.emit(events)
    if verbose:
        print("OUTPUT:")
        print(output)
    reparsed = list(yaml.parse(output))
    _compare_events(events, reparsed)
def highlight(self):
    """Read YAML from self.input, decode it, and write a highlighted version
    to self.output using the token/event substitutions from self.style."""
    input = self.input.read()
    # Detect the input encoding from a BOM; default to UTF-8.
    if input.startswith(codecs.BOM_UTF16_LE):
        input = six.text_type(input, 'utf-16-le')
    elif input.startswith(codecs.BOM_UTF16_BE):
        input = six.text_type(input, 'utf-16-be')
    else:
        input = six.text_type(input, 'utf-8')
    substitutions = self.style.substitutions
    tokens = yaml.scan(input)
    events = yaml.parse(input)
    # Each marker is [index, weight1, weight2, text]: the position where a
    # substitution string must be spliced in, plus sort weights ordering
    # insertions that land on the same character index.
    markers = []
    number = 0
    for token in tokens:
        number += 1
        if token.start_mark.index != token.end_mark.index:
            cls = token.__class__
            if (cls, -1) in substitutions:
                markers.append([
                    token.start_mark.index, +2, number,
                    substitutions[cls, -1]
                ])
            if (cls, +1) in substitutions:
                markers.append([
                    token.end_mark.index, -2, number,
                    substitutions[cls, +1]
                ])
    number = 0
    for event in events:
        number += 1
        cls = event.__class__
        if (cls, -1) in substitutions:
            markers.append([
                event.start_mark.index, +1, number,
                substitutions[cls, -1]
            ])
        if (cls, +1) in substitutions:
            markers.append(
                [event.end_mark.index, -1, number, substitutions[cls, +1]])
    # Splice from the end of the input toward the front so earlier indices
    # stay valid while inserting.
    markers.sort()
    markers.reverse()
    chunks = []
    position = len(input)
    for index, weight1, weight2, substitution in markers:
        if index < position:
            chunk = input[index:position]
            for substring, replacement in self.style.replaces:
                chunk = chunk.replace(substring, replacement)
            chunks.append(chunk)
            position = index
        chunks.append(substitution)
    chunks.reverse()
    result = ''.join(chunks)
    if self.style.header:
        self.output.write(self.style.header)
    self.output.write(result.encode('utf-8'))
    if self.style.footer:
        self.output.write(self.style.footer)
def content_events(value):
    """Yield the events that represent *value* inside a YAML document, with
    the stream/document wrapper events stripped."""
    wrapper_types = (
        yaml.StreamStartEvent,
        yaml.DocumentStartEvent,
        yaml.DocumentEndEvent,
        yaml.StreamEndEvent,
    )
    for event in yaml.parse(StringIO(yaml.dump(value))):
        if isinstance(event, wrapper_types):
            continue
        yield event
def _find_endblock(self, stream):
    """Find the end of the block which is yaml-compliant.

    Returns the index just past the last completely parsed YAML construct;
    on a parse error, falls back to the last newline before the error
    position.
    """
    endpos = 0
    try:
        for i in yaml.parse(stream, Loader=yaml.CLoader):
            endpos = i.end_mark.index
    # MODERNIZED: 'except X, e' is Python-2-only syntax; 'as' works in both.
    except yaml.YAMLError as e:
        #stop at the last carriage return
        endpos = e.problem_mark.index
        endpos = stream.rfind('\n', 0, endpos)
    # BUG FIX: the computed position was previously discarded (the function
    # fell off the end and returned None).
    return endpos
def get_auth(overwrite=None):
    """Return the saved credentials mapping from CRED_PATH, or None when no
    credential file exists.

    Raises an Exception when the file is present but does not contain both
    'api_key' and 'api_address'.
    """
    if os.path.exists(CRED_PATH):
        with open(CRED_PATH) as f:
            # BUG FIX: yaml.parse() returns parser events; safe_load()
            # returns the credentials mapping that the membership checks
            # below require.
            creds = yaml.safe_load(f)
            if "api_key" in creds and "api_address" in creds:
                return creds
            else:
                raise Exception(f"credential file corrupt, please delete {CRED_PATH} and auth again.")
    else:
        return None
def test_emitter_events(events_filename, verbose=False):
    """Emit a loaded event stream and verify the output parses back into the
    same events, ignoring comment events (which are not round-tripped)."""
    events = tuple(yaml.load(open(events_filename, 'rb'),
                             Loader=EventsLoader))
    output = yaml.emit(events)
    if verbose:
        print("OUTPUT:", events_filename)
        print(output)
    new_events = list(yaml.parse(output))
    # The emitter drops comments, so compare against the comment-free view.
    expected = [e for e in events if not isinstance(e, yaml.CommentEvent)]
    _compare_events(expected, new_events)
def _compare_parsers(py_data, c_data, verbose):
    """Parse *py_data* with the pure-Python parser and *c_data* with the C
    parser, asserting both produce identical event streams."""
    py_events = list(yaml.parse(py_data, Loader=yaml.PyLoader))
    c_events = []
    try:
        for event in yaml.parse(c_data, Loader=yaml.CLoader):
            c_events.append(event)
        assert len(py_events) == len(c_events), \
            (len(py_events), len(c_events))
        attributes = ('__class__', 'anchor', 'tag', 'implicit', 'value',
                      'explicit', 'version', 'tags')
        for py_event, c_event in zip(py_events, c_events):
            for attribute in attributes:
                py_value = getattr(py_event, attribute, None)
                c_value = getattr(c_event, attribute, None)
                assert py_value == c_value, (py_event, c_event, attribute)
    finally:
        # In verbose mode dump both streams for diagnosis.
        if verbose:
            print("PY_EVENTS:")
            pprint.pprint(py_events)
            print("C_EVENTS:")
            pprint.pprint(c_events)
def _compare_parsers(py_data, c_data, verbose):
    """Parse py_data with the pure-Python parser and c_data with the C
    parser, asserting both produce identical event streams."""
    py_events = list(yaml.parse(py_data, Loader=yaml.PyLoader))
    c_events = []
    try:
        for event in yaml.parse(c_data, Loader=yaml.CLoader):
            c_events.append(event)
        assert len(py_events) == len(c_events), (len(py_events), len(c_events))
        for py_event, c_event in zip(py_events, c_events):
            for attribute in ['__class__', 'anchor', 'tag', 'implicit',
                              'value', 'explicit', 'version', 'tags']:
                py_value = getattr(py_event, attribute, None)
                c_value = getattr(c_event, attribute, None)
                assert py_value == c_value, (py_event, c_event, attribute)
    finally:
        # In verbose mode dump both streams for diagnosis.
        if verbose:
            print "PY_EVENTS:"
            pprint.pprint(py_events)
            print "C_EVENTS:"
            pprint.pprint(c_events)
def test_unicode_transfer(unicode_filename, verbose=False):
    # Round-trip unicode data through parse+emit, both returning a value and
    # writing to an explicit stream, across several encodings.
    data = open(unicode_filename, 'rb').read().decode('utf-8')
    for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']:
        input = data
        if encoding is not None:
            # Encoded inputs carry a BOM so the parser can detect encoding.
            input = (u'\ufeff'+input).encode(encoding)
        output1 = yaml.emit(yaml.parse(input), allow_unicode=True)
        # Text input -> text stream; byte input -> byte stream.
        if encoding is None:
            stream = io.StringIO()
        else:
            stream = io.BytesIO()
        yaml.emit(yaml.parse(input), stream, allow_unicode=True)
        output2 = stream.getvalue()
        assert isinstance(output1, basestring)
        if encoding is None:
            assert isinstance(output2, text_type)
        else:
            assert isinstance(output2, bytes)
            output2.decode(encoding)
def test_unicode_transfer(unicode_filename, verbose=False):
    """Round-trip a unicode document through parse+emit, both returning a
    string and writing to an explicit stream, across several encodings."""
    with open(unicode_filename, 'rb') as file:
        data = file.read().decode('utf-8')
    for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']:
        input = data
        if encoding is not None:
            # Encoded inputs carry a BOM so the parser can detect encoding.
            input = ('\ufeff' + input).encode(encoding)
        output1 = yaml.emit(yaml.parse(input), allow_unicode=True)
        # Text input -> text stream; byte input -> byte stream.
        if encoding is None:
            stream = io.StringIO()
        else:
            stream = io.BytesIO()
        yaml.emit(yaml.parse(input), stream, allow_unicode=True)
        output2 = stream.getvalue()
        assert isinstance(output1, str), (type(output1), encoding)
        if encoding is None:
            # BUG FIX: the failure messages below previously reported
            # type(output1) even though the assertions are about output2.
            assert isinstance(output2, str), (type(output2), encoding)
        else:
            assert isinstance(output2, bytes), (type(output2), encoding)
            output2.decode(encoding)
def __parse_log__(self, log_data):
    """Extract the 'target'-level messages from a LAVA YAML log buffer and
    return them as a single newline-terminated string."""
    returned_log = StringIO()
    start_dict = False   # True while inside a mapping (between start/end)
    tmp_dict = None      # key/value pairs of the mapping being collected
    tmp_key = None       # pending key awaiting its value
    is_value = False     # next scalar is a value (True) or a key (False)
    self.log_debug("Length of log buffer: %s" % log_data.getbuffer().nbytes)
    if log_data.getbuffer().nbytes > 0:
        try:
            for event in yaml.parse(log_data, Loader=yaml.CLoader):
                if isinstance(event, yaml.MappingStartEvent):
                    start_dict = True
                    tmp_dict = {}
                if isinstance(event, yaml.MappingEndEvent):
                    start_dict = False
                    if tmp_dict:
                        # Only keep messages logged at the 'target' level.
                        if 'lvl' in tmp_dict.keys(
                        ) and tmp_dict['lvl'] == 'target':
                            if 'msg' in tmp_dict.keys():
                                if isinstance(tmp_dict['msg'], bytes):
                                    try:
                                        # seems like latin-1 is the encoding used by serial
                                        # this might not be true in all cases
                                        returned_log.write(
                                            tmp_dict["msg"].decode(
                                                'latin-1', 'ignore') + "\n")
                                    except ValueError:
                                        # despite ignoring errors, they are still raised sometimes
                                        pass
                                else:
                                    returned_log.write(tmp_dict['msg'] + "\n")
                        del tmp_dict
                        tmp_dict = None
                        is_value = False
                if start_dict is True and isinstance(
                        event, yaml.ScalarEvent):
                    if is_value is False:
                        # the event.value is a dict key
                        tmp_key = event.value
                        is_value = True
                    else:
                        # the event.value is a dict value
                        tmp_dict.update({tmp_key: event.value})
                        is_value = False
        except (yaml.scanner.ScannerError, yaml.parser.ParserError):
            # Malformed log: report the raw content instead of crashing.
            log_data.seek(0)
            wrapper = TextIOWrapper(log_data, encoding='utf-8')
            self.log_error("Problem parsing LAVA log\n" + wrapper.read() +
                           "\n" + traceback.format_exc())
    return returned_log.getvalue()
def highlight(self):
    """Read YAML from self.input, decode it, and write a highlighted version
    to self.output using the token/event substitutions from self.style."""
    input = self.input.read()
    # Detect the input encoding from a BOM; default to UTF-8.
    if input.startswith(codecs.BOM_UTF16_LE):
        input = unicode(input, 'utf-16-le')
    elif input.startswith(codecs.BOM_UTF16_BE):
        input = unicode(input, 'utf-16-be')
    else:
        input = unicode(input, 'utf-8')
    substitutions = self.style.substitutions
    tokens = yaml.scan(input)
    events = yaml.parse(input)
    # Each marker is [index, weight1, weight2, text]: the position where a
    # substitution string must be spliced in, plus sort weights ordering
    # insertions that land on the same character index.
    markers = []
    number = 0
    for token in tokens:
        number += 1
        if token.start_mark.index != token.end_mark.index:
            cls = token.__class__
            if (cls, -1) in substitutions:
                markers.append([token.start_mark.index, +2, number,
                                substitutions[cls, -1]])
            if (cls, +1) in substitutions:
                markers.append([token.end_mark.index, -2, number,
                                substitutions[cls, +1]])
    number = 0
    for event in events:
        number += 1
        cls = event.__class__
        if (cls, -1) in substitutions:
            markers.append([event.start_mark.index, +1, number,
                            substitutions[cls, -1]])
        if (cls, +1) in substitutions:
            markers.append([event.end_mark.index, -1, number,
                           substitutions[cls, +1]])
    # Splice from the end of the input toward the front so earlier indices
    # stay valid while inserting.
    markers.sort()
    markers.reverse()
    chunks = []
    position = len(input)
    for (index, weight1, weight2, substitution) in markers:
        if index < position:
            chunk = input[index:position]
            for (substring, replacement) in self.style.replaces:
                chunk = chunk.replace(substring, replacement)
            chunks.append(chunk)
            position = index
        chunks.append(substitution)
    chunks.reverse()
    result = u''.join(chunks)
    if self.style.header:
        self.output.write(self.style.header)
    self.output.write(result.encode('utf-8'))
    if self.style.footer:
        self.output.write(self.style.footer)
def test_parser_on_canonical(canonical_filename, verbose=False):
    """Check that the standard parser and the canonical parser agree on a
    canonical-format file (full event comparison)."""
    events1 = None
    events2 = None
    try:
        events1 = list(yaml.parse(open(canonical_filename, 'rb')))
        events2 = list(yaml.canonical_parse(open(canonical_filename, 'rb')))
        _compare_events(events1, events2, full=True)
    finally:
        # In verbose mode dump both streams, whether or not we failed.
        if verbose:
            for label, stream in (("EVENTS1:", events1),
                                  ("EVENTS2:", events2)):
                print(label)
                pprint.pprint(stream)
def test_parser(data_filename, canonical_filename, verbose=False):
    # Parse the data file with the standard parser and the canonical file
    # with the canonical parser; the event streams must be equivalent.
    events1 = None
    events2 = None
    try:
        events1 = list(yaml.parse(open(data_filename, 'rb')))
        events2 = list(yaml.canonical_parse(open(canonical_filename, 'rb')))
        _compare_events(events1, events2)
    finally:
        # In verbose mode dump both streams, whether or not we failed.
        if verbose:
            print "EVENTS1:"
            pprint.pprint(events1)
            print "EVENTS2:"
            pprint.pprint(events2)
def test_parser_on_canonical(canonical_filename, verbose=False):
    # Parse the canonical file with both the standard and canonical parsers;
    # the event streams must match exactly (full comparison).
    events1 = None
    events2 = None
    try:
        events1 = list(yaml.parse(open(canonical_filename, 'rb')))
        events2 = list(yaml.canonical_parse(open(canonical_filename, 'rb')))
        _compare_events(events1, events2, full=True)
    finally:
        # In verbose mode dump both streams, whether or not we failed.
        if verbose:
            print "EVENTS1:"
            pprint.pprint(events1)
            print "EVENTS2:"
            pprint.pprint(events2)
def __init__(self, stream, start_byte, key_fields, *, safe_loading=None):
    """Position *stream* at *start_byte* and start parsing a YAML document,
    consuming the stream/document preamble so the parser is ready at the
    first content event."""
    super().__init__()
    # Only override the class-level safe_loading default when an explicit,
    # different value was given.
    if safe_loading is not None and safe_loading is not self.safe_loading:
        self.safe_loading = safe_loading
    self.key_fields = frozenset(key_fields)
    stream.seek(start_byte)
    self._events = yaml.parse(stream)
    self._key = None
    self._value = None
    next(self._events)  # should be yaml.StreamStartEvent
    next(self._events)  # should be yaml.DocumentStartEvent
    self._read_start_of_contents()
def _load_stack(name, extension):
    """Helper function to allow for inlining via recursion

    Inputs:
       name: stackname - a file of the same name must be found on the path
       extension: extension of the language that the stack file is written in

    Raises IOError when no matching file is found on sys.path.
    """
    path = find_file_on_path(sys.path, name + extension)
    if not path:
        raise IOError("%s not found" % name)
    # BUG FIX: close the stack file instead of leaking the handle
    # (open(path).read() previously left the file open).
    with open(path, "r") as handle:
        s = handle.read()
    ps = yaml.parse(s)
    return _process_stack(ps, extension)
def validate(stream):
    """Validate a stream of YAML AO classifications.

    This produces a sequence of strings, one for each error (i.e.
    duplicated path or code) in the stream. Also, if the YAML can't be
    parsed for some reason, the exception from the YAML parser is
    propagated out.
    """
    all_classifications = list(classifications(yaml.parse(stream)))
    # First element of each entry is checked for duplicate codes.
    first_counts = Counter(entry[0] for entry in all_classifications)
    for key, count in first_counts.items():
        if count > 1:
            yield 'duplicate code: {}'.format(key)
    # The joined second element is checked for duplicate paths.
    joined_counts = Counter('/'.join(entry[1])
                            for entry in all_classifications)
    for key, count in joined_counts.items():
        if count > 1:
            yield 'duplicate path: {}'.format(key)
def _CssYamlToDict(stream):
    """Parse a YAML CSS file to an ordered tree."""
    # Note: .next() is the Python 2 iterator protocol.
    event_gen = yaml.parse(stream)
    # Prologue check
    if (not isinstance(event_gen.next(), yaml.events.StreamStartEvent) or
            not isinstance(event_gen.next(), yaml.events.DocumentStartEvent)):
        raise YamlCssError('Malformed document prologue')
    if not isinstance(event_gen.next(), yaml.events.MappingStartEvent):
        raise YamlCssError('The entire YAML CSS file must be a mapping')
    # Parse the entire top-level mapping
    toplevel = _BuildMap(event_gen)
    # Epilogue check
    if (not isinstance(event_gen.next(), yaml.events.DocumentEndEvent) or
            not isinstance(event_gen.next(), yaml.events.StreamEndEvent)):
        raise YamlCssError('Malformed document epilogue')
    return toplevel
def load(self, document):
    """Loads the YAML document and returns the root DataNode."""
    if document is None:
        return None
    self._fatal_error_node = None
    self._iterate_events = True
    self.anchors = {}
    self._document = document
    self._event_generator = pyyaml.parse(self._document)
    # Skip the stream/document preamble so the generator is positioned on
    # the first content event before building nodes.
    self._next_parse_event()  # StreamStartEvent
    self._next_parse_event()  # DocumentStartEvent
    self._next_parse_event()  # first actual event
    root = self._create_node()
    if self._fatal_error_node:
        # Attach the error node to the tree so callers can see where
        # parsing stopped; scalars cannot take children.
        if root.implementation != DataNode.Implementation.scalar:  # pylint: disable=no-member
            root.set_child(self._fatal_error_node)
        root.span.end = self._fatal_error_node.end
    return root
def __parse_log__(self, log_data):
    """Extract the 'target'-level messages from a LAVA YAML log and return
    them as one newline-separated string."""
    returned_log = ""
    start_dict = False   # True while inside a mapping (between start/end)
    tmp_dict = None      # key/value pairs of the mapping being collected
    tmp_key = None       # pending key awaiting its value
    is_value = False     # next scalar is a value (True) or a key (False)
    for event in yaml.parse(log_data, Loader=yaml.CLoader):
        if isinstance(event, yaml.MappingStartEvent):
            start_dict = True
            tmp_dict = {}
        if isinstance(event, yaml.MappingEndEvent):
            start_dict = False
            if tmp_dict:
                # Only keep messages logged at the 'target' level.
                if 'lvl' in tmp_dict.keys() and tmp_dict['lvl'] == 'target':
                    if 'msg' in tmp_dict.keys():
                        if isinstance(tmp_dict['msg'], bytes):
                            try:
                                # seems like latin-1 is the encoding used by serial
                                # this might not be true in all cases
                                returned_log = returned_log + "\n" + tmp_dict["msg"].decode('latin-1', 'ignore')
                            except ValueError:
                                # despite ignoring errors, they are still raised sometimes
                                pass
                        else:
                            returned_log = returned_log + "\n" + tmp_dict['msg']
                del tmp_dict
                tmp_dict = None
                is_value = False
        if start_dict is True and isinstance(event, yaml.ScalarEvent):
            if is_value is False:
                # the event.value is a dict key
                tmp_key = event.value
                is_value = True
            else:
                # the event.value is a dict value
                tmp_dict.update({tmp_key: event.value})
                is_value = False
    return returned_log
def build_graph(self, text):
    """Build a flat list of Node objects from the mapping keys in *text*,
    each linked to the node on top of the parent stack."""
    events = yaml.parse(text)
    node_stack = [None]   # parents; top is the node a new key hangs off
    nodes = []
    # Tracks key/value alternation: a scalar creates a node only when this
    # flag is set (i.e. in "key" position, or right after a mapping event).
    mapping_changed = False
    for event in events:
        if type(event) is yaml.MappingStartEvent:
            mapping_changed = True
        else:
            if type(event) is yaml.MappingEndEvent:
                mapping_changed = True
                node_stack.pop()
            else:
                if type(event) is yaml.ScalarEvent:
                    if mapping_changed:
                        node = Node(event.value, event.start_mark.line,
                                    node_stack[-1])
                        nodes.append(node)
                        node_stack.append(node)
                    # NOTE(review): every key scalar pushes onto node_stack
                    # while MappingEnd pops only once — verify the intended
                    # parenting for flat multi-key mappings.
                    mapping_changed = not mapping_changed
    return nodes
def canonical_parse(stream):
    """Parse *stream* using the CanonicalLoader and return the resulting
    event iterator."""
    return yaml.parse(stream, Loader=CanonicalLoader)
resp = self.beach.addToCategory( arguments.id, arguments.category ) self.printOut( resp ) if __name__ == '__main__': conf = None parser = argparse.ArgumentParser( description = 'CLI for a Beach cluster.' ) parser.add_argument( 'config', type = str, default = None, help = 'cluster config file' ) args = parser.parse_args() if args.config is None: with open( '~/.beach', 'r' ) as f: userConf = yaml.parse( f.read() ) if 'config' in userConf: conf = userConf[ 'config' ] else: conf = args.config if conf is None: print( "Usage: beach_cli.py pathToBeachConfigFile\n" "If no config file is present, the one defined in ~/.beach will be used." ) else: app = BeachShell( conf ) app.cmdloop()
def test_yaml_syntax(filename):
    """The file must parse as YAML without raising."""
    with open(filename) as f:
        # Drain the generator so the whole file is actually parsed.
        for _ in yaml.parse(f):
            pass
def test_mapping_with_item(self):
    """A one-entry mapping survives the process/emit/load round trip."""
    single_mapping = """\
blah: not-blah
"""
    parsed = yaml.parse(single_mapping)
    emitted = yaml.emit(_process_stack(parsed))
    out = yaml.load(emitted)
    self.assertEqual(out["blah"], "not-blah")
writer = DatumWriter(schema) def column(name, value): column = dict() column['name'] = name column['value'] = value column['clock'] = {'timestamp': long(time.time() * 1e6)} column['ttl'] = 0 return column # parse top level yaml records and output a series of objects matching # 'StreamingMutation' in the Avro interface mutation = dict() mutation['mutation'] = {COSC: {'column': column(None, None)}} try: iter = yaml.parse(sys.stdin) while True: event = iter.next() if isinstance(event, yaml.ScalarEvent): # scalars mark the beginnings of rows or columns scalar = event.value.encode() event = iter.next() if isinstance(event, yaml.MappingStartEvent): # new row mutation['key'] = scalar.encode() else: # new column value = event.value.encode() mutation['mutation'][COSC]['column']['name'] = scalar mutation['mutation'][COSC]['column']['value'] = value # flush the mutation
def test_single_empty_mapping(self):
    """A mapping whose only key has no value loads back as None."""
    single_empty_mapping = """\
blah:
"""
    parsed = yaml.parse(single_empty_mapping)
    emitted = yaml.emit(_process_stack(parsed))
    out = yaml.load(emitted)
    self.assertEqual(out["blah"], None)