def split_buffer(stream, splitter=None, decoder=lambda a: a):
    """Given a generator which yields strings and a splitter function,
    joins all input, splits on the separator and yields each chunk.

    Unlike string.split(), each chunk includes the trailing
    separator, except for the last one if none was found on the end
    of the input.
    """
    splitter = splitter or line_splitter
    buffered = six.text_type('')

    for data in stream_as_text(stream):
        buffered += data
        while True:
            # The splitter returns None when no separator is left in the
            # buffer, otherwise a (chunk, remainder) pair.
            buffer_split = splitter(buffered)
            if buffer_split is None:
                break

            item, buffered = buffer_split
            yield item

    if buffered:
        try:
            yield decoder(buffered)
        except Exception as e:
            raise StreamParseError(e)
def split_buffer(stream, splitter=None, decoder=lambda a: a):
    """Given a generator which yields strings and a splitter function,
    joins all input, splits on the separator and yields each chunk.

    Unlike string.split(), each chunk includes the trailing
    separator, except for the last one if none was found on the end
    of the input.
    """
    splitter = splitter or line_splitter
    buffered = six.text_type('')

    for data in stream_as_text(stream):
        buffered += data
        while True:
            buffer_split = splitter(buffered)
            if buffer_split is None:
                break

            item, buffered = buffer_split
            yield item

    if buffered:
        try:
            yield decoder(buffered)
        except Exception as e:
            log.error(
                'Compose tried decoding the following data chunk, but failed:'
                '\n%s' % repr(buffered)
            )
            raise StreamParseError(e)
def split_buffer(stream, splitter=None, decoder=lambda a: a):
    """Given a generator which yields strings and a splitter function,
    joins all input, splits on the separator and yields each chunk.

    Unlike string.split(), each chunk includes the trailing
    separator, except for the last one if none was found on the end
    of the input.
    """
    splitter = splitter or line_splitter
    buffered = six.text_type('')

    for data in stream_as_text(stream):
        buffered += data
        while True:
            buffer_split = splitter(buffered)
            if buffer_split is None:
                break

            item, buffered = buffer_split
            yield item

    if buffered:
        yield decoder(buffered)
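All three variants rely on the same splitter contract: the splitter takes the
accumulated buffer and returns None until a separator is present, otherwise a
(chunk, remainder) pair. Below is a minimal sketch of helpers consistent with
that contract (the real line_splitter and stream_as_text live in the
surrounding module; these are illustrative stand-ins), followed by a usage
example:

import six


def line_splitter(buffer, separator=u'\n'):
    # Return None until a separator appears, otherwise
    # (chunk-including-separator, remainder).
    index = buffer.find(six.text_type(separator))
    if index == -1:
        return None
    return buffer[:index + 1], buffer[index + 1:]


def stream_as_text(stream):
    # Decode byte chunks to text so buffer concatenation stays uniform.
    for data in stream:
        if not isinstance(data, six.text_type):
            data = data.decode('utf-8', 'replace')
        yield data


# Each yielded chunk keeps its trailing separator; the final unterminated
# chunk is yielded as-is.
chunks = list(split_buffer(iter([b'one\ntw', b'o\nthree'])))
# -> [u'one\n', u'two\n', u'three']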
def decoder(self, graph):
    # json.decoder is a module, not a callable; json.loads is what
    # actually parses the string.
    graph = json.loads(graph)
    increment(graph)
    # Re-serialise, falling back to __dict__ for objects json cannot
    # encode natively.
    graph = json.dumps(graph, default=lambda o: o.__dict__)
    return graph
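The default= hook in json.dumps above is the interesting part: the encoder
invokes it for any object it cannot serialise natively, and falling back to
o.__dict__ flattens a plain object into a JSON mapping. A small self-contained
illustration (Node is a hypothetical class, not from the original code):

import json


class Node(object):
    def __init__(self, name, weight):
        self.name = name
        self.weight = weight


# default= is called for the Node instance, which json cannot encode
# on its own; __dict__ turns it into a plain JSON object.
print(json.dumps({'root': Node('a', 1)}, default=lambda o: o.__dict__))
# {"root": {"name": "a", "weight": 1}}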
#!/usr/bin/python
import argparse
import json
import urllib2

from upsilon.serviceHelpers import *

parser = argparse.ArgumentParser()
parser.add_argument("baseurl")
parser.add_argument("-c", "--criticalCount", type=int, default=0)
parser.add_argument("-w", "--warningCount", type=int, default=5)
args = parser.parse_args()

content = urllib2.urlopen(args.baseurl).read()
# json.decoder is a module; json.loads parses the response body.
jsonStructure = json.loads(content)

# argparse stores --criticalCount as args.criticalCount and
# --warningCount as args.warningCount (not countCritical/countWarning).
if len(jsonStructure) > args.criticalCount:
    exitCritical()
elif len(jsonStructure) > args.warningCount:
    exitWarning()
else:
    exitOk()
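The exit helpers are star-imported from upsilon.serviceHelpers, which is not
shown here. Monitoring plugins of this style conventionally print a status
line and exit with code 0 (OK), 1 (WARNING) or 2 (CRITICAL); the stand-ins
below are an assumption along those lines, handy for trying the script
without upsilon installed:

import sys


# Hypothetical stand-ins for upsilon.serviceHelpers, assuming the
# conventional monitoring exit codes: 0=OK, 1=WARNING, 2=CRITICAL.
def exitOk():
    print("OK")
    sys.exit(0)


def exitWarning():
    print("WARNING")
    sys.exit(1)


def exitCritical():
    print("CRITICAL")
    sys.exit(2)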