Example #1
    def parse(self, unreader):
        buf = StringIO()

        self.get_data(unreader, buf, stop=True)
        
        # Request line
        idx = buf.getvalue().find("\r\n")
        while idx < 0:
            self.get_data(unreader, buf)
            idx = buf.getvalue().find("\r\n")
        self.parse_first_line(buf.getvalue()[:idx])
        rest = buf.getvalue()[idx+2:] # Skip \r\n
        buf.truncate(0)
        buf.write(rest)
        
        # Headers
        idx = buf.getvalue().find("\r\n\r\n")
        done = buf.getvalue()[:2] == "\r\n"
        while idx < 0 and not done:
            self.get_data(unreader, buf)
            idx = buf.getvalue().find("\r\n\r\n")
            done = buf.getvalue()[:2] == "\r\n"
        if done:
            self.unreader.unread(buf.getvalue()[2:])
            return ""
        self.headers = self.parse_headers(buf.getvalue()[:idx])

        ret = buf.getvalue()[idx+4:]
        buf.truncate(0)
        return ret
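
A portability note on the reset idiom above: this parser relies on Python 2 StringIO semantics, where truncate(0) also rewinds the stream. Under Python 3's io.StringIO the position is left untouched, so the next write pads the gap with NUL characters. A minimal sketch of the difference and of the portable seek-then-truncate reset:

from io import StringIO  # Python 3 semantics

buf = StringIO()
buf.write("request line\r\nrest")
buf.truncate(0)                 # empties the buffer, but the position stays at 18
buf.write("x")                  # the gap is filled with NUL characters
assert buf.getvalue() == "\x00" * 18 + "x"

buf = StringIO()
buf.write("request line\r\nrest")
buf.seek(0)                     # rewind first ...
buf.truncate(0)                 # ... then truncate: the portable reset
buf.write("x")
assert buf.getvalue() == "x"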
Example #2
class UnicodeCsvWriter:
    def __init__(self, f, dialect=csv.excel, **kwds):
        self.queue = StringIO()
        self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
        self.stream = f

    def writerow(self, row):
        """
        writerow(unicode) -> None
        This function takes a Unicode string and encodes it to the output.
        """
        data = []
        basestring_type = six.string_types[0]
        for value in row:
            if not isinstance(value, basestring_type):
                value = '%s' % (value,)
            data.append(value.encode('utf-8'))
        self.writer.writerow(data)
        data = self.queue.getvalue()
        data = data.decode('utf-8')
        self.stream.write(data)
        self.queue.truncate(0)

    def writerows(self, rows):
        for row in rows:
            self.writerow(row)
Example #3
class CsvDictsAdapter(object):
    """Provide a DataChange generator and it provides a file-like object which returns csv data"""
    def __init__(self, source_generator):
        self.source = source_generator
        self.buffer = StringIO()
        self.csv = None
        self.add_header = False

    def __iter__(self):
        return self

    def write_header(self):
        self.add_header = True

    def next(self):
        row = self.source.next()
        for k, v in row.items():
            if isinstance(v, basestring):
                row[k] = v.encode('utf-8')
        self.buffer.truncate(0)
        self.buffer.seek(0)

        if not self.csv:
            self.csv = csv.DictWriter(self.buffer, row.keys(), quoting = csv.QUOTE_NONNUMERIC)
            self.csv.writeheader()
        elif self.add_header:
            self.csv.writeheader()
            self.add_header = False

        self.csv.writerow(row)
        self.buffer.seek(0)
        return self.buffer.read()
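
A hypothetical driver for the adapter above (Python 2, matching its next() protocol): a generator of dicts is streamed out one CSV chunk at a time, with the header carried on the first chunk.

import sys

rows = iter([{'name': u'caf\xe9', 'n': 1}, {'name': u'b', 'n': 2}])
for chunk in CsvDictsAdapter(rows):
    sys.stdout.write(chunk)  # the first chunk includes the header row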
Example #4
def export_msg(fn):
    base_tbl = GetTable()
    base = open(fn , "rb")
    fp = StringIO()
    fp.write(base.read())
    fp.seek(0)
    cnlines = codecs.open("t3b_cn.txt" , "rb" , "utf-16").readlines()
    index_end_offset  = struct.unpack("I" , fp.read(4))[0]
    string_list = makestr(cnlines)
    nums = len(string_list)
    fp.seek(0x18c48)
    fp.truncate()
    tmp = fp.tell()
    for i in xrange(nums):
        fp.seek(0,2)
        tmp = fp.tell()
        string = string2hex(string_list[i] , base_tbl)
        fp.write(string)
        fp.seek(0x5dfc + i*0xc)
        fp.write(struct.pack("I" , tmp))
        fp.write(struct.pack("I" , len(string)/2))
        fp.seek(0,2)
    data = fp.getvalue()
    dest = open("import\\00000019.lang" , "wb")
    dest.write(data)
    dest.close()
Example #5
class DebugStream:
    """

    Class for writing log messages to the simx log files
    :param stream_type: stream_type can be of the following types::
     * :func:`simx.core.core.debug1`
     * debug2

    """
    def __init__(self,stream_type):
        self.stream_type = stream_type
        self.debug_str = StringIO()
        # define a logger function dictionary here    
        self.logger = {'debug1':core.debug1,
                       'debug2':core.debug2,
                       'debug3':core.debug3,
                       'info':core.debug_info,
                       'warn':core.warn,
                       'error':core.error,
                       'failure':core.failure }

    # define debug stream writer method
    def write(self, *message):
        for token in message:
            self.debug_str.write(str(token))
        self.logger[self.stream_type](self.debug_str.getvalue())
        self.debug_str.truncate(0)
Example #6
class LogWrapper(object):
    def setUp(self, level=log.INFO, encoding='utf-8'):
        self.f = StringIO()
        self.flo = log.CrawlmiFileLogObserver(self.f, level, encoding)
        self.flo.start()

    def tearDown(self):
        self.flo.stop()

    def clear(self):
        self.f.reset()
        self.f.truncate()

    def get_logged(self, clear=True):
        logged = self.f.getvalue()
        if clear:
            self.clear()
        return logged

    def get_lines(self, strip=True, clear=True):
        lines = self.get_logged(clear=clear).splitlines()
        if strip:
            lines = map(lambda l: l.strip()[25:], lines)
        return lines

    def get_first_line(self, strip=True, clear=True):
        lines = self.get_lines(strip=strip, clear=clear)
        return lines[0] if lines else ''
Example #7
    def __call__(self, environ, start_response):
        request = Request(environ)
        self.lock.acquire()
        try:
            profiler = cProfile.Profile()
            response = profiler.runcall(request.get_response, self.app)
            content_type = response.headers.get('content-type')
            if content_type is None or not content_type.startswith('text/html'):
                # We can't add info to non-HTML output
                return response(environ, start_response)
            stream = StringIO()
            stats = pstats.Stats(profiler, stream=stream)
            stats.strip_dirs()
            stats.sort_stats('cumulative', 'calls')
            stats.print_stats(self.limit)
            output = stream.getvalue()
            stream.seek(0)
            stream.truncate()
            stats.print_callers(self.limit)
            output_callers = stream.getvalue()
            stream.close()
            extra = '<pre style="%s">%s\n%s</pre>' % (
                self.style, cgi.escape(output), cgi.escape(output_callers))
            response.body += extra.encode('ascii', 'xmlcharrefreplace')
            return response(environ, start_response)
        finally:
            self.lock.release()
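
Example #7 reuses one StringIO for two pstats reports, resetting it with seek(0) plus truncate() in between. A self-contained sketch of the same capture-and-reset pattern, using only the standard library:

import cProfile
import pstats
from io import StringIO

profiler = cProfile.Profile()
profiler.runcall(sum, range(1000))

stream = StringIO()
stats = pstats.Stats(profiler, stream=stream)
stats.strip_dirs().sort_stats('cumulative').print_stats(5)
first_report = stream.getvalue()

stream.seek(0)      # rewind and truncate so the second report starts clean
stream.truncate()
stats.print_callers(5)
callers_report = stream.getvalue()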
Example #8
class TagFactory(object):
    def __init__(self):
        self.stream = StringIO()
        self._count = 0

    def write(self, d):
        return self.stream.write(d)

    def _enter_callback(self):
        self._count += 1

    def _exit_callback(self):
        self._count -= 1

    def __call__(self, *args, **kwargs):
        return Tag(self, _enter_callback=self._enter_callback, _exit_callback=self._exit_callback, *args, **kwargs)

    def lines(self):
        if self.stream.tell():
            yield self.stream.getvalue()
            self.stream.truncate(0)

    def escape(self, obj):
        if self._count:
            return escapeHtml(str(obj))
        return str(obj)

    def as_is(self, obj):
        return AsIs(obj)

    def compose(self, f):
        return partial(tag_compose(f, __bw_compat__=True), self)
Example #9
File: srj.py Project: ox-it/humfrey
    def _iter(self, sparql_results_type, fields, bindings, boolean, triples):
        if sparql_results_type not in ('resultset', 'boolean'):
            raise TypeError("Unexpected results type: {0}".format(sparql_results_type))

        # We'll spool to a buffer, and only yield when it gets a bit big.
        buffer = StringIO()

        # Do these attribute lookups only once.
        json_dumps, json_dump, buffer_write = json.dumps, json.dump, buffer.write

        buffer_write('{\n')
        if sparql_results_type == 'boolean':
            buffer_write('  "head": {},\n')
            buffer_write('  "boolean": %s' % ('true' if boolean else 'false'))
        elif sparql_results_type == 'resultset':
            buffer_write('  "head": {\n')
            buffer_write('    "vars": [ %s ]\n' % ', '.join(json_dumps(field) for field in fields))
            buffer_write('  },\n')
            buffer_write('  "results": {\n')
            buffer_write('    "bindings": [\n')
            for i, binding in enumerate(bindings):
                buffer_write('      {' if i == 0 else ',\n      {')
                j = 0
                for field in fields:
                    value = binding.get(field)
                    if value is None:
                        continue
                    buffer_write(',\n        ' if j > 0 else '\n        ')
                    json_dump(field, buffer)
                    if isinstance(value, rdflib.URIRef):
                        buffer_write(': { "type": "uri"')
                    elif isinstance(value, rdflib.BNode):
                        buffer_write(': { "type": "bnode"')
                    elif value.datatype is not None:
                        buffer_write(': { "type": "typed-literal", "datatype": ')
                        json_dump(value.datatype, buffer)
                    elif value.language is not None:
                        buffer_write(': { "type": "literal", "xml:lang": ')
                        json_dump(value.language, buffer)
                    else:
                        buffer_write(': { "type": "literal"')
                    buffer_write(', "value": ')
                    json_dump(value, buffer)
                    buffer_write(' }')

                    j += 1

                buffer_write('\n      }')
            buffer_write('\n    ]')
            buffer_write('\n  }')


            if buffer.tell() > 65000: # Almost 64k
                yield buffer.getvalue()
                buffer.seek(0)
                buffer.truncate()

        buffer_write('\n}')
        yield buffer.getvalue()
        buffer.close()
Example #10
class writer(object):
    def __init__(self, f, *args, **kwargs):
        self.queue = StringIO()
        self.writer = csv.writer(self.queue, *args, **kwargs)
        self.stream = f
        if isinstance(self.stream, basestring):
            self.stream = io.open(f, 'wt', newline='', encoding=kwargs['dialect'].encoding)


    def flush(self):
        data = decode(self.queue.getvalue())
        self.stream.write(data)
        self.queue.truncate(0)

    def writerow(self, row, flush=True):
        self.writer.writerow([encode(s) for s in row])
        if flush: self.flush()

    def writerows(self, rows):
        for row in rows:
            self.writerow(row, False)
        self.flush()

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.flush()
        self.stream.close()

    def __getattr__(self, name):
        return getattr(self.writer, name)
Example #11
class DictWriter(object):
    def __init__(self, f, *args, **kwargs):
        self.queue = StringIO()
        if 'fieldnames' in kwargs:
            kwargs['fieldnames'] = [k.encode('ascii', 'ignore') for k in kwargs['fieldnames']]
        self.writer = csv.DictWriter(self.queue, *args, **kwargs)
        self.stream = f

    def flush(self):
        data = decode(self.queue.getvalue())
        self.stream.write(data)
        self.queue.truncate(0)

    def writerow(self, row, flush=True):
        self.writer.writerow(OrderedDict((k.encode('ascii', 'ignore'), encode(row.get(k, ''))) for k in self.writer.fieldnames))
        if flush: self.flush()

    def writeheader(self):
        self.writerow(OrderedDict((f, f) for f in self.writer.fieldnames), flush=False)
        if len(self.queue.getvalue()) > 10000:
            self.flush()

    def writerows(self, rows):
        for row in rows:
            self.writerow(row)

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.flush()

    def __getattr__(self, name):
        return getattr(self.writer, name)
Example #12
class UnicodeWriter(object):
    """
    A CSV writer which will write rows to CSV file "f",
    which is encoded in the given encoding.
    source: https://docs.python.org/2/library/csv.html
    """

    def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
        # Redirect output to a queue
        self.queue = StringIO()
        self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
        self.stream = f
        self.encoder = codecs.getincrementalencoder(encoding)()

    def writerow(self, row):
        """Unicode-enabled writerow """
        self.writer.writerow([s.encode("utf-8") for s in row])
        # Fetch UTF-8 output from the queue ...
        data = self.queue.getvalue()
        data = data.decode("utf-8")
        # ... and reencode it into the target encoding
        data = self.encoder.encode(data)
        # write to the target stream
        self.stream.write(data)
        # empty queue
        self.queue.truncate(0)

    def writerows(self, rows):
        """Unicode-enabled writerows """
        for row in rows:
            self.writerow(row)
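
Hypothetical usage of the recipe above (Python 2; the file name is illustrative): rows of unicode values go in, bytes in the target encoding come out on the stream.

with open('out.csv', 'wb') as f:
    writer = UnicodeWriter(f, encoding='latin-1')
    writer.writerows([[u'caf\xe9', u'na\xefve'], [u'a', u'b']])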
Example #13
	def jardestroyer(self):
		"""
		disconnection helper function
		
		For all connections established in jarbuilder send the
		disconnect command to the server
		
		:returns: server response (empty string if successful)
		"""
		c = pycurl.Curl()
		buf = SIO()
		uri = '{base}/JSESSION'.format(base=self.page)
		c.setopt(pycurl.URL, uri)
		body = ''
		for cookie in self.cj:
			print "closing cxn: %s"%cookie
			c.setopt(pycurl.COOKIE, "JSESSIONID=%s"%cookie)
			c.setopt(pycurl.SSLVERSION, pycurl.SSLVERSION_SSLv3)
			c.setopt(pycurl.CUSTOMREQUEST, 'DELETE')
			c.setopt(pycurl.WRITEDATA, buf)
			c.perform()
			buf.truncate()
			body+=buf.getvalue()
			buf.reset()
		for c in self.h:
			if c.buf is not None:
				c.buf.close()
				c.buf = None
			c.close()
		self.m.close()
		
		return body
Example #14
class BufferedCompressedWriter(object):

    def __init__(self, outstream, buffer_size=_DEFAULT_BUFFER_SIZE):
        self.outputstream = outstream
        self.compressor = zlib.compressobj()
        self.buffer_size = buffer_size
        self.buffer = StringIO()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()

    def write(self, bytes):
        self.buffer.write(bytes)

        if self.buffer.tell() >= self.buffer_size:
            self.flush()

    def flush(self):
        buffered_bytes = self.buffer.getvalue()
        self.buffer.truncate(0)

        compressed_bytes = self.compressor.compress(buffered_bytes)

        self.outputstream.write(compressed_bytes)


    def close(self):
        self.flush()
        remaining_compressed_bytes = self.compressor.flush()
        self.outputstream.write(remaining_compressed_bytes)
        self.outputstream.flush()
        self.compressor = None
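
A hypothetical round trip for the writer above (Python 2, where StringIO.truncate(0) also rewinds; _DEFAULT_BUFFER_SIZE is assumed to be defined in the same module):

import zlib
from cStringIO import StringIO

out = StringIO()
with BufferedCompressedWriter(out, buffer_size=16) as writer:
    writer.write('hello ' * 10)     # crosses the 16-byte threshold, forcing a flush
assert zlib.decompress(out.getvalue()) == 'hello ' * 10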
Example #15
class DictUnicodeWriter(object):
    """
    Code borrowed from http://stackoverflow.com/a/5838817
    """
    def __init__(self, f, fieldnames, dialect=csv.excel, encoding="utf-8", **kwds):
        # Redirect output to a queue
        self.queue = StringIO()
        self.writer = csv.DictWriter(self.queue, fieldnames, dialect=dialect, **kwds)
        self.stream = f
        self.encoder = getincrementalencoder(encoding)()

    def writerow(self, D):
        self.writer.writerow({k: v.encode("utf-8") for k, v in D.items()})
        # Fetch UTF-8 output from the queue ...
        data = self.queue.getvalue()
        data = data.decode("utf-8")
        # ... and reencode it into the target encoding
        data = self.encoder.encode(data)
        # write to the target stream
        self.stream.write(data)
        # empty queue
        self.queue.truncate(0)

    def writerows(self, rows):
        for D in rows:
            self.writerow(D)

    def writeheader(self):
        self.writer.writeheader()
Example #16
class EchoTestCase(unittest.TestCase):
    """tests for the echo server and client"""
    connection_msg = 'connecting to 127.0.0.1 port 10000'
    sending_msg = 'sending "{0}"'
    received_msg = 'received "{0}"'
    closing_msg = 'closing socket'

    def setUp(self):
        """set up our tests"""
        if not hasattr(self, 'log'):
            # ensure we have a buffer for the client to write to
            self.log = StringIO()
        else:
            # ensure that the buffer is set to the start for the next test
            self.log.seek(0)

    def tearDown(self):
        """clean up after ourselves"""
        if hasattr(self, 'log'):
            # clear our buffer for the next test
            self.log.seek(0)
            self.log.truncate()

    def send_message(self, message):
        """Attempt to send a message using the client and the test buffer
        In case of a socket error, fail and report the problem
        """
        try:
            client(message, self.log)
        except socket.error, e:
            if e.errno == 61:
                msg = "Error: {0}, is the server running?"
                self.fail(msg.format(e.strerror))
            else:
                self.fail("Unexpected Error: {0}".format(str(e)))
Example #17
    def follow(self, s=1, poll_time=.01):
        ''' Do a tail follow. If a callback function is registered it is called with every new line,
        else the line is printed to standard out.

        Arguments:
            s - Number of seconds to wait after each line is dispatched; defaults to 1.
            poll_time - Number of seconds to sleep between polls for new data; defaults to .01. '''
        last = int(time.time())
        readBuffer = StringIO()
        with open(self.tailed_file, 'rb') as file_:
            file_.seek(0, os.SEEK_END)
            while True:
                readBuffer.write(file_.read())
                readBuffer.seek(0)
                complete = True
                for line in readBuffer:
                    if not line.endswith(os.linesep): 
                        complete = False
                        break

                    self.callback(line)
                    time.sleep(s)

                if self.wait_func:
                    last = self.run_wait(last)
                
                # Catch the slop if the last line isn't complete
                readBuffer.truncate(0)
                if not complete:
                    if len(line) > self.max_line_length:
                        raise TailError("Line exceeds maximum allowed line length")

                    readBuffer.write(line)
                
                time.sleep(poll_time)
Example #18
File: body.py Project: czue/restkit
class EOFReader(object):
    def __init__(self, req, unreader):
        self.req = req
        self.unreader = unreader
        self.buf = StringIO()
        self.finished = False
    
    def read(self, size):
        if not isinstance(size, (int, long)):
            raise TypeError("size must be an integral type")
        if size < 0:
            raise ValueError("Size must be positive.")
        if size == 0 or self.finished:
            return ""

        
        data = self.unreader.read()
        while data:
            self.buf.write(data)
            if self.buf.tell() > size:
                break
            data = self.unreader.read()

        if not data:
            self.finished = True
            return self.buf.getvalue()
            
        data = self.buf.getvalue()
        ret, rest = data[:size], data[size:]
        self.buf.truncate(0)
        self.buf.write(rest)
        return ret
Example #19
class TestPrefilterFrontEnd(PrefilterFrontEnd):
    
    input_prompt_template = string.Template('')
    output_prompt_template = string.Template('')
    banner = ''

    def __init__(self):
        ipython0 = get_ipython0().IP
        self.out = StringIO()
        PrefilterFrontEnd.__init__(self, ipython0=ipython0)
        # Clean up the namespace for isolation between tests
        user_ns = self.ipython0.user_ns
        # We need to keep references to things so that they don't
        # get garbage collected (this stinks).
        self.shadow_ns = dict()
        for i in self.ipython0.magic_who_ls():
            self.shadow_ns[i] = user_ns.pop(i)
        # Some more code for isolation (yeah, crazy)
        self._on_enter()
        self.out.flush()
        self.out.reset()
        self.out.truncate()

    def write(self, string, *args, **kwargs):
        self.out.write(string)

    def _on_enter(self):
        self.input_buffer += '\n'
        PrefilterFrontEnd._on_enter(self)
Example #20
    def stream_export(self):
        headers_formatted = self.headers_formatted
        headers_raw = self.headers_raw
        csv_results = self.csv_results
        results = csv_results[1:]

        csv_file = StringIO()
        csv_writer = unicodecsv.writer(csv_file, encoding='utf-8')

        csv_header = []
        for header in headers_raw:
            header_formatted = headers_formatted[header]
            csv_header.append(header_formatted)
        csv_writer.writerow(csv_header)
        yield csv_file.getvalue()
        # Clear the buffer so already-yielded data is not repeated
        csv_file.seek(0)
        csv_file.truncate()

        for result in results:
            results_encoded = []
            for individual_result in result:
                results_encoded.append(individual_result)
            csv_writer.writerow(results_encoded)
            yield csv_file.getvalue()
            # Clear the buffer so already-yielded data is not repeated
            csv_file.seek(0)
            csv_file.truncate()
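
The yield-then-reset pattern above recurs throughout these examples. Reduced to its core (Python 3, where the explicit seek(0) before truncate() is required):

import csv
from io import StringIO

def stream_csv(header, rows):
    buf = StringIO()
    writer = csv.writer(buf)
    writer.writerow(header)
    yield buf.getvalue()
    buf.seek(0)
    buf.truncate()              # drop what was already yielded
    for row in rows:
        writer.writerow(row)
        yield buf.getvalue()
        buf.seek(0)
        buf.truncate()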
Example #21
class DictUnicodeWriter(object):

    def __init__(self, f, fieldnames, dialect=csv.excel, encoding="utf-8", **kwds):
        # Redirect output to a queue
        self.queue = StringIO()
        self.writer = csv.DictWriter(self.queue, fieldnames, dialect=dialect, **kwds)
        self.stream = f
        self.encoder = codecs.getincrementalencoder(encoding)()

    def writerow(self, D):
        self.writer.writerow({k:v for k, v in D.items() if v})
        
        # Fetch UTF-8 output from the queue ...
        data = self.queue.getvalue()
        try: #python2 
            data = data.decode("utf-8")
        except AttributeError: #python3 
            data = str.encode(data).decode("utf-8")
        # ... and re-encode it into the target encoding
        data = self.encoder.encode(data)
        # Write to the target stream
        self.stream.write(data)
        # Empty queue
        self.queue.truncate(0)

    def writerows(self, rows):
        for D in rows:
            self.writerow(D)

    def writeheader(self):
        self.writer.writeheader()
Example #22
class CaptureHandler(logging.StreamHandler):
    MSG_RE = re.compile(r'(?:.* -- )(?P<Message>.*)')

    def __init__(self):
        self._stream = StringIO()

        super(CaptureHandler, self).__init__(self._stream)


    def read_logs(self):
        logs = []

        for log in self._stream.getvalue().splitlines():
            logs.append(log)

        self._stream.truncate(0)

        return logs

    def read_messages(self):
        msgs = []

        for log in self._stream.getvalue().splitlines():
            msgs.append(self.MSG_RE.match(log).group('Message'))

        self._stream.truncate(0)

        return msgs
Example #23
    def from_data(self, fields, rows):
        fp = StringIO()
        writer = csv.writer(fp, quoting=csv.QUOTE_ALL)

        writer.writerow([name.encode("utf-8") for name in fields])

        for data in rows:
            row = []
            for d in data:
                if isinstance(d, basestring):
                    d = d.replace("\n", " ").replace("\t", " ")
                    try:
                        d = d.encode("utf-8")
                    except UnicodeError:
                        pass
                if d is False:
                    d = None
                row.append(d)
            writer.writerow(row)

            if fp.tell() >= 1250:
                fp.seek(0)
                data = fp.read()
                yield data
                fp.seek(0)
                fp.truncate()
                row = []

        fp.seek(0)  # Flush the final data
        data = fp.read()
        fp.close()
        yield data
        return
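
Example #23 spools rows until the buffer crosses a size threshold, then yields one large chunk. A minimal sketch of that idea on its own (Python 3; names are illustrative):

from io import StringIO

def chunked(lines, threshold=1250):
    buf = StringIO()
    for line in lines:
        buf.write(line + "\n")
        if buf.tell() >= threshold:
            yield buf.getvalue()
            buf.seek(0)
            buf.truncate()
    if buf.tell():
        yield buf.getvalue()   # flush the final partial chunk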
Example #24
class EOFReader(object):
    def __init__(self, unreader):
        self.unreader = unreader
        self.buf = StringIO()
        self.finished = False
    
    def read(self, size=None):
        if size == 0 or self.finished:
            return ""
        if size < 0:
            size = None
        
        data = self.unreader.read()
        while data:
            self.buf.write(data)
            if size is not None and self.buf.tell() > size:
                data = self.buf.getvalue()
                ret, rest = data[:size], data[size:]
                self.buf.truncate(0)
                self.buf.write(rest)
                return ret
            data = self.unreader.read()

        self.finished = True
        ret = self.buf.getvalue()
        self.buf.truncate(0)
        return ret
Example #25
class UnicodeCsvWriter:
    """
    A CSV writer that writes rows to CSV file `f` with the given encoding.
    """

    def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
        # Redirect output to a queue
        self.queue = StringIO()
        self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
        self.stream = f
        self.encoder = codecs.getincrementalencoder(encoding)()

    def writerow(self, row):
        # self.writer.writerow([s.encode("utf-8") for s in row])
        self.writer.writerow([self._encode_item(s) for s in row])
        # Fetch UTF-8 output from the queue ...
        data = self.queue.getvalue()
        data = data.decode("utf-8")
        # ... and reencode it into the target encoding
        data = self.encoder.encode(data)
        # write to the target stream
        self.stream.write(data)
        # empty queue
        self.queue.truncate(0)

    def writerows(self, rows):  # pragma: no cover
        for row in rows:
            self.writerow(row)

    def _encode_item(self, data):
        if isstring(data):
            return data.encode('utf-8')
        return data
Example #26
class UnicodeCSVWriter(object):
    """
    A CSV writer which will write rows to a file in the specified encoding.
    """
    def __init__(self, f, encoding='utf-8', **kwargs):
        # Redirect output to a queue
        self.queue = StringIO()
        self.writer = csv.writer(self.queue, **kwargs)
        self.stream = f
        self.encoder = codecs.getincrementalencoder(encoding)()

    def writerow(self, row):
        self.writer.writerow([unicode(s if s != None else '').encode('utf-8') for s in row])
        # Fetch UTF-8 output from the queue ...
        data = self.queue.getvalue()
        data = data.decode('utf-8')
        # ... and reencode it into the target encoding
        data = self.encoder.encode(data)
        # write to the target stream
        self.stream.write(data)
        # empty queue
        self.queue.truncate(0)

    def writerows(self, rows):
        for row in rows:
            self.writerow(row)
Example #27
def scan_doc(filepath):
    is_def_next = False
    comment = StringIO()
    for tok in generate_tokens(open(filepath).readline):
        t_type, t_data, t_begin, t_end, line = tok
        if t_type == COMMENT:
            line = t_data.strip("# ").strip()
            if not line:
                continue
            # Skip these unexpected comments.
            if line[0] == line[-1] == "-":
                continue
            if line[0] == '"':
                continue
            line = line.replace("-#", "-")
            comment.write(line + os.linesep)
        elif t_type == token.NAME:
            if t_data == "def":
                is_def_next = True
                continue
            if is_def_next:
                is_def_next = False
                if t_data.startswith("command_"):
                    yield t_data, comment.getvalue()
                comment.truncate(0)
        elif t_type == token.NEWLINE:
            comment.truncate(0)
Example #28
    class UnicodeWriter:
        """
        A CSV writer which will write rows to CSV file "f",
        which is encoded in the given encoding.
        """

        def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwargs):
            # Redirect output to a queue
            self.queue = StringIO()
            self.writer = csv.writer(self.queue, dialect=dialect, **kwargs)
            self.stream = f
            self.encoder = codecs.getincrementalencoder(encoding)()

        def writerow(self, row):
            row = [unicode(s) for s in row]
            self.writer.writerow([s.encode("utf-8") for s in row])
            # Fetch UTF-8 output from the queue ...
            data = self.queue.getvalue()
            data = data.decode("utf-8")
            # ... and reencode it into the target encoding
            data = self.encoder.encode(data)
            # write to the target stream
            self.stream.write(data)
            # empty queue
            self.queue.truncate(0)

        def writerows(self, rows):
            for row in rows:
                self.writerow(row)
Example #29
    def stream_export(self):
        node_type_id = self.node_type_id
        node_type = get_object_or_404(NodeType, id=node_type_id)
        nodes = node_type.all()
        properties = node_type.properties.all()
        csv_header = []

        csv_file = StringIO()
        csv_writer = unicodecsv.writer(csv_file, encoding='utf-8')

        for prop in properties:
            header = prop.key
            csv_header.append(header)
        csv_writer.writerow(csv_header)
        yield csv_file.getvalue()
        # Clear the buffer so already-yielded data is not repeated
        csv_file.seek(0)
        csv_file.truncate()

        for node in nodes:
            csv_node_values = []
            node_properties = node.properties
            for header_prop in csv_header:
                prop_value = node_properties.get(header_prop, 0)
                csv_node_values.append(prop_value)
            csv_writer.writerow(csv_node_values)
            yield csv_file.getvalue()
            # Clear the buffer so already-yielded data is not repeated
            csv_file.seek(0)
            csv_file.truncate()
Example #30
class CsvDictsAdapter(object):
    """Provide a DataChange generator and it provides a file-like object which returns csv data"""

    def __init__(self, source_generator):
        self.source = source_generator
        self.buffer = StringIO()
        self.csv = None
        self.add_header = False

    def __iter__(self):
        return self

    def write_header(self):
        self.add_header = True

    def next(self):
        row = self.source.next()

        self.buffer.truncate(0)
        self.buffer.seek(0)

        if not self.csv:
            self.csv = csv.DictWriter(self.buffer, row.keys(), quoting=csv.QUOTE_NONNUMERIC)
            self.add_header = True
        if self.add_header:
            if hasattr(self.csv, "writeheader"):
                self.csv.writeheader()
            else:
                self.csv.writerow(dict((fn, fn) for fn in self.csv.fieldnames))
            self.add_header = False

        self.csv.writerow(row)
        self.buffer.seek(0)
        return self.buffer.read()
Example #31
class SubclassableCStringIO(object):
    """
    A wrapper around cStringIO to allow for subclassing.
    """
    __csio = None

    def __init__(self, *a, **kw):
        from cStringIO import StringIO
        self.__csio = StringIO(*a, **kw)

    def __iter__(self):
        return self.__csio.__iter__()

    def next(self):
        return self.__csio.next()

    def close(self):
        return self.__csio.close()

    def isatty(self):
        return self.__csio.isatty()

    def seek(self, pos, mode=0):
        return self.__csio.seek(pos, mode)

    def tell(self):
        return self.__csio.tell()

    def read(self, n=-1):
        return self.__csio.read(n)

    def readline(self, length=None):
        return self.__csio.readline(length)

    def readlines(self, sizehint=0):
        return self.__csio.readlines(sizehint)

    def truncate(self, size=None):
        return self.__csio.truncate(size)

    def write(self, s):
        return self.__csio.write(s)

    def writelines(self, list):
        return self.__csio.writelines(list)

    def flush(self):
        return self.__csio.flush()

    def getvalue(self):
        return self.__csio.getvalue()
Example #32
class UnicodeCSVWriter(object):
    """
    A CSV writer which will write rows to a file in the specified encoding.

    NB: Optimized so that eight-bit encodings skip re-encoding. See:
        https://github.com/onyxfish/csvkit/issues/175
    """
    def __init__(self, f, encoding='utf-8', **kwargs):
        self.encoding = encoding
        self._eight_bit = (self.encoding.lower().replace('_', '-') in EIGHT_BIT_ENCODINGS)

        if self._eight_bit:
            self.writer = csv.writer(f, **kwargs)
        else:
            # Redirect output to a queue for reencoding
            self.queue = StringIO()
            self.writer = csv.writer(self.queue, **kwargs)
            self.stream = f
            self.encoder = codecs.getincrementalencoder(encoding)()

    def writerow(self, row):
        if self._eight_bit:
            self.writer.writerow([unicode(s if s != None else '').encode(self.encoding) for s in row])
        else:
            self.writer.writerow([unicode(s if s != None else '').encode('utf-8') for s in row])
            # Fetch UTF-8 output from the queue...
            data = self.queue.getvalue()
            data = data.decode('utf-8')
            # ...and reencode it into the target encoding
            data = self.encoder.encode(data)
            # write to the file 
            self.stream.write(data)
            # empty the queue
            self.queue.truncate(0)

    def writerows(self, rows):
        for row in rows:
            self.writerow(row)
Example #33
class FakeFile(_FakeNode):
    """
    In-memory file.
    """

    implements(ivfs.IFileSystemLeaf)

    def __init__(self, name=None, parent=None, data=''):
        self.data = StringIO()
        self.data.write(data)
        self.parent = parent
        self.name = name

    def open(self, flags):
        if flags & os.O_TRUNC:
            self.data.seek(0)
            self.data.truncate()
        return self

    def getMetadata(self):
        size = len(self.data.getvalue())
        self.data.seek(0)
        return {'size': size}

    def readChunk(self, offset, length):
        self.data.seek(offset)
        return self.data.read(length)

    def writeChunk(self, offset, data):
        self.data.seek(offset)
        self.data.write(data)

    def close(self):
        pass

    def children(self):
        print "this might break and if it does we should fix the caller"
        return []
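
A hypothetical check of the O_TRUNC path above (Python 2, assuming the module's twisted/zope imports are available):

import os

f = FakeFile(name='notes.txt', data='stale contents')
f.open(os.O_TRUNC)                     # seek(0) + truncate() empties the backing buffer
assert f.getMetadata() == {'size': 0}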
Example #34
    def _test_display_uncollectable_cumulative(self, saveall):
        gc.set_debug(gc.DEBUG_SAVEALL)
        stream = StringIO()
        display = tracemalloc.DisplayGarbage(file=stream)
        display.cumulative = True

        # Leak 1
        UncollectableObject()

        display.display()
        output = stream.getvalue().splitlines()
        self.assertIn('UncollectableObject', output[0])
        self.assertIn(THIS_FILE, output[0])
        if saveall:
            self.assertEqual(len(output), 2)
            self.assertIn('{', output[1])
        else:
            self.assertEqual(len(output), 1)

        # Leak 2
        UncollectableObject()

        stream.seek(0)
        stream.truncate()
        display.display()
        output = stream.getvalue().splitlines()
        self.assertIn('UncollectableObject', output[0])
        self.assertIn(THIS_FILE, output[0])
        if saveall:
            self.assertEqual(len(output), 4)
            self.assertIn('{', output[1])
            self.assertIn('UncollectableObject', output[2])
            self.assertIn(THIS_FILE, output[2])
            self.assertIn('{', output[3])
        else:
            self.assertEqual(len(output), 2)
            self.assertIn('UncollectableObject', output[1])
            self.assertIn(THIS_FILE, output[1])
Example #35
class UnicodeWriter:
    """
    A CSV writer which will write rows to CSV file "f",
    which is encoded in the given encoding.
    """
    def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
        # Redirect output to a queue
        self.queue = StringIO()
        self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
        self.stream = f
        self.encoder = codecs.getincrementalencoder(encoding)()

    def writerow(self, row):
        self.writer.writerow([s.encode("utf-8") for s in row])
        # Fetch UTF-8 output from the queue ...
        data = self.queue.getvalue()
        data = data.decode("utf-8")
        # ... and reencode it into the target encoding
        data = self.encoder.encode(data)
        # write to the target stream
        self.stream.write(data)
        # empty queue
        self.queue.truncate(0)
Example #36
class UnicodeDictWriter:
    """
    A CSV writer which will write dictionaries to CSV file "f",
    which is encoded in the given encoding.
    """
    def __init__(self, f, keys, dialect=csv.excel, encoding="utf-8", **kwds):
        # Redirect output to a queue
        self.queue = StringIO()
        self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
        self.stream = f
        self.keys = keys
        self.encoder = codecs.getincrementalencoder(encoding)()

    def writeheader(self):
        header = OrderedDict()
        for key in self.keys:
            header.update({key: key})
        self.writerow(header)

    def writerow(self, row):
        self.writer.writerow([
            (row.get(key) if row.get(key) else "").encode("utf-8")
            for key in self.keys
        ])
        # Fetch UTF-8 output from the queue ...
        data = self.queue.getvalue()
        data = data.decode("utf-8")
        # ... and reencode it into the target encoding
        data = self.encoder.encode(data)
        # write to the target stream
        self.stream.write(data)
        # empty queue
        self.queue.truncate(0)

    def writerows(self, rows):
        for row in rows:
            self.writerow(row)
Example #37
class UnicodeWriter(object):
    """
    A CSV writer which will write rows to CSV file "f", which is encoded in 
    the given encoding.
    
    """
    def __init__(self, f, encoding="utf-8", **kwds):
        # Redirect output to a queue
        self.queue = StringIO()
        self.writer = csv.writer(self.queue, **kwds)
        self.stream = f
        self.encoder = codecs.getencoder(encoding)

    def writerow(self, row):
        if sys.version_info[0] < 3:
            self.writer.writerow([s.encode('utf-8') for s in row])
            # Fetch UTF-8 output from the queue ...
            data = self.queue.getvalue()
            data = data.decode('utf-8')
        else:
            self.writer.writerow(row)
            data = self.queue.getvalue()
            #in python3, StringIO self.queue returns unicode!
        #data now contains the csv data in unicode
        # ... and reencode it into the target encoding
        data, length = self.encoder(data)
        # write to the target stream
        self.stream.write(data)
        # empty queue, go to start position, then truncate
        self.queue.seek(0)
        self.queue.truncate(0)

    def writerows(self, rows):
        list(map(self.writerow, rows))

    def close(self):
        self.stream.close()
Example #38
class EOFReader(object):
    def __init__(self, req, unreader):
        self.req = req
        self.unreader = unreader
        self.buf = StringIO()
        self.finished = False

    def read(self, size):
        if not isinstance(size, (int, long)):
            raise TypeError("size must be an integral type")
        if size < 0:
            raise ValueError("Size must be positive.")
        if size == 0:
            return ""

        if self.finished:
            data = self.buf.getvalue()
            ret, rest = data[:size], data[size:]
            self.buf.truncate(0)
            self.buf.write(rest)
            return ret

        data = self.unreader.read()
        while data:
            self.buf.write(data)
            if self.buf.tell() > size:
                break
            data = self.unreader.read()

        if not data:
            self.finished = True

        data = self.buf.getvalue()
        ret, rest = data[:size], data[size:]
        self.buf.truncate(0)
        self.buf.write(rest)
        return ret
Example #39
class DictUnicodeWriter(object):
    """
    Code borrowed from http://stackoverflow.com/a/5838817
    """
    def __init__(self,
                 f,
                 fieldnames,
                 dialect=csv.excel,
                 encoding="utf-8",
                 **kwds):
        # Redirect output to a queue
        self.queue = StringIO()
        self.writer = csv.DictWriter(self.queue,
                                     fieldnames,
                                     dialect=dialect,
                                     **kwds)
        self.stream = f
        self.encoder = getincrementalencoder(encoding)()

    def writerow(self, D):
        self.writer.writerow({k: v.encode("utf-8") for k, v in D.items()})
        # Fetch UTF-8 output from the queue ...
        data = self.queue.getvalue()
        data = data.decode("utf-8")
        # ... and reencode it into the target encoding
        data = self.encoder.encode(data)
        # write to the target stream
        self.stream.write(data)
        # empty queue
        self.queue.truncate(0)

    def writerows(self, rows):
        for D in rows:
            self.writerow(D)

    def writeheader(self):
        self.writer.writeheader()
Example #40
class BufferedReader(object):

    def __init__(self, input, buffer_size=65535):
        self.__input       = input
        self.__buffer_size = buffer_size
        self.__buffer      = StringIO()

        # initial fill
        chunk = input.read(buffer_size)
        self.__byte_count = len(chunk)
        self.__buffer.write(chunk)
        self.__buffer.seek(0)

    def read(self, byte_count):
        difference = byte_count - self.__byte_count

        if difference < 0:
            chunk = self.__buffer.read(byte_count)
            self.__byte_count -= byte_count
        else:
            chunk = self.__buffer.read() + self.__input.read(difference)

            # verify size
            if len(chunk) != byte_count:
                raise EOFError("Encountered unexpected end of stream")

            # reset internal buffer
            self.__buffer.seek(0)
            self.__buffer.truncate()

            # replenish
            fresh_chunk = self.__input.read(self.__buffer_size)
            self.__byte_count = len(fresh_chunk)
            self.__buffer.write(fresh_chunk)
            self.__buffer.seek(0)

        return chunk
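
A hypothetical exercise of BufferedReader above (Python 2; the wrapped object only needs a read(n) method). Small reads are served from the internal buffer; larger ones drain it and replenish:

from cStringIO import StringIO

source = BufferedReader(StringIO('abcdefghij'), buffer_size=4)
assert source.read(2) == 'ab'       # served entirely from the buffer
assert source.read(6) == 'cdefgh'   # drains the buffer, reads the rest from input
assert source.read(2) == 'ij'       # from the replenished buffer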
Example #41
class UnicodeWriter:
    """
    A CSV writer which will write rows to CSV file "f",
    which is encoded in the given encoding.
    """
    def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
        # Redirect output to a queue
        self.queue = StringIO()
        self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
        self.stream = f
        self.encoder = getincrementalencoder(encoding)()

    def writerow(self, row):
        if IS_PY2:
            row = [
                s.encode("utf-8") if hasattr(s, 'encode') else s for s in row
            ]
        self.writer.writerow(row)
        # Fetch UTF-8 output from the queue ...
        data = self.queue.getvalue()
        if IS_PY2:
            data = data.decode("utf-8")
        else:
            data = data.strip('\x00')
        # ... and reencode it into the target encoding
        data = self.encoder.encode(data)
        # write to the target stream
        if IS_PY2:
            self.stream.write(data)
        else:
            self.stream.write(data.decode("utf-8"))
        # empty queue
        self.queue.truncate(0)

    def writerows(self, rows):
        for row in rows:
            self.writerow(row)
Example #42
class AccountUnicodeWriter(object):

    """
    A CSV writer which will write rows to CSV file "f",
    which is encoded in the given encoding.
    """

    def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
        # Redirect output to a queue
        self.queue = StringIO()
        # create a writer with Excel formatting settings
        self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
        self.stream = f
        self.encoder = codecs.getincrementalencoder(encoding)()

    def writerow(self, row):
        # we ensure that we do not try to encode none or bool
        row = (x or u'' for x in row)

        encoded_row = [
            c.encode("utf-8") if isinstance(c, unicode) else c for c in row]

        self.writer.writerow(encoded_row)
        # Fetch UTF-8 output from the queue ...
        data = self.queue.getvalue()
        data = data.decode("utf-8")
        # ... and reencode it into the target encoding
        data = self.encoder.encode(data)
        # write to the target stream
        self.stream.write(data)
        # empty queue
        self.queue.truncate(0)

    def writerows(self, rows):
        for row in rows:
            self.writerow(row)
Example #43
class TagFactory(object):
    def __init__(self):
        self.stream = StringIO()
        self._count = 0

    def _enter_callback(self):
        self._count += 1

    def _exit_callback(self):
        self._count -= 1

    def __call__(self, *args, **kwargs):
        return Tag(self.stream,
                   _enter_callback=self._enter_callback,
                   _exit_callback=self._exit_callback,
                   *args,
                   **kwargs)

    def lines(self):
        if self.stream.tell():
            yield self.stream.getvalue()
            self.stream.truncate(0)

    def escape(self, obj):
        if self._count:
            return escapeHtml(str(obj))
        return str(obj)

    def as_is(self, obj):
        return AsIs(obj)

    def compose(self, f):
        warn(
            "Will be removed in the future. Use tag_compose instead of tag.compose",
            FutureWarning)
        return partial(tag_compose(f, __bw_compat__=True), self)
Example #44
def tail(filename, wait=0.1, seek=0):
    st = os.stat(filename)
    fp = open(filename, 'rb')

    # Seek to the end
    if seek <= 0:
        fp.seek(abs(seek), 2)
    else:
        fp.seek(seek)

    # Read until file is truncated, collect results in a string buffer
    size = st[stat.ST_SIZE]
    line = StringIO()
    while True:
        char = fp.read(1)

        # Check if there was something for us to collect
        if char == '':
            st = os.stat(filename)
            if st[stat.ST_SIZE] < size:
                # File truncated
                break
            else:
                try:
                    time.sleep(wait)
                except:
                    break
        else:
            line.write(char)
            # Yield lines
            if char == '\n':
                yield line.getvalue()
                line.seek(0)
                line.truncate()

    fp.close()
Example #45
class UnicodeWriter:
    """
    A CSV writer which will write rows to CSV file "fd",
    which is encoded in the given encoding.
    """
    def __init__(self, fd, dialect=csv.excel, encoding="utf-8", **kwds):
        # Redirect output to a queue
        self.queue = StringIO()
        self.writer = csv.writer(self.queue,
                                 dialect=dialect,
                                 delimiter=';',
                                 quotechar='"',
                                 quoting=csv.QUOTE_NONNUMERIC,
                                 **kwds)
        self.stream = fd
        self.encoder = codecs.getincrementalencoder(encoding)()

    def writerow(self, row):
        self.writer.writerow([encode_value(s) for s in row])
        # Fetch UTF-8 output from the queue ...
        data = self.queue.getvalue()
        data = data.decode("utf-8")
        # ... and reencode it into the target encoding
        data = self.encoder.encode(data)
        # write to the target stream
        self.stream.write(data)
        # empty queue
        self.queue.truncate(0)

    def writerows(self, rows):
        for row in rows:
            self.writerow(row)

    def get_stream(self):
        self.stream.seek(0)
        return self.stream
Example #46
class EchoTestCase(unittest.TestCase):
    """tests for the echo server and client"""
    connection_msg = 'connecting to localhost port 10000'
    sending_msg = 'sending "{0}"'
    received_msg = 'received "{0}"'
    closing_msg = 'closing socket'

    def setUp(self):
        """set up our tests"""
        if not hasattr(self, 'log'):
            # ensure we have a buffer for the client to write to
            self.log = StringIO()
        else:
            # ensure that the buffer is set to the start for the next test
            self.log.seek(0)

    def tearDown(self):
        """clean up after ourselves"""
        if hasattr(self, 'log'):
            # clear our buffer for the next test
            self.log.seek(0)
            self.log.truncate()

    def send_message(self, message):
        """Attempt to send a message using the client and the test buffer

        In case of a socket error, fail and report the problem
        """
        try:
            client(message, self.log)
        except socket.error, e:
            if e.errno == 61:
                msg = "Error: {0}, is the server running?"
                self.fail(msg.format(e.strerror))
            else:
                self.fail("Unexpected Error: {0}".format(str(e)))
Example #47
    def parse(self, unreader):
        if c_parse_request:
            ret = c_parse_request(self, unreader.read)
            if ret is None:
                raise StopIteration()
            return ret

        buf = StringIO()

        self.get_data(unreader, buf, stop=True)

        # Request line
        idx = buf.getvalue().find("\r\n")
        while idx < 0:
            self.get_data(unreader, buf)
            idx = buf.getvalue().find("\r\n")
        self.parse_request_line(buf.getvalue()[:idx])
        rest = buf.getvalue()[idx + 2:]  # Skip \r\n
        buf.truncate(0)
        buf.write(rest)

        # Headers
        idx = buf.getvalue().find("\r\n\r\n")
        done = buf.getvalue()[:2] == "\r\n"
        while idx < 0 and not done:
            self.get_data(unreader, buf)
            idx = buf.getvalue().find("\r\n\r\n")
            done = buf.getvalue()[:2] == "\r\n"
        if done:
            self.unreader.unread(buf.getvalue()[2:])
            return ""
        self.headers = self.parse_headers(buf.getvalue()[:idx])

        ret = buf.getvalue()[idx + 4:]
        buf.truncate(0)
        return ret
Example #48
class Unreader(object):
    def __init__(self, release_fun=None):
        self.buf = StringIO()
        self.release_fun = release_fun

    def chunk(self):
        raise NotImplementedError()

    def read(self, size=None):
        if size is not None and not isinstance(size, (int, long)):
            raise TypeError("size parameter must be an int or long.")
        if size == 0:
            return ""
        if size < 0:
            size = None

        self.buf.seek(0, os.SEEK_END)

        if size is None and self.buf.tell():
            ret = self.buf.getvalue()
            self.buf.truncate(0)
            return ret
        if size is None:
            return self.chunk()

        while self.buf.tell() < size:
            chunk = self.chunk()
            if not len(chunk):
                ret = self.buf.getvalue()
                self.buf.truncate(0)
                return ret
            self.buf.write(chunk)
        data = self.buf.getvalue()
        self.buf.truncate(0)
        self.buf.write(data[size:])
        return data[:size]

    def unread(self, data):
        self.buf.seek(0, os.SEEK_END)
        self.buf.write(data)

    def close(self):
        return None

    def release(self):
        if callable(self.release_fun):
            self.release_fun()
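
A hypothetical concrete subclass to show the Unreader above in action (Python 2; chunk() is the only method a subclass must supply):

class ListUnreader(Unreader):
    def __init__(self, chunks):
        Unreader.__init__(self)
        self.chunks = list(chunks)

    def chunk(self):
        return self.chunks.pop(0) if self.chunks else ""

u = ListUnreader(["hello", "world"])
assert u.read(3) == "hel"    # the unread remainder of the chunk is buffered
u.unread("XYZ")              # pushed-back data is appended to the buffer
assert u.read(5) == "loXYZ"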
Example #49
def test_tsv_writer():
    from cStringIO import StringIO
    io = StringIO()
    converter = Combined2Tsv(FIELDS, io)

    # Header output
    converter.write(FIELDS)
    ret = io.getvalue().split('\t')
    assert len(ret) == len(FIELDS), ret
    io.truncate(0)

    # null output
    converter.write({})
    ret = io.getvalue().split('\t')
    assert len(ret) == len(FIELDS), ret
    io.truncate(0)

    # valid output
    converter.write({
        'time': datetime.datetime.now(),
        'host': '',
        'path': '',
        'query': '',
        'method': '',
        'protocol': '',
        'status': 200,
        'size': 123,
        'referer': '',
        'ua': '',
        'ident': '',
        'user': '',
        'trailing': ''
    })
    ret = io.getvalue().split('\t')
    assert len(ret) == len(FIELDS), ret
    io.truncate(0)
Example #50
    def __init__(self, base_body, max_size=1 * 1024 * 1024):
        """Initializes the instance.

        @param base_body: A JsonObject or dict containing the information to send as the body of the add_events
            request, with the exception of the events field. The events and client_timestamp fields must not be
            included because they will be added later. Note, base_body must have some fields set, such as 'ts' which is
            required by the server.
        @param max_size: The maximum number of bytes this request can consume when it is serialized to JSON.
        """
        assert len(base_body) > 0, "The base_body object must have some fields defined."
        assert 'events' not in base_body, "The base_body object cannot already have 'events' set."
        assert 'client_time' not in base_body, "The base_body object cannot already have 'client_time' set."

        # As an optimization, we use a StringIO object to serialize the request.  We also
        # do a little bit of the JSON object assembly by hand.  Specifically, we serialize the request
        # to JSON without the 'events' field, but then delete the last '}' so that we can manually
        # add in the 'events: [ ... ]' ourselves.  This way we can watch the size of the buffer as
        # we build up events.
        string_buffer = StringIO()
        json_lib.serialize(base_body,
                           output=string_buffer,
                           use_fast_encoding=True)

        # Now go back and find the last '}' and delete it so that we can open up the JSON again.
        location = string_buffer.tell()
        while location > 0:
            location -= 1
            string_buffer.seek(location)
            if string_buffer.read(1) == '}':
                break

        # Now look for the first non-white character.  We need to add in a comma after it.
        last_char = None
        while location > 0:
            location -= 1
            string_buffer.seek(location)
            last_char = string_buffer.read(1)
            if not last_char.isspace():
                break

        # If the character happened to a comma, back up over that since we want to write our own comma.
        if location > 0 and last_char == ',':
            location -= 1

        if location < 0:
            raise Exception(
                'Could not locate trailing "}" and non-whitespace in base JSON for add events request'
            )

        # Now chop off everything after the character at the location.
        location += 1
        string_buffer.seek(location)
        string_buffer.truncate()

        # Append the start of our events field.
        string_buffer.write(', events: [')

        # This buffer keeps track of all of the stuff that must be appended after the events JSON array to terminate
        # the request.  That includes both the threads JSON array and the client timestamp.
        self.__post_fix_buffer = PostFixBuffer(
            '], threads: THREADS, client_time: TIMESTAMP }')

        # The time that will be sent as the 'client_time' parameter for the addEvents request.
        # This may be later updated using the set_client_time method in the case where the same AddEventsRequest
        # is being reused to send the events again.
        self.__post_fix_buffer.set_client_timestamp(time.time())

        self.__buffer = string_buffer
        self.__max_size = max_size

        self.__events_added = 0

        # If we have finished serializing the body, it is stored here until the close() method is invoked.
        self.__body = None
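
The trailing-'}' splice performed above is easiest to see in isolation. A minimal sketch, assuming the standard json module in place of json_lib and a quoted "events" key (the code above deliberately writes a relaxed, unquoted key):

import json
from StringIO import StringIO

string_buffer = StringIO()
string_buffer.write(json.dumps({'token': 'abc', 'session': 'xyz'}))

# Walk backwards to the last '}' and cut the buffer there, reopening the object.
location = string_buffer.tell()
while location > 0:
    location -= 1
    string_buffer.seek(location)
    if string_buffer.read(1) == '}':
        break
string_buffer.seek(location)
string_buffer.truncate()

# Splice in the events array by hand and re-close the object.
string_buffer.write(', "events": [{"message": "hello"}]}')
print(string_buffer.getvalue())
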
Example #51
0
class NatCheck(object):

    def __init__(self, resultfunc, downloadid, peerid, ip, port, rawserver):
        self.resultfunc = resultfunc
        self.downloadid = downloadid
        self.peerid = peerid
        self.ip = ip
        self.port = port
        self.closed = False
        self.buffer = StringIO()
        self.next_len = 1
        self.next_func = self.read_header_len
        try:
            self.connection = rawserver.start_connection((ip, port), self)
            self.connection.write(chr(len(protocol_name)) + protocol_name +
                (chr(0) * 8) + downloadid)
        except socketerror:
            self.answer(False)
        except IOError:
            self.answer(False)

    def answer(self, result):
        self.closed = True
        try:
            self.connection.close()
        except AttributeError:
            pass
        self.resultfunc(result, self.downloadid, self.peerid, self.ip, self.port)

    def read_header_len(self, s):
        if ord(s) != len(protocol_name):
            return None
        return len(protocol_name), self.read_header

    def read_header(self, s):
        if s != protocol_name:
            return None
        return 8, self.read_reserved

    def read_reserved(self, s):
        return 20, self.read_download_id

    def read_download_id(self, s):
        if s != self.downloadid:
            return None
        return 20, self.read_peer_id

    def read_peer_id(self, s):
        if s != self.peerid:
            return None
        self.answer(True)
        return None

    def data_came_in(self, connection, s):
        while True:
            if self.closed:
                return
            i = self.next_len - self.buffer.tell()
            if i > len(s):
                self.buffer.write(s)
                return
            self.buffer.write(s[:i])
            s = s[i:]
            m = self.buffer.getvalue()
            self.buffer.reset()
            self.buffer.truncate()
            x = self.next_func(m)
            if x is None:
                if not self.closed:
                    self.answer(False)
                return
            self.next_len, self.next_func = x

    def connection_lost(self, connection):
        if not self.closed:
            self.closed = True
            self.resultfunc(False, self.downloadid, self.peerid, self.ip, self.port)

    def connection_flushed(self, connection):
        pass
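
The next_len/next_func pairs above form a small incremental parser: data_came_in buffers bytes until next_len is reached, hands the completed field to next_func, and that handler returns the (length, handler) pair for the next field. A stripped-down sketch of the same loop with hypothetical handlers (one length byte, then that many payload bytes):

from StringIO import StringIO

class MiniParser(object):
    def __init__(self):
        self.buffer = StringIO()
        self.next_len, self.next_func = 1, self.read_len

    def read_len(self, s):
        return ord(s), self.read_payload   # next field: ord(s) payload bytes

    def read_payload(self, s):
        print('payload: %r' % s)
        return 1, self.read_len            # back to expecting a length byte

    def data_came_in(self, s):
        while True:
            i = self.next_len - self.buffer.tell()
            if i > len(s):                 # field still incomplete: keep buffering
                self.buffer.write(s)
                return
            self.buffer.write(s[:i])
            s = s[i:]
            m = self.buffer.getvalue()
            self.buffer.seek(0)            # same effect as buffer.reset() above
            self.buffer.truncate()
            self.next_len, self.next_func = self.next_func(m)

p = MiniParser()
p.data_came_in('\x05hel')   # fields may arrive split across packets
p.data_came_in('lo\x02hi')  # prints: payload: 'hello' / payload: 'hi'
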
Example #52
0
class ReturnConnection:
    def __init__(self,handler,singsock,rawserver,locally_initiated = False,
                 specified_dns = None, ext_handshake = False,options = None):
        # Called by network thread
        self.handler = handler        
        self.singsock = singsock # for writing
        self.rawserver = rawserver
        self.buffer = StringIO()
        self.cb_queue = []
        self.listen_port = None
        self.options = None
        self.locally_initiated = locally_initiated
        self.specified_dns = specified_dns
        self.last_use = time()

        self.state = STATE_INITIAL
        self.write(chr(len(protocol_name)) + protocol_name + 
                option_pattern + dialback_infohash + self.handler.get_my_peer_id())
        if ext_handshake:
            self.state = STATE_HS_PEERID_WAIT
            self.next_len = 20
            self.next_func = self.read_peer_id
            self.set_options(options)
        else:
            self.state = STATE_HS_FULL_WAIT
            self.next_len = 1
            self.next_func = self.read_header_len
            
        # Leave autoclose here instead of ReturnConnHandler, as that doesn't record
        # remotely-initiated ReturnConnections before authentication is done.
        self.rawserver.add_task(self._dlbconn_auto_close, EXPIRE_CHECK_INTERVAL)

    #
    # Interface for SocketHandler
    #
    def data_came_in(self, singsock, data):
        """ sockethandler received data """
        # now we got something we can ask for the peer's real port
        dummy_port = singsock.get_port(True)

        if DEBUG:
            print >> sys.stderr,"dlbconn: data_came_in",singsock.get_ip(),singsock.get_port()
        self.handler.measurefunc(len(data))
        self.last_use = time()
        while 1:
            if self.state == STATE_CLOSED:
                return
            i = self.next_len - self.buffer.tell()
            if i > len(data):
                self.buffer.write(data)
                return
            self.buffer.write(data[:i])
            data = data[i:]
            m = self.buffer.getvalue()
            self.buffer.reset()
            self.buffer.truncate()
            try:
                #if DEBUG:
                #    print >> sys.stderr,"dlbconn: Trying to read",self.next_len,"using",self.next_func
                x = self.next_func(m)
            except:
                self.next_len, self.next_func = 1, self.read_dead
                if DEBUG:
                    print_exc(file=sys.stderr)
                raise
            if x is None:
                if DEBUG:
                    print >> sys.stderr,"dlbconn: next_func returned None",self.next_func
                self.close()
                return
            self.next_len, self.next_func = x

    def connection_lost(self,singsock):
        """ kernel or socket handler reports connection lost """
        if DEBUG:
            print >> sys.stderr,"dlbconn: connection_lost",singsock.get_ip(),singsock.get_port(),self.state
        if self.state != STATE_CLOSED:
            self.state = STATE_CLOSED
            self.handler.connection_lost(self)

    def connection_flushed(self,singsock):
        """ sockethandler flushes connection """
        pass

    # 
    # Interface for ReturnConnHandler
    #
    def send_message(self,message):
        self.last_use = time()
        s = tobinary(len(message))+message
        if DEBUG:
            print >> sys.stderr,"dlbconn: Sending message",len(message)
        self.write(s)

    def is_locally_initiated(self):
        return self.locally_initiated

    def get_ip(self):
        return self.singsock.get_ip()

    def get_port(self):
        return self.singsock.get_port()

    def get_listen_port(self):
        return self.listen_port

    def queue_callback(self,dns,callback):
        if callback is not None:
            self.cb_queue.append(callback)

    def dequeue_callbacks(self):
        try:
            for callback in self.cb_queue:
                callback(None,self.specified_dns)
            self.cb_queue = []
        except Exception, e:
            print_exc(file=sys.stderr)
Example #53
0
class StreamCheck:
    def __init__(self):
        global streamno
        self.no = streamno
        streamno += 1
        self.buffer = StringIO()
        self.next_len, self.next_func = 1, self.read_header_len

    def read_header_len(self, s):
        if ord(s) != len(protocol_name):
            print self.no, 'BAD HEADER LENGTH'
        return len(protocol_name), self.read_header

    def read_header(self, s):
        if s != protocol_name:
            print self.no, 'BAD HEADER'
        return 8, self.read_reserved

    def read_reserved(self, s):
        return 20, self.read_download_id

    def read_download_id(self, s):
        if DEBUG:
            print self.no, 'download ID ' + tohex(s)
        return 20, self.read_peer_id

    def read_peer_id(self, s):
        if DEBUG:
            print self.no, 'peer ID ' + make_readable(s)
        return 4, self.read_len

    def read_len(self, s):
        l = toint(s)
        if l > 2**23:
            print self.no, 'BAD LENGTH: ' + str(l) + ' (' + s + ')'
        return l, self.read_message

    def read_message(self, s):
        if not s:
            return 4, self.read_len
        m = s[0]
        if ord(m) > 8:
            print self.no, 'BAD MESSAGE: ' + str(ord(m))
        if m == Connecter.REQUEST:
            if len(s) != 13:
                print self.no, 'BAD REQUEST SIZE: ' + str(len(s))
                return 4, self.read_len
            index = toint(s[1:5])
            begin = toint(s[5:9])
            length = toint(s[9:])
            print self.no, 'Request: ' + str(index) + ': ' + str(
                begin) + '-' + str(begin) + '+' + str(length)
        elif m == Connecter.CANCEL:
            if len(s) != 13:
                print self.no, 'BAD CANCEL SIZE: ' + str(len(s))
                return 4, self.read_len
            index = toint(s[1:5])
            begin = toint(s[5:9])
            length = toint(s[9:])
            print self.no, 'Cancel: ' + str(index) + ': ' + str(
                begin) + '-' + str(begin) + '+' + str(length)
        elif m == Connecter.PIECE:
            index = toint(s[1:5])
            begin = toint(s[5:9])
            length = len(s) - 9
            print self.no, 'Piece: ' + str(index) + ': ' + str(
                begin) + '-' + str(begin) + '+' + str(length)
        else:
            print self.no, 'Message ' + str(ord(m)) + ' (length ' + str(
                len(s)) + ')'
        return 4, self.read_len

    def write(self, s):
        while 1:
            i = self.next_len - self.buffer.tell()
            if i > len(s):
                self.buffer.write(s)
                return
            self.buffer.write(s[:i])
            s = s[i:]
            m = self.buffer.getvalue()
            self.buffer.reset()
            self.buffer.truncate()
            x = self.next_func(m)
            self.next_len, self.next_func = x
Example #54
0
class Burner(object):
    """Burner class."""
    def __init__(self):
        """Constructor."""
        self.url_opener = SimpleOpener()
        self._feed_buf = StringIO()
        self.nodes = []
        self.instances = []
        self.to_rem = []
        self.queued_ops = []
        self.opts = None
        self.queue_retry = False
        self.disk_count = self.disk_growth = self.disk_size = None
        self.hvp = self.bep = None
        self.ParseOptions()
        self.cl = cli.GetClient()
        self.GetState()

    def ClearFeedbackBuf(self):
        """Clear the feedback buffer."""
        self._feed_buf.truncate(0)

    def GetFeedbackBuf(self):
        """Return the contents of the buffer."""
        return self._feed_buf.getvalue()

    def Feedback(self, msg):
        """Acumulate feedback in our buffer."""
        formatted_msg = "%s %s" % (time.ctime(utils.MergeTime(msg[0])), msg[2])
        self._feed_buf.write(formatted_msg + "\n")
        if self.opts.verbose:
            Log(formatted_msg, indent=3)

    def MaybeRetry(self, retry_count, msg, fn, *args):
        """Possibly retry a given function execution.

    @type retry_count: int
    @param retry_count: retry counter:
        - 0: non-retryable action
        - 1: last retry for a retryable action
        - MAX_RETRIES: original try for a retryable action
    @type msg: str
    @param msg: the kind of the operation
    @type fn: callable
    @param fn: the function to be called

    """
        try:
            val = fn(*args)
            if retry_count > 0 and retry_count < MAX_RETRIES:
                Log("Idempotent %s succeeded after %d retries", msg,
                    MAX_RETRIES - retry_count)
            return val
        except Exception, err:  # pylint: disable=W0703
            if retry_count == 0:
                Log("Non-idempotent %s failed, aborting", msg)
                raise
            elif retry_count == 1:
                Log("Idempotent %s repeated failure, aborting", msg)
                raise
            else:
                Log("Idempotent %s failed, retry #%d/%d: %s", msg,
                    MAX_RETRIES - retry_count + 1, MAX_RETRIES, err)
                return self.MaybeRetry(retry_count - 1, msg, fn, *args)
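
The retry_count convention in the docstring (MAX_RETRIES for the original try, 1 for the last retry, 0 for non-retryable) can be exercised standalone. A hedged sketch with a hypothetical flaky function; MAX_RETRIES = 3 is assumed for this sketch, not taken from the original:

MAX_RETRIES = 3   # assumed value for this sketch

def maybe_retry(retry_count, msg, fn, *args):
    try:
        val = fn(*args)
        if 0 < retry_count < MAX_RETRIES:
            print('Idempotent %s succeeded after %d retries'
                  % (msg, MAX_RETRIES - retry_count))
        return val
    except Exception, err:
        if retry_count <= 1:
            raise
        print('Idempotent %s failed, retry #%d/%d: %s'
              % (msg, MAX_RETRIES - retry_count + 1, MAX_RETRIES, err))
        return maybe_retry(retry_count - 1, msg, fn, *args)

attempts = [0]
def flaky():
    attempts[0] += 1
    if attempts[0] < 3:
        raise RuntimeError('transient failure %d' % attempts[0])
    return 'ok'

print(maybe_retry(MAX_RETRIES, 'demo operation', flaky))   # succeeds on try 3
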
Example #55
0
    def test_deserialize_messages(self):
        import rospy.msg
        from test_rospy.msg import Val
        num_tests = 10
        teststrs = [
            'foostr-%s' % random.randint(0, 10000)
            for i in xrange(0, num_tests)
        ]
        valids = []
        for t in teststrs:
            fmt = "<II%ss" % len(t)
            size = struct.calcsize(fmt) - 4
            valids.append(struct.pack(fmt, size, len(t), t))
        data_class = Val

        def validate_vals(vals, teststrs=teststrs):
            for i, v in zip(range(0, len(vals)), vals):
                self.assert_(isinstance(v, Val))
                self.assertEquals(teststrs[i], v.val)

        b = StringIO()
        msg_queue = []

        #test with null buff
        try:
            rospy.msg.deserialize_messages(None, msg_queue, data_class)
        except roslib.message.DeserializationError:
            pass
        #test will null msg_queue
        try:
            rospy.msg.deserialize_messages(b, None, data_class)
        except roslib.message.DeserializationError:
            pass
        #test with empty buff
        rospy.msg.deserialize_messages(b, msg_queue, data_class)
        self.assertEquals(0, len(msg_queue))
        self.assertEquals(0, b.tell())

        #deserialize a simple value
        b.truncate(0)
        b.write(valids[0])
        rospy.msg.deserialize_messages(b, msg_queue, data_class)
        self.assertEquals(1, len(msg_queue))
        validate_vals(msg_queue)
        # - buffer should be reset
        self.assertEquals(0, b.tell())
        del msg_queue[:]

        #verify deserialize does not read past b.tell()
        b.truncate(0)
        b.write(valids[0])
        b.write(valids[1])
        b.seek(len(valids[0]))
        rospy.msg.deserialize_messages(b, msg_queue, data_class)
        self.assertEquals(1, len(msg_queue))
        validate_vals(msg_queue)
        # - buffer should be reset
        self.assertEquals(0, b.tell())

        del msg_queue[:]

        #deserialize an incomplete message
        b.truncate(0)
        b.write(valids[0][:-1])
        rospy.msg.deserialize_messages(b, msg_queue, data_class)
        self.failIf(
            msg_queue,
            "deserialize of an incomplete buffer returned %s" % msg_queue)

        del msg_queue[:]

        #deserialize with extra data leftover
        b.truncate(0)
        b.write(valids[0] + 'leftovers')
        rospy.msg.deserialize_messages(b, msg_queue, data_class)
        self.assertEquals(1, len(msg_queue))
        validate_vals(msg_queue)
        # - leftovers should be pushed to the front of the buffer
        self.assertEquals('leftovers', b.getvalue())

        del msg_queue[:]

        #deserialize multiple values
        b.truncate(0)
        for v in valids:
            b.write(v)
        rospy.msg.deserialize_messages(b, msg_queue, data_class)
        self.assertEquals(len(valids), len(msg_queue))
        validate_vals(msg_queue)
        # - buffer should be reset
        self.assertEquals(0, b.tell())

        del msg_queue[:]

        #deserialize multiple values with max_msgs
        max_msgs = 5
        b.truncate(0)
        for v in valids:
            b.write(v)
        rospy.msg.deserialize_messages(b,
                                       msg_queue,
                                       data_class,
                                       max_msgs=max_msgs)
        self.assertEquals(max_msgs, len(msg_queue))
        validate_vals(msg_queue)
        # - buffer should have the remaining msgs
        b2 = StringIO()
        for v in valids[max_msgs:]:
            b2.write(v)
        self.assertEquals(b.getvalue(), b2.getvalue())

        #deserialize rest and verify that msg_queue is appended to
        rospy.msg.deserialize_messages(b, msg_queue, data_class)
        self.assertEquals(len(valids), len(msg_queue))
        validate_vals(msg_queue)

        del msg_queue[:]

        #deserialize multiple values with queue_size
        queue_size = 5
        b.truncate(0)
        for v in valids:
            b.write(v)
        # fill queue with junk
        msg_queue = [1, 2, 3, 4, 5, 6, 7, 9, 10, 11]
        rospy.msg.deserialize_messages(b,
                                       msg_queue,
                                       data_class,
                                       queue_size=queue_size)
        self.assertEquals(queue_size, len(msg_queue))
        # - msg_queue should have the most recent values only
        validate_vals(msg_queue, teststrs[-queue_size:])
        # - buffer should be reset
        self.assertEquals(0, b.tell())

        #deserialize multiple values with max_msgs and queue_size
        queue_size = 5
        max_msgs = 5
        b.truncate(0)
        for v in valids:
            b.write(v)
        # fill queue with junk
        msg_queue = [1, 2, 3, 4, 5, 6, 7, 9, 10, 11]
        rospy.msg.deserialize_messages(b,
                                       msg_queue,
                                       data_class,
                                       max_msgs=max_msgs,
                                       queue_size=queue_size)
        self.assertEquals(queue_size, len(msg_queue))
        # - msg_queue should have the oldest messages
        validate_vals(msg_queue)
        # - buffer should have the remaining msgs
        b2 = StringIO()
        for v in valids[max_msgs:]:
            b2.write(v)
        self.assertEquals(b.getvalue(), b2.getvalue())
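
The valids built above follow a simple framing: a 4-byte little-endian total-length prefix, then the serialized message (which for Val is a 4-byte string length plus the string itself). A minimal sketch of one frame, equivalent to the "<II%ss" packing with size = total - 4:

import struct

t = 'foostr-42'
body = struct.pack('<I%ss' % len(t), len(t), t)   # Val: string length + bytes
frame = struct.pack('<I', len(body)) + body       # message-length prefix
print(repr(frame))
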
Example #56
0
class PluginController(PluginControllerBase):
    """
    TODO: Doc string.
    """
    def __init__(self, id, available_plugins, mapper_manager):
        PluginControllerBase.__init__(self, id, available_plugins,
                                      mapper_manager)
        self._active_plugin = None
        self.last_command_information = None
        self._buffer = StringIO()

    def setActivePlugin(self, plugin):
        self._active_plugin = plugin

    def processCommandInput(self, prompt, username, current_path,
                            command_string, interactive):
        """
        Receives the prompt that the current session has, the actual command_string that
        the user typed and if the command is interactive. If it is interactive the
        plugin controller does not choose a new active plugin but use the one the
        is already set (if none is set it raises an exeception).

        If always returns an string. It could be modified by the active plugin or, if
        there is none available, it will return the original command_string.
        """

        if interactive:
            return None
        else:
            self._disable_active_plugin()

        choosen_plugin = self._get_plugins_by_input(command_string)
        if choosen_plugin is None:
            model.api.devlog(
                "There is no active plugin to handle current command/user input"
            )
            return None
        self._active_plugin = choosen_plugin

        modified_cmd_string = self._active_plugin.processCommandString(
            username, current_path, command_string)

        if self._is_command_malformed(command_string, modified_cmd_string):
            return None
        else:
            cmd_info = CommandRunInformation(
                **{
                    'workspace': model.api.getActiveWorkspace().name,
                    'itime': time(),
                    'command': command_string.split()[0],
                    'params': ' '.join(command_string.split()[1:])
                })
            self._mapper_manager.save(cmd_info)

            self.last_command_information = cmd_info

            return modified_cmd_string if isinstance(modified_cmd_string,
                                                     basestring) else None

    def storeCommandOutput(self, output):
        """
        Receives and output string and stores it in the buffer. Returns False
        if the output was not added to the plugin controllers buffer. Returns
        True otherwise.
        """
        if not self.getActivePluginStatus():
            return False
        else:
            self._buffer.write(output)
            return True

    def getPluginAutocompleteOptions(self, prompt, username, current_path,
                                     command_string, interactive):
        """
        This method should return a list of possible completitions based on the
        current output.
        TODO: We should think how to actually implement this...
        May be checking which plugin should handle the command in the current input
        and then passing it to the plugin to return a list of possible values.
        Each plugin implementation should return possible option according to
        what was received since it's the plugin the one it knows the command line
        parameters, etc.
        """
        if interactive:
            return None
        else:
            self._disable_active_plugin()

        choosen_plugin = self._get_plugins_by_input(command_string)
        if choosen_plugin is None:
            model.api.devlog(
                "There is no active plugin to handle current command/user input"
            )
            return None

        self._active_plugin = choosen_plugin

        new_options = self._active_plugin.getCompletitionSuggestionsList(
            command_string)
        return new_options

    def getActivePluginStatus(self):
        """
        Returns True if an active plugin is set, False otherwise.
        """
        return (self._active_plugin is not None)

    def _disable_active_plugin(self):
        """
        This method is supposed to disable the active plugin.
        """
        model.api.devlog("Disabling active plugin")
        self._active_plugin = None

    def onCommandFinished(self):
        """
        This method is called when the last executed command has finished. It's
        in charge of giving the plugin the output to be parsed.
        """
        cmd_info = self.last_command_information
        cmd_info.duration = time() - cmd_info.itime
        self._mapper_manager.save(cmd_info)

        if self._active_plugin.has_custom_output():
            if not os.path.isfile(self._active_plugin.get_custom_file_path()):
                model.api.devlog(
                    "Report file PluginController output file (%s) not created"
                    % self._active_plugin.get_custom_file_path())
                return False
            output_file = open(self._active_plugin.get_custom_file_path(), 'r')
            output = output_file.read()
            self._buffer.seek(0)
            self._buffer.truncate()
            self._buffer.write(output)

        self.processOutput(self._active_plugin, self._buffer.getvalue())

        self._buffer.seek(0)
        self._buffer.truncate()
        model.api.devlog("PluginController buffer cleared")

        self._disable_active_plugin()

        return True
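
The buffer lifecycle above (accumulate via storeCommandOutput, hand off and clear in onCommandFinished) can be mimicked without Faraday's machinery. A hedged, self-contained sketch; FakePlugin and MiniController are hypothetical stand-ins, not the real classes:

from StringIO import StringIO

class FakePlugin(object):
    def parseOutputString(self, output):          # hypothetical parse hook
        print('plugin received %d bytes' % len(output))

class MiniController(object):
    def __init__(self):
        self._active_plugin = FakePlugin()
        self._buffer = StringIO()

    def storeCommandOutput(self, output):
        self._buffer.write(output)

    def onCommandFinished(self):
        self._active_plugin.parseOutputString(self._buffer.getvalue())
        self._buffer.seek(0)      # rewind, then truncate, as above
        self._buffer.truncate()

c = MiniController()
c.storeCommandOutput('PORT   STATE SERVICE\n')
c.storeCommandOutput('22/tcp open  ssh\n')
c.onCommandFinished()   # output handed off, buffer cleared for the next command
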
Example #57
0
class RecordWriter(object):
    def __init__(self, ofile, maxresultrows=None):
        self._maxresultrows = 50000 if maxresultrows is None else maxresultrows

        self._ofile = ofile
        self._fieldnames = None
        self._buffer = StringIO()

        self._writer = csv.writer(self._buffer, dialect=CsvDialect)
        self._writerow = self._writer.writerow
        self._finished = False
        self._flushed = False

        self._inspector = OrderedDict()
        self._chunk_count = 0
        self._record_count = 0
        self._total_record_count = 0L

    @property
    def is_flushed(self):
        return self._flushed

    @is_flushed.setter
    def is_flushed(self, value):
        self._flushed = True if value else False

    @property
    def ofile(self):
        return self._ofile

    @ofile.setter
    def ofile(self, value):
        self._ofile = value

    def flush(self, finished=None, partial=None):
        assert finished is None or isinstance(finished, bool)
        assert partial is None or isinstance(partial, bool)
        assert not (finished is None and partial is None)
        assert finished is None or partial is None
        self._ensure_validity()

    def write_message(self, message_type, message_text, *args, **kwargs):
        self._ensure_validity()
        self._inspector.setdefault('messages', []).append(
            (message_type, message_text.format(*args, **kwargs)))

    def write_record(self, record):
        self._ensure_validity()
        self._write_record(record)

    def write_records(self, records):
        self._ensure_validity()
        write_record = self._write_record
        for record in records:
            write_record(record)

    def _clear(self):
        self._buffer.reset()
        self._buffer.truncate()
        self._inspector.clear()
        self._record_count = 0
        self._flushed = False

    def _ensure_validity(self):
        if self._finished is True:
            assert self._record_count == 0 and len(self._inspector) == 0
            raise RuntimeError('I/O operation on closed record writer')

    def _write_record(self, record):

        fieldnames = self._fieldnames

        if fieldnames is None:
            self._fieldnames = fieldnames = record.keys()
            value_list = imap(lambda fn: unicode(fn).encode('utf-8'),
                              fieldnames)
            value_list = imap(lambda fn: (fn, b'__mv_' + fn), value_list)
            self._writerow(list(chain.from_iterable(value_list)))

        get_value = record.get
        values = []

        for fieldname in fieldnames:
            value = get_value(fieldname, None)

            if value is None:
                values += (None, None)
                continue

            value_t = type(value)

            if issubclass(value_t, (list, tuple)):

                if len(value) == 0:
                    values += (None, None)
                    continue

                if len(value) > 1:
                    value_list = value
                    sv = b''
                    mv = b'$'

                    for value in value_list:

                        if value is None:
                            sv += b'\n'
                            mv += b'$;$'
                            continue

                        value_t = type(value)

                        if value_t is not bytes:

                            if value_t is bool:
                                value = str(value.real)
                            elif value_t is unicode:
                                value = value.encode('utf-8',
                                                     errors='backslashreplace')
                            elif value_t is int or value_t is long or value_t is float or value_t is complex:
                                value = str(value)
                            elif issubclass(value_t, (dict, list, tuple)):
                                value = str(''.join(
                                    RecordWriter._iterencode_json(value, 0)))
                            else:
                                value = repr(value).encode(
                                    'utf-8', errors='backslashreplace')

                        sv += value + b'\n'
                        mv += value.replace(b'$', b'$$') + b'$;$'

                    values += (sv[:-1], mv[:-2])
                    continue

                value = value[0]
                value_t = type(value)

            if value_t is bool:
                values += (str(value.real), None)
                continue

            if value_t is bytes:
                values += (value, None)
                continue

            if value_t is unicode:
                values += (value.encode('utf-8',
                                        errors='backslashreplace'), None)
                continue

            if value_t is int or value_t is long or value_t is float or value_t is complex:
                values += (str(value), None)
                continue

            if issubclass(value_t, dict):
                values += (str(''.join(RecordWriter._iterencode_json(value,
                                                                     0))),
                           None)
                continue

            values += (repr(value).encode('utf-8',
                                          errors='backslashreplace'), None)

        self._writerow(values)
        self._record_count += 1

        if self._record_count >= self._maxresultrows:
            self.flush(partial=True)

    try:
        # noinspection PyUnresolvedReferences
        from _json import make_encoder
    except ImportError:
        # We may be running under PyPy 2.5 which does not include the _json module
        _iterencode_json = JSONEncoder(separators=(',', ':')).iterencode
    else:
        # Creating _iterencode_json this way yields a two-fold performance improvement on Python 2.7.9 and 2.7.10
        from json.encoder import encode_basestring_ascii

        @staticmethod
        def _default(o):
            raise TypeError(repr(o) + ' is not JSON serializable')

        _iterencode_json = make_encoder(
            {},  # markers (for detecting circular references)
            _default,  # object_encoder
            encode_basestring_ascii,  # string_encoder
            None,  # indent
            ':',
            ',',  # separators
            False,  # sort_keys
            False,  # skip_keys
            True  # allow_nan
        )

        del make_encoder
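
The dual-column encoding in _write_record above is compact but easy to miss: each field gets a plain column (newline-joined values) and a __mv_ column where every element is wrapped as $...$ separated by ';', with literal '$' doubled. A minimal sketch of just that transformation:

def encode_mv(values):
    sv = '\n'.join(values)  # plain single-value column
    mv = ';'.join('$%s$' % v.replace('$', '$$') for v in values)  # __mv_ column
    return sv, mv

print(encode_mv(['a', 'b$c']))   # ('a\nb$c', '$a$;$b$$c$')
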
Example #58
0
    lectura = str(binascii.hexlify(lectura).decode('utf-8'))
    ## Add the current reading to the buffer
    s.write(lectura)

    # Send the buffer every 5 seconds
    if ((time.time() - t) >= 5):
        elapsed = time.time() - t
        t = time.time()
        time_start2 = time.time()
        print "Package sent: " + str(count) + " Seconds: " + str(elapsed)
        print "Start : " + str(time_start2)
        mqttc.publish("hello/world", s.getvalue(), 2)
        print "Finished " + str((time.time() - time_start2))
        count = count + 1
        #Clear buffer
        s.truncate(0)
        s.seek(0)
        #s.write("----------init-"+str(count)+"----------------------------")
        #f.write("-init-"+str(count)+"----------------------------")
        #print s.getvalue()

##  print("--- %s seconds ---" % (time.time() - time_start))
##  lect_s=str(lectura)
##  lect_s= re.sub(r'\\.','', lect_s)[2:]
##    print(lectura)
    f.write(lectura)

mqttc.publish("hello/world", s.getvalue(), 2)
print "Finished que "
count = count + 1
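
The clear step above (truncate(0) then seek(0)) works with Python 2's StringIO, where truncate(0) also moves the position back. Under io.StringIO the position is untouched by truncate(), so the portable idiom is to rewind first; a sketch:

from io import StringIO   # io semantics, also available on Python 2

s = StringIO()
s.write(u'sensor reading batch 1')
s.seek(0)      # rewind first...
s.truncate()   # ...then drop everything from the position on
s.write(u'batch 2')
print(s.getvalue())   # 'batch 2', no stale bytes left behind
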
Example #59
0
    def test_serialize_message(self):
        import rospy.msg
        import rospy.rostime
        # have to fake-init rostime so that Header can be stamped
        rospy.rostime.set_rostime_initialized(True)

        buff = StringIO()
        seq = random.randint(1, 1000)

        #serialize_message(seq, msg)
        from test_rospy.msg import Val

        #serialize a simple 'Val' with a string in it
        teststr = 'foostr-%s' % time.time()
        val = Val(teststr)

        fmt = "<II%ss" % len(teststr)
        size = struct.calcsize(fmt) - 4
        valid = struct.pack(fmt, size, len(teststr), teststr)

        rospy.msg.serialize_message(buff, seq, val)

        self.assertEquals(valid, buff.getvalue())

        #test repeated serialization
        rospy.msg.serialize_message(buff, seq, val)
        rospy.msg.serialize_message(buff, seq, val)
        self.assertEquals(valid * 3, buff.getvalue())

        # - once more just to make sure that the buffer position is
        # being preserved properly
        buff.seek(0)
        rospy.msg.serialize_message(buff, seq, val)
        self.assertEquals(valid * 3, buff.getvalue())
        rospy.msg.serialize_message(buff, seq, val)
        self.assertEquals(valid * 3, buff.getvalue())
        rospy.msg.serialize_message(buff, seq, val)
        self.assertEquals(valid * 3, buff.getvalue())

        #test sequence parameter
        buff.truncate(0)

        from test_rospy.msg import HeaderVal
        t = rospy.Time.now()
        t.secs = t.secs - 1  # move it back in time
        h = rospy.Header(None, rospy.Time.now(), teststr)
        h.stamp = t
        val = HeaderVal(h, teststr)
        seq += 1

        rospy.msg.serialize_message(buff, seq, val)
        self.assertEquals(val.header, h)
        self.assertEquals(seq, h.seq)
        #should not have been changed
        self.assertEquals(t, h.stamp)
        self.assertEquals(teststr, h.frame_id)

        #test frame_id setting
        h.frame_id = None
        rospy.msg.serialize_message(buff, seq, val)
        self.assertEquals(val.header, h)
        self.assertEquals('0', h.frame_id)
Example #60
0
class WebSocket:
    collector = None
    producer = None

    def __init__(self, handler, request, message_encoding=None):
        self.handler = handler
        self.wasc = handler.wasc
        self.request = request
        self.channel = request.channel
        self.channel.set_terminator(2)
        self.rfile = BytesIO()
        self.masks = b""
        self.has_masks = True
        self.buf = b""
        self.payload_length = 0
        self.opcode = None
        self.default_op_code = OPCODE_TEXT
        self._closed = False
        self.encoder_config = None
        self.message_encoding = self.setup_encoding(message_encoding)

    def close(self):
        if self._closed: return
        self._closed = True

    def closed(self):
        return self._closed

    def setup_encoding(self, message_encoding):
        if message_encoding == skitai.WS_MSG_GRPC:
            i, o = discover.find_type(self.request.uri[1:])
            self.encoder_config = (i[0], o[0])
            self.default_op_code = OPCODE_BINARY
            self.message_encode = self.grpc_encode
            self.message_decode = self.grpc_decode
        elif message_encoding == skitai.WS_MSG_JSON:
            self.message_encode = json.dumps
            self.message_decode = json.loads
        elif message_encoding == skitai.WS_MSG_XMLRPC:
            self.message_encode = xmlrpclib.dumps
            self.message_decode = xmlrpclib.loads
        else:
            self.message_encode = self.transport
            self.message_decode = self.transport
        return message_encoding

    def transport(self, msg):
        return msg

    def grpc_encode(self, msg):
        f = self.encoder_config[0]()
        f.ParseFromString(msg)
        return f

    def grpc_decode(self, msg):
        return msg.SerializeToString()

    def _tobytes(self, b):
        if sys.version_info[0] < 3:
            return map(ord, b)
        else:
            return b

    def collect_incoming_data(self, data):
        #print (">>>>", data)
        if not data:
            # closed connection
            self.close()
            return

        if self.masks or (not self.has_masks and self.payload_length):
            self.rfile.write(data)
        else:
            self.buf += data

    def found_terminator(self):
        buf, self.buf = self.buf, b""
        if self.masks or (not self.has_masks and self.payload_length):
            # end of message
            masked_data = bytearray(self.rfile.getvalue())
            if self.masks:
                masking_key = bytearray(self.masks)
                data = bytearray([
                    masked_data[i] ^ masking_key[i % 4]
                    for i in range(len(masked_data))
                ])
            else:
                data = masked_data

            # Remember the opcode before the per-frame state is reset below;
            # otherwise the PING check would always compare against None.
            opcode = self.opcode
            if opcode == OPCODE_TEXT:
                # text
                data = data.decode('utf-8')

            self.payload_length = 0
            self.opcode = None
            self.masks = b""
            self.has_masks = True
            self.rfile.seek(0)
            self.rfile.truncate()
            self.channel.set_terminator(2)
            if opcode == OPCODE_PING:
                self.send(data, OPCODE_PONG)
            else:
                self.handle_message(data)

        elif self.payload_length:
            self.masks = buf
            self.channel.set_terminator(self.payload_length)

        elif self.opcode:
            if len(buf) == 2:
                fmt = ">H"
            else:
                fmt = ">Q"
            self.payload_length = struct.unpack(fmt, self._tobytes(buf))[0]
            if self.has_masks:
                self.channel.set_terminator(4)  # mask
            else:
                self.channel.set_terminator(self.payload_length)

        elif self.opcode is None:
            b1, b2 = self._tobytes(buf)
            fin = b1 & FIN
            self.opcode = b1 & OPCODE
            #print (fin, self.opcode)
            if self.opcode == OPCODE_CLOSE:
                self.close()
                return

            mask = b2 & MASKED
            if not mask:
                self.has_masks = False

            payload_length = b2 & PAYLOAD_LEN
            if payload_length == 0:
                self.opcode = None
                self.has_masks = True
                self.channel.set_terminator(2)
                return

            if payload_length < 126:
                self.payload_length = payload_length
                if self.has_masks:
                    self.channel.set_terminator(4)  # mask
                else:
                    self.channel.set_terminator(self.payload_length)
            elif payload_length == 126:
                self.channel.set_terminator(2)  # short length
            elif payload_length == 127:
                self.channel.set_terminator(8)  # long length

        else:
            raise AssertionError("Web socket frame decode error")

    def build_data(self, message, op_code):
        if has_werkzeug and isinstance(message, ClosingIterator):
            msgs = []
            for msg in message:
                msgs.append(msg)
            message = b''.join(msgs).decode("utf8")
        else:
            message = self.message_encode(message)

        if op_code == -1:
            if type(message) is str:
                op_code = OPCODE_TEXT
            elif type(message) is bytes:
                op_code = OPCODE_BINARY
            if op_code == -1:
                op_code = self.default_op_code
        return message, op_code

    def send(self, message, op_code=-1):
        if not self.channel: return
        message, op_code = self.build_data(message, op_code)
        header = bytearray()
        if strutil.is_encodable(message):
            payload = message.encode("utf8")
        else:
            payload = message
        payload_length = len(payload)

        # Normal payload
        if payload_length <= 125:
            header.append(FIN | op_code)
            header.append(payload_length)

        # Extended payload
        elif payload_length >= 126 and payload_length <= 65535:
            header.append(FIN | op_code)
            header.append(PAYLOAD_LEN_EXT16)
            header.extend(struct.pack(">H", payload_length))

        # Huge extended payload
        elif payload_length < 18446744073709551616:
            header.append(FIN | op_code)
            header.append(PAYLOAD_LEN_EXT64)
            header.extend(struct.pack(">Q", payload_length))

        else:
            raise AssertionError(
                "Message is too big. Consider breaking it into chunks.")

        m = header + payload
        self._send(m)

    def _send(self, msg):
        if self.channel:
            if hasattr(self.wasc, 'threads'):
                trigger.wakeup(lambda p=self.channel, d=msg: (p.push(d), ))
            else:
                self.channel.push(msg)

    def handle_message(self, msg):
        raise NotImplementedError("handle_message() not implemented")
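
For completeness, the masked client frames that found_terminator above decodes can be produced with a few lines of struct packing. A hedged sketch assuming the usual RFC 6455 bit layout (FIN = 0x80, text opcode 0x1, mask bit 0x80); the fixed masking key is for readability only:

import struct

def build_masked_frame(payload, masking_key=b'\x01\x02\x03\x04'):
    payload = bytearray(payload)
    key = bytearray(masking_key)
    header = bytearray([0x80 | 0x1])          # FIN | OPCODE_TEXT
    if len(payload) < 126:
        header.append(0x80 | len(payload))    # MASKED | 7-bit length
    elif len(payload) <= 0xFFFF:
        header.append(0x80 | 126)             # MASKED | 16-bit length marker
        header.extend(struct.pack('>H', len(payload)))
    else:
        header.append(0x80 | 127)             # MASKED | 64-bit length marker
        header.extend(struct.pack('>Q', len(payload)))
    masked = bytearray(b ^ key[i % 4] for i, b in enumerate(payload))
    return bytes(header + key + masked)

print(repr(build_masked_frame(b'hello')))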