def main(name):
    """Publish periodic heartbeat telemetry on the aircomm broadcast socket.

    Starts the background monitor threads (cpuavg, pmreader, gps), then loops
    forever sending one msgpack-encoded heartbeat per second.

    :param name: component name used to look up this process's socket map
    """
    global socket_map, gps_lock, font, caution_written
    socket_map = generate_map(name)
    gps_lock = Lock()
    # Start monitor daemons; daemon=True so they die with the main thread.
    for worker in (cpuavg, pmreader, gps):
        t = Thread(target=worker)
        t.daemon = True
        t.start()
    socket = generate_map('aircomm_app')['out']
    packer = Packer(use_single_float=True)
    while True:
        try:
            data = [BCAST_NOFW, HEARTBEAT, int(voltage * 10), int(current * 10),
                    int(load), mem_used(), critical]
            with gps_lock:
                try:
                    # Append position only when we have a 2D (or better) fix.
                    if gps_data.fix >= 2:
                        data += [gps_data.lon, gps_data.lat]
                except Exception:
                    # best-effort: gps_data may not be populated yet
                    pass
            socket.send(packer.pack(data))
        except Exception:
            # Heartbeat is best-effort; never let a send failure kill the loop.
            # (Fixed: original used Python-2-only `except Exception, e` syntax.)
            pass
        sleep(1.0)
def __init__(self, mod_conf, pub_endpoint, serialize_to):
    """Create the ZMQ publisher broker module.

    :param mod_conf: module configuration forwarded to BaseModule
    :param pub_endpoint: ZMQ endpoint the PUB socket binds to
    :param serialize_to: wire format, either "msgpack" or "json"
    :raises Exception: if serialize_to is not a supported format
    """
    from zmq import Context, PUB
    BaseModule.__init__(self, mod_conf)
    self.pub_endpoint = pub_endpoint
    self.serialize_to = serialize_to
    logger.info("[Zmq Broker] Binding to endpoint " + self.pub_endpoint)

    # This doesn't work properly in init()
    # sometimes it ends up beings called several
    # times and the address becomes already in use.
    self.context = Context()
    self.s_pub = self.context.socket(PUB)
    self.s_pub.bind(self.pub_endpoint)

    # Load the correct serialization function
    # depending on the serialization method
    # chosen in the configuration.
    if self.serialize_to == "msgpack":
        from msgpack import Packer
        # default= hook serializes types msgpack cannot pack natively
        packer = Packer(default=encode_monitoring_data)
        self.serialize = lambda msg: packer.pack(msg)
    elif self.serialize_to == "json":
        self.serialize = lambda msg: json.dumps(msg, cls=SetEncoder)
    else:
        raise Exception(
            "[Zmq Broker] No valid serialization method defined (Got " +
            str(self.serialize_to) + ")!")
def encode_command(command_code, *arguments):
    """Serialize a command frame: protocol version, command code, then the
    arguments packed as a single list."""
    packer = Packer(use_bin_type=True)
    parts = [
        packer.pack(COMMAND_SET_VERSION),
        packer.pack(command_code),
        packer.pack(list(arguments)),
    ]
    return b''.join(parts)
def __init__(self, port, key):
    """Set up the task server: listening socket, codecs and worker plumbing.

    :param port: TCP port the server socket listens on
    :param key: shared secret; encoded to bytes for the encoder/decoder
    """
    self._CHUNK_SIZE = 2048       # bytes requested per socket read
    self._shouldRun = True        # cleared to stop the serve loop
    self._port = port
    self._key = key.encode()
    self._server = self._createServerSocket()
    # socket bookkeeping: the listening socket starts as the only input
    self._inputs = [self._server]
    self._outputs = []
    # single client at a time; populated on accept
    self._client = None
    self._clientAddress = None
    self._packer = Packer()
    self._unpacker = Unpacker()
    # encoder/decoder are created later — presumably once the key/handshake
    # is established; confirm against the connection-setup code
    self._encoder = None
    self._decoder = None
    self._logger = logging.getLogger(__name__).getChild("Server")
    self._messageDispatcher = MessageDispatcher()
    self._workerPool = WorkerPool()
    self._taskArchive = TaskArchive()
def _schedule(self, batch):
    """
    Row - portion of the queue for each partition id created at some point in time
    Row Key - partition id + score interval + timestamp
    Column Qualifier - discrete score (first three digits after dot, e.g. 0.001_0.002, 0.002_0.003, ...)
    Value - QueueCell msgpack blob

    Where score is mapped from 0.0 to 1.0
    score intervals are [0.01-0.02) [0.02-0.03) [0.03-0.04) ... [0.99-1.00]
    timestamp - the time when links was scheduled for retrieval.

    :param batch: list of tuples(score, fingerprint, domain, url)
    :return:
    """
    def get_interval(score, resolution):
        # Map score in [0.0, 1.0] to its interval of width `resolution`.
        if score < 0.0 or score > 1.0:
            raise OverflowError
        i = int(score / resolution)
        if i % 10 == 0 and i > 0:
            i = i - 1  # last interval is inclusive from right
        return (i * resolution, (i + 1) * resolution)

    timestamp = int(time() * 1E+6)
    data = dict()
    for score, fingerprint, domain, url in batch:
        # isinstance() is the idiomatic type check (was `type(domain) == dict`).
        if isinstance(domain, dict):
            partition_id = self.partitioner.partition(domain['name'], self.partitions)
            host_crc32 = get_crc32(domain['name'])
        elif isinstance(domain, int):
            partition_id = self.partitioner.partition_by_hash(domain, self.partitions)
            host_crc32 = domain
        else:
            raise TypeError("domain of unknown type.")
        item = (unhexlify(fingerprint), host_crc32, url, score)
        score = 1 - score  # because of lexicographical sort in HBase
        rk = "%d_%s_%d" % (partition_id, "%0.2f_%0.2f" % get_interval(score, 0.01), timestamp)
        data.setdefault(rk, []).append((score, item))

    table = self.connection.table(self.table_name)
    with table.batch(transaction=True) as b:
        for rk, tuples in data.iteritems():
            obj = dict()
            for score, item in tuples:
                column = 'f:%0.3f_%0.3f' % get_interval(score, 0.001)
                obj.setdefault(column, []).append(item)
            final = dict()
            packer = Packer()
            for column, items in obj.iteritems():
                # Concatenate packed items into one cell blob per column.
                stream = BytesIO()
                for item in items:
                    stream.write(packer.pack(item))
                final[column] = stream.getvalue()
            b.put(rk, final)
def _schedule(self, batch, timestamp):
    """
    Row - portion of the queue for each partition id created at some point in time
    Row Key - partition id + score interval + random_str
    Column Qualifier - discrete score (first three digits after dot, e.g. 0.001_0.002, 0.002_0.003, ...)
    Value - QueueCell msgpack blob

    Where score is mapped from 0.0 to 1.0
    score intervals are [0.01-0.02) [0.02-0.03) [0.03-0.04) ... [0.99-1.00]
    random_str - the time when links was scheduled for retrieval, microsecs

    :param batch: iterable of Request objects
    :return:
    """
    def get_interval(score, resolution):
        # Map score in [0.0, 1.0] to its interval of width `resolution`.
        if score < 0.0 or score > 1.0:
            raise OverflowError
        i = int(score / resolution)
        if i % 10 == 0 and i > 0:
            i = i - 1  # last interval is inclusive from right
        return (i * resolution, (i + 1) * resolution)

    random_str = int(time() * 1E+6)
    data = dict()
    for request, score in batch:
        domain = request.meta[b'domain']
        fingerprint = request.meta[b'fingerprint']
        key = self.partitioner.get_key(request)
        partition_id = self.partitioner.partition(key)
        # isinstance() is the idiomatic type check (was `type(domain) == int`).
        host_crc32 = domain if isinstance(domain, int) else get_crc32(key)
        item = (unhexlify(fingerprint), host_crc32, self.encoder.encode_request(request), score)
        score = 1 - score  # because of lexicographical sort in HBase
        rk = "%d_%s_%d" % (partition_id, "%0.2f_%0.2f" % get_interval(score, 0.01), random_str)
        data.setdefault(rk, []).append((score, item))

    table = self.connection.table(self.table_name)
    with table.batch(transaction=True) as b:
        for rk, tuples in six.iteritems(data):
            obj = dict()
            for score, item in tuples:
                column = 'f:%0.3f_%0.3f' % get_interval(score, 0.001)
                obj.setdefault(column, []).append(item)
            final = dict()
            packer = Packer()
            for column, items in six.iteritems(obj):
                # Concatenate packed items into one cell blob per column.
                stream = BytesIO()
                for item in items:
                    stream.write(packer.pack(item))
                final[column] = stream.getvalue()
            final[b'f:t'] = str(timestamp)
            b.put(rk, final)
def __init__(self, event_loop):
    """Wrap `event_loop` on a msgpack-aware interface."""
    self.loop = event_loop
    # encoding/unicode_errors are the pre-1.0 msgpack string options
    self._packer = Packer(encoding='utf-8',
                          unicode_errors=unicode_errors_default)
    self._unpacker = Unpacker()
    self._message_cb = None  # set while a run() is active
def Unpacker():
    """Build this module's msgpack codec and return an Unpacker factory.

    Side effects: rebinds the module-level names ``registerExtType`` and
    ``packb``.  NOTE(review): presumably invoked once at import time so the
    returned partial replaces this function — confirm at the call site.
    """
    global registerExtType, packb
    from msgpack import ExtType, unpackb, Packer, Unpacker
    # Decoders indexed by ExtType code; the list index *is* the code.
    ext_type_dict = []
    kw = dict(use_bin_type=True)
    pack_ext = Packer(**kw).pack

    def registerExtType(getstate, make):
        # Allocate the next ExtType code and return the matching encoder.
        code = len(ext_type_dict)
        ext_type_dict.append(lambda data: make(unpackb(data, use_list=False)))
        return lambda obj: ExtType(code, pack_ext(getstate(obj)))

    iterable_types = set, tuple

    def default(obj):
        # Packer fallback: objects expose _pack(); bare sets/tuples
        # degrade to lists.
        try:
            pack = obj._pack
        except AttributeError:
            assert type(obj) in iterable_types, type(obj)
            return list(obj)
        return pack()

    # Packer is not reentrant, so serialize packb() calls.
    lock = threading.Lock()
    pack = Packer(default, strict_types=True, **kw).pack

    def packb(obj):
        with lock:  # in case that 'default' is called
            return pack(obj)

    return partial(Unpacker, use_list=False,
                   max_buffer_size=UNPACK_BUFFER_SIZE,
                   ext_hook=lambda code, data: ext_type_dict[code](data))
def __init__(self, mod_conf, pub_endpoint, serialize_to):
    """Create the ZMQ publisher broker module.

    :param mod_conf: module configuration forwarded to BaseModule
    :param pub_endpoint: ZMQ endpoint the PUB socket binds to
    :param serialize_to: wire format, either "msgpack" or "json"
    :raises Exception: if serialize_to is not a supported format
    """
    from zmq import Context, PUB
    BaseModule.__init__(self, mod_conf)
    self.pub_endpoint = pub_endpoint
    self.serialize_to = serialize_to
    logger.info("[Zmq Broker] Binding to endpoint " + self.pub_endpoint)

    # This doesn't work properly in init()
    # sometimes it ends up beings called several
    # times and the address becomes already in use.
    self.context = Context()
    self.s_pub = self.context.socket(PUB)
    self.s_pub.bind(self.pub_endpoint)

    # Load the correct serialization function
    # depending on the serialization method
    # chosen in the configuration.
    if self.serialize_to == "msgpack":
        from msgpack import Packer
        # default= hook serializes types msgpack cannot pack natively
        packer = Packer(default=encode_monitoring_data)
        self.serialize = lambda msg: packer.pack(msg)
    elif self.serialize_to == "json":
        self.serialize = lambda msg: json.dumps(msg, cls=SetEncoder)
    else:
        raise Exception("[Zmq Broker] No valid serialization method defined (Got " + str(self.serialize_to) + ")!")
def testPackUnicode():
    # Round-trip unicode samples through both the one-shot and the
    # streaming (Packer/Unpacker) APIs.
    samples = [
        six.u(""),
        six.u("abcd"),
        [six.u("defgh")],
        six.u("Русский текст"),
    ]
    for sample in samples:
        decoded = unpackb(packb(sample, encoding="utf-8"), use_list=1, encoding="utf-8")
        assert_equal(decoded, sample)
        packer = Packer(encoding="utf-8")
        payload = packer.pack(sample)
        decoded = Unpacker(BytesIO(payload), encoding="utf-8", use_list=1).unpack()
        assert_equal(decoded, sample)
def test_get_buffer():
    # With autoreset disabled the packed data stays in the packer's internal
    # buffer; getbuffer() must expose exactly what packb() would produce.
    packer = Packer(autoreset=0, use_bin_type=True)
    packer.pack([1, 2])
    sink = BytesIO()
    sink.write(packer.getbuffer())
    assert sink.getvalue() == packb([1, 2], use_bin_type=True)
def testPackUnicode():
    # Unicode round trips via both the one-shot and streaming APIs.
    samples = ["", "abcd", ["defgh"], "Русский текст"]
    for sample in samples:
        assert unpackb(packb(sample), use_list=1, raw=False) == sample
        payload = Packer().pack(sample)
        decoded = Unpacker(BytesIO(payload), raw=False, use_list=1).unpack()
        assert decoded == sample
def testPackUnicode():
    # Unicode round trips, exercising the explicit-encoding (pre-1.0) API.
    samples = ["", "abcd", ["defgh"], "Русский текст"]
    for sample in samples:
        assert unpackb(packb(sample, encoding='utf-8'), use_list=1, encoding='utf-8') == sample
        payload = Packer(encoding='utf-8').pack(sample)
        decoded = Unpacker(BytesIO(payload), encoding=str('utf-8'), use_list=1).unpack()
        assert decoded == sample
def __init__(self, fmt, stream, **kwargs):
    """Create MessagePack Encoder.

    :param fmt: format descriptor (its structure drives how items are emitted)
    :param stream: Output stream
    :type stream: file or io.IOBase
    :param kwargs: extra options forwarded verbatim to msgpack's Packer
    """
    self.format = fmt
    self.stream = stream
    self.packer = Packer(**kwargs)
class MessagePackEncoder(object):
    """MessagePack Encoder."""

    def __init__(self, fmt, stream, **kwargs):
        """Create MessagePack Encoder.

        :param stream: Output stream
        :type stream: file or io.IOBase
        """
        self.format = fmt
        self.stream = stream
        self.packer = Packer(**kwargs)

    def encode(self, items):
        """Encode items.

        :param items: Items
        :type items: generator
        """
        structure = self.format.structure
        if structure == MessagePackCollectionStructure.Dictionary:
            return self._encode_dictionary(items)
        if structure == MessagePackCollectionStructure.List:
            return self._encode_list(items)
        if structure == MessagePackCollectionStructure.Objects:
            return self._encode_objects(items)
        raise ValueError('Invalid encoder mode: %s' % (structure, ))

    def _encode_dictionary(self, items):
        """Encode :code:`items` to dictionary.

        :param items: Items
        :type items: generator
        """
        self.stream.write(self.packer.pack(DictionaryEmitter(items)))

    def _encode_list(self, items):
        """Encode :code:`items` to list.

        :param items: Items
        :type items: generator
        """
        raise NotImplementedError

    def _encode_objects(self, items):
        """Encode :code:`items` to individual objects.

        :param items: Items
        :type items: generator
        """
        # keys are ignored; each value is packed as its own document
        for _key, value in items:
            self.stream.write(self.packer.pack(value))
def testPackUnicode():
    # Unicode round trips through one-shot and streaming APIs (six-based).
    samples = [
        six.u(""),
        six.u("abcd"),
        [six.u("defgh")],
        six.u("Русский текст"),
    ]
    for sample in samples:
        assert unpackb(packb(sample, encoding='utf-8'), use_list=1, encoding='utf-8') == sample
        payload = Packer(encoding='utf-8').pack(sample)
        assert Unpacker(BytesIO(payload), encoding='utf-8', use_list=1).unpack() == sample
def testPackUnicode():
    # Unicode round trips; the tuple sample checks sequence handling too.
    samples = [
        six.u(""),
        six.u("abcd"),
        (six.u("defgh"),),
        six.u("Русский текст"),
    ]
    for sample in samples:
        assert_equal(unpackb(packb(sample, encoding='utf-8'), encoding='utf-8'), sample)
        payload = Packer(encoding='utf-8').pack(sample)
        assert_equal(Unpacker(BytesIO(payload), encoding='utf-8').unpack(), sample)
def gen_segment(name, method_suffix, arg=None, append_arg_to_name=True):
    """Pack one value via Packer.pack<method_suffix> and describe the result
    as an ordered record with a base64 rendering of the packed bytes."""
    packer = Packer()
    pack_method = getattr(packer, 'pack' + method_suffix)
    if arg is None:
        pack_method()
    else:
        pack_method(arg)
    if append_arg_to_name:
        name = name + ' (' + str(arg) + ')'
    encoded = packer.get_bytes().encode('base64', 'strict').replace('\n', '')
    return OrderedDict([
        ('name', name),
        ('method_suffix', method_suffix),
        ('b64', encoded),
    ])
def handle(self):
    """Serve one client: unpack each incoming request, process it, and send
    the msgpack-encoded response back until the peer disconnects."""
    packer = Packer(use_bin_type=True)
    # Bounded buffer so a misbehaving client cannot exhaust memory.
    unpacker = Unpacker(raw=False, max_buffer_size=10 * 1024 * 1024)
    while True:
        buf = self.cli_sock.recv(1024)
        if not buf:
            # peer closed the connection
            break
        unpacker.feed(buf)
        # One recv() may complete zero or more whole requests.
        for request in unpacker:
            response = self.process(request)
            self.cli_sock.sendall(packer.pack(response))
def testPackUnicode():
    # Unicode round trips via the packs/unpacks helpers and streaming API.
    samples = [
        "",
        "abcd",
        ("defgh",),
        "Русский текст",
    ]
    for sample in samples:
        assert_equal(unpacks(packs(sample, encoding='utf-8'), encoding='utf-8'), sample)
        payload = Packer(encoding='utf-8').pack(sample)
        assert_equal(Unpacker(BytesIO(payload), encoding='utf-8').unpack(), sample)
def testArraySize(sizes=[0, 5, 50, 1000]):
    # Write several arrays back-to-back using explicit headers, then decode
    # and compare against range() lists.
    buf = BytesIO()
    packer = Packer()
    for size in sizes:
        buf.write(packer.pack_array_header(size))
        for value in range(size):
            buf.write(packer.pack(value))
    buf.seek(0)
    unpacker = Unpacker(buf, use_list=1)
    for size in sizes:
        assert unpacker.unpack() == list(range(size))
def log_events():
    """Subscribe to the in-process raw_events feed and append every event to
    a timestamped msgpack log file (runs forever)."""
    sock = ctx.socket(zmq.SUB)
    sock.bind("inproc://raw_events")
    sock.setsockopt(zmq.SUBSCRIBE, "")  # empty prefix: receive everything
    packer = Packer()
    with open('tweets.%d.msgpack' % int(time.time()), 'wb') as f:
        # intentionally writing the raw bytes and not parsing it here
        while True:
            msg = sock.recv()
            LOG.debug('event received: %d bytes', len(msg))
            f.write(packer.pack({'time' : int(time.time()), 'event' : msg}))
            f.flush()  # keep the log durable after every event
def __init__(self, config, logger=None):
    """Initialize the honeypot-side client state and msgpack codecs.

    :param config: configuration forwarded to BlacknetSSLInterface
    :param logger: optional logger instance
    """
    BlacknetSSLInterface.__init__(self, config, 'honeypot')
    self.__logger = logger
    # server connection state; filled in when a connection is established
    self.__server_hostname = None
    self.__server_address = None
    self.__server_socket = None
    self.__server_error = False
    self.__client_name = None
    # separate locks: reconnection is re-entrant, sending is exclusive
    self.__connect_lock = RLock()
    self.__send_lock = Lock()
    self.__packer = Packer(encoding='utf-8')
    self.__unpacker = Unpacker(encoding='utf-8')
def testMapSize(sizes=[0, 5, 50, 1000]):
    # Emit map headers followed by `size` key/value pairs, then verify the
    # decoder rebuilds the same dicts.
    buf = BytesIO()
    packer = Packer()
    for size in sizes:
        buf.write(packer.pack_map_header(size))
        for i in range(size):
            buf.write(packer.pack(i))       # key
            buf.write(packer.pack(i * 2))   # value
    buf.seek(0)
    unpacker = Unpacker(buf)
    for size in sizes:
        assert unpacker.unpack() == {i: i * 2 for i in range(size)}
def test_manualreset(sizes=[0, 5, 50, 1000]):
    # With autoreset off, packed output accumulates inside the packer until
    # reset() clears it.
    packer = Packer(autoreset=False)
    for size in sizes:
        packer.pack_array_header(size)
        for value in range(size):
            packer.pack(value)
    unpacker = Unpacker(BytesIO(packer.bytes()), use_list=1)
    for size in sizes:
        assert unpacker.unpack() == list(range(size))
    packer.reset()
    assert packer.bytes() == b''
def test_packer_unpacker(self):
    # Three values streamed through one buffer must round-trip in order.
    stream = BytesIO()
    packer = Packer()
    for value in (1, '2', {}):
        stream.write(packer.pack(value))
    stream.seek(0)
    unpacker = Unpacker(stream)
    self.assertEqual(1, unpacker.unpack())
    self.assertEqual('2', unpacker.unpack())
    self.assertTrue(isinstance(unpacker.unpack(), dict))
def request(self, command, **params):
    """Send a BackdoorRequest and return the first parsed BackdoorResponse.

    :param command: backdoor command name
    :param params: keyword arguments forwarded in the request payload
    :return: BackdoorResponse, or None if the server closes the connection
             before a complete response arrives
    """
    packer = Packer(use_bin_type=True)
    # Bounded buffer guards against oversized replies.
    unpacker = Unpacker(raw=False, max_buffer_size=10 * 1024 * 1024)
    request = BackdoorRequest(command, params)
    LOG.debug(f'backdoor client sending request {request}')
    self.sock.sendall(packer.pack(request.to_dict()))
    while True:
        buf = self.sock.recv(1024)
        if not buf:
            break
        unpacker.feed(buf)
        for response in unpacker:
            response = BackdoorResponse(response['ok'], response['content'])
            LOG.debug(f'backdoor client received response {response}')
            # only the first complete response is consumed and returned
            return response
def __init__(self, event_loop):
    """Wrap `event_loop` on a msgpack-aware interface."""
    self._event_loop = event_loop
    # messages posted from other threads, drained by the run loop
    self._posted = deque()
    self._packer = Packer(use_bin_type=True)
    self._unpacker = Unpacker()
    self._message_cb = None  # set while run() is active
    self._stopped = False
class MsgpackStream(object):
    """Two-way msgpack stream that wraps a event loop byte stream.

    This wraps the event loop interface for reading/writing bytes and
    exposes an interface for reading/writing msgpack documents.
    """

    def __init__(self, event_loop):
        """Wrap `event_loop` on a msgpack-aware interface."""
        self.loop = event_loop
        self._packer = Packer(unicode_errors=unicode_errors_default)
        self._unpacker = Unpacker(unicode_errors=unicode_errors_default)
        self._message_cb = None

    def threadsafe_call(self, fn):
        """Wrapper around `BaseEventLoop.threadsafe_call`."""
        self.loop.threadsafe_call(fn)

    def send(self, msg):
        """Queue `msg` for sending to Nvim."""
        self.loop.send(self._packer.pack(msg))

    def run(self, message_cb):
        """Run the event loop to receive messages from Nvim.

        While the event loop is running, `message_cb` will be called whenever
        a message has been successfully parsed from the input stream.
        """
        self._message_cb = message_cb
        self.loop.run(self._on_data)
        self._message_cb = None

    def stop(self):
        """Stop the event loop."""
        self.loop.stop()

    def close(self):
        """Close the event loop."""
        self.loop.close()

    def _on_data(self, data):
        # Feed raw bytes to the streaming unpacker and dispatch every
        # complete message; StopIteration means "need more bytes".
        # (Cleaned up: removed leftover `pass # replaces next logging
        # statement` placeholder statements.)
        self._unpacker.feed(data)
        while True:
            try:
                msg = next(self._unpacker)
                self._message_cb(msg)
            except StopIteration:
                break
def test_basic_segment(key):
    # Decode the stored segment, re-encode it with the matching pack method,
    # and verify the bytes round-trip exactly.
    method_suffix = test_segment['suffix']
    read_bytes = test_segment['b64'].decode('base64', 'strict')
    unpacker = Unpacker(read_bytes)
    read_value = getattr(unpacker, 'unpack' + method_suffix)()
    logging.debug('[%s] read %s', key, read_value)
    packer = Packer()
    pack_method = getattr(packer, 'pack' + method_suffix)
    if read_value is None:
        pack_method()
    else:
        pack_method(read_value)
    write_bytes = packer.get_bytes()
    out_b64 = write_bytes.encode('base64', 'strict').replace('\n', '')
    if out_b64 != test_segment['b64']:
        # dump a byte-level diff before the assertion fires
        compare_bytes(convert_bytes(write_bytes), convert_bytes(read_bytes))
    assert out_b64 == test_segment['b64']
def __init__(self, bns, client):
    """Set up one server thread for an accepted honeypot client.

    :param bns: owning BlacknetServer (config, logger, SSL context, blacklist)
    :param client: the just-accepted client socket
    """
    super(BlacknetServerThread, self).__init__()
    # message-type -> handler dispatch table
    handler = {
        BlacknetMsgType.HELLO: self.handle_hello,
        BlacknetMsgType.CLIENT_NAME: self.handle_client_name,
        BlacknetMsgType.SSH_CREDENTIAL: self.handle_ssh_credential,
        BlacknetMsgType.SSH_PUBLICKEY: self.handle_ssh_publickey,
        BlacknetMsgType.PING: self.handle_ping,
        BlacknetMsgType.GOODBYE: self.handle_goodbye,
    }
    self.handler = handler
    self.started = False
    self.database = BlacknetDatabase(bns.config, bns.logger)
    self.__blacklist = bns.blacklist
    self.__client = None
    self.__connect_lock = Lock()
    self.__cursor = None
    self.__logger = bns.logger
    self.__mysql_error = 0
    self.__session_interval = bns.session_interval
    self.__unpacker = Unpacker(encoding='utf-8')
    self.__packer = Packer(encoding='utf-8')
    # session statistics
    self.__dropped_count = 0
    self.__attempt_count = 0
    # per-session lookup caches
    self.__atk_cache = {}
    self.__ses_cache = {}
    self.__key_cache = {}
    self.__test_mode = bns.test_mode
    peer = client.getpeername()
    self.__peer_ip = peer[0] if peer else "local"
    # UNIX-domain clients are local and skip the SSL wrap below
    self.__use_ssl = (client.family != socket.AF_UNIX)
    client.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
    if self.__use_ssl:
        client = bns.ssl_context.wrap_socket(client, server_side=True)
    self.__client = client
    self.name = self.peername
    self.log_info("starting session (SSL: %s)" % self.__use_ssl)
class MsgpackStream(object):
    """Two-way msgpack stream that wraps a event loop byte stream.

    This wraps the event loop interface for reading/writing bytes and
    exposes an interface for reading/writing msgpack documents.
    """

    def __init__(self, event_loop):
        """Wrap `event_loop` on a msgpack-aware interface."""
        self.loop = event_loop
        # encoding/unicode_errors are the pre-1.0 msgpack string options
        self._packer = Packer(encoding='utf-8',
                              unicode_errors=unicode_errors_default)
        self._unpacker = Unpacker()
        self._message_cb = None  # set while run() is active

    def threadsafe_call(self, fn):
        """Wrapper around `BaseEventLoop.threadsafe_call`."""
        self.loop.threadsafe_call(fn)

    def send(self, msg):
        """Queue `msg` for sending to Nvim."""
        debug('sent %s', msg)
        self.loop.send(self._packer.pack(msg))

    def run(self, message_cb):
        """Run the event loop to receive messages from Nvim.

        While the event loop is running, `message_cb` will be called whenever
        a message has been successfully parsed from the input stream.
        """
        self._message_cb = message_cb
        self.loop.run(self._on_data)
        self._message_cb = None

    def stop(self):
        """Stop the event loop."""
        self.loop.stop()

    def close(self):
        """Close the event loop."""
        self.loop.close()

    def _on_data(self, data):
        # Feed raw bytes to the streaming unpacker; dispatch every complete
        # message, stopping when the unpacker needs more input.
        self._unpacker.feed(data)
        while True:
            try:
                debug('waiting for message...')
                msg = next(self._unpacker)
                debug('received message: %s', msg)
                self._message_cb(msg)
            except StopIteration:
                debug('unpacker needs more data...')
                break
class File(object):
    """Append-only msgpack record writer over a raw file object."""

    def __init__(self, io):
        self.io = io
        self.packer = Packer(use_bin_type=True)

    def write(self, *args):
        # Python 2 `buffer` objects are not packable; copy them to bytes.
        normalized = tuple(bytes(value) if isinstance(value, buffer) else value
                           for value in args)
        self.io.write(self.packer.pack(normalized))

    def close(self):
        self.io.flush()
        self.io.close()
class File(object):
    # Append-only msgpack record writer over a raw file object.

    def __init__(self, io):
        self.io = io
        self.packer = Packer(use_bin_type=True)

    def write(self, *args):
        # Each call appends one packed tuple of the positional arguments.
        self.io.write(self.packer.pack(args))

    def flush(self):
        self.io.flush()

    def close(self):
        self.io.close()
def __call__(self, iterable):
    """Fill the queue with given objects

    Hoping than msgpack.Packer gets a streaming API, 'iterable' should
    not be split (i.e. this method should be called only once, like
    __iter__).
    """
    # NOTE(review): this writes packed records into a fixed-size shared
    # ring buffer (`array`), coordinating with a reader via `lock` plus the
    # get/put locks — confirm exact handshake against the consumer side.
    pack = Packer(use_bin_type=True).pack
    max_size = self._max_size
    array = self._array
    pos = self._pos
    size = self._size
    lock, get_lock, put_lock = self._locks
    left = 0  # bytes of free space known to be available in the ring
    for data in iterable:
        data = pack(data)
        n = len(data)  # bytes of this record still to be written
        i = 0          # offset of the next unwritten byte in `data`
        while 1:
            if not left:
                # Ring looks full: re-read pos/size under the lock and
                # block on put_lock until the reader frees some space.
                while 1:
                    with lock:
                        p = pos.value
                        j = size.value
                        left = max_size - j
                    if left:
                        break
                    put_lock.acquire()
            # Compute the write position (wrapping) and copy as much of the
            # record as fits before the end of the buffer / free space.
            p += j
            if p >= max_size:
                p -= max_size
            e = min(p + min(n, left), max_size)
            j = e - p
            array[p:e] = data[i:i+j]
            n -= j
            i += j
            # Publish the new size; wake the reader if the ring was empty.
            with lock:
                p = pos.value
                s = size.value
                j += s
                size.value = j
                if not s:
                    get_lock.acquire(0)
                    get_lock.release()
            p += j
            if p >= max_size:
                p -= max_size
            left = max_size - j
            if not n:
                break
def __init__(self, event_loop):
    """Wrap `event_loop` on a msgpack-aware interface."""
    self._event_loop = event_loop
    # encoding=None: pre-1.0 msgpack option — presumably to disable string
    # encoding on pack; confirm against the msgpack version in use
    self._packer = Packer(use_bin_type=True, encoding=None)
    self._unpacker = Unpacker()
    self._message_cb = None  # set while run() is active
def test_pairlist():
    # pack_map_pairs preserves insertion order; object_pairs_hook=list
    # recovers the pairs without collapsing them into a dict.
    pairs = [(b'a', 1), (2, b'b'), (b'foo', b'bar')]
    packed = Packer().pack_map_pairs(pairs)
    assert unpackb(packed, object_pairs_hook=list) == pairs
def default(self, obj):
    # NOTE(review): real/imag are hardcoded to 1 and 2 rather than taken
    # from `obj` — presumably deliberate fixture data for a bad-hook test;
    # confirm before reusing this encoder generally.
    if isinstance(obj, complex):
        return {b'__complex__': True, b'real': 1, b'imag': 2}
    # Defer everything else to the base Packer hook.
    return Packer.default(self, obj)
def test_bad_hook():
    # A plain Packer has no default hook, so packing a complex number is
    # expected to fail — presumably the surrounding harness asserts that an
    # exception is raised here (confirm against the test decorators).
    cp = Packer()
    packed = cp.pack([3, 1+2j])
    unpacked = unpacks(packed)
def test_map_header():
    p = Packer()
    # 2**32 - 1 is the largest header the 32-bit length field can carry.
    p.pack_map_header(2**32-1)
    # One past the limit must be rejected.
    with pytest.raises((OverflowError, ValueError)):
        p.pack_array_header(2**32)
def test_map_header():
    p = Packer()
    # 2**32 - 1 is the largest header the 32-bit length field can carry.
    p.pack_map_header(2**32-1)
    # One past the limit must raise the library's PackValueError.
    with pytest.raises(PackValueError):
        p.pack_array_header(2**32)
def __init__(self, default):
    """Remember the fallback hook and hand it to the base Packer."""
    self.default = default
    Packer.__init__(self, default=default)
def __init__(self):
    # Register this subclass's `default` method as the pack fallback hook.
    Packer.__init__(self, default=self.default)
def test_pairlist():
    # pack_map_pairs preserves insertion order; strict_map_key=False allows
    # the mixed bytes/int keys.
    pairs = [(b"a", 1), (2, b"b"), (b"foo", b"bar")]
    packed = Packer().pack_map_pairs(pairs)
    assert unpackb(packed, object_pairs_hook=list, strict_map_key=False) == pairs
def set_packer_encoding(self, encoding):
    """Switch encoding for Unicode strings."""
    # The packer is recreated because encoding is supplied at construction.
    self._packer = Packer(use_bin_type=True, encoding=encoding)
class MsgpackStream(object):
    """Two-way msgpack stream that wraps a event loop byte stream.

    This wraps the event loop interface for reading/writing bytes and
    exposes an interface for reading/writing msgpack documents.
    """

    def __init__(self, event_loop):
        """Wrap `event_loop` on a msgpack-aware interface."""
        self._event_loop = event_loop
        # messages posted from other threads, drained by _run()
        self._posted = deque()
        self._packer = Packer(use_bin_type=True)
        self._unpacker = Unpacker()
        self._message_cb = None  # set while run() is active
        self._stopped = False

    def post(self, msg):
        """Post `msg` to the read queue of the `MsgpackStream` instance.

        Use the event loop `interrupt()` method to push msgpack objects from
        other threads.
        """
        self._posted.append(msg)
        # wake the event loop so _run() notices the queued message
        self._event_loop.interrupt()

    def send(self, msg):
        """Queue `msg` for sending to Nvim."""
        debug('sent %s', msg)
        self._event_loop.send(self._packer.pack(msg))

    def run(self, message_cb):
        """Run the event loop to receive messages from Nvim.

        While the event loop is running, `message_cb` will be called whenever
        a message has been successfully parsed from the input stream.
        """
        self._message_cb = message_cb
        self._run()
        self._message_cb = None

    def stop(self):
        """Stop the event loop."""
        self._stopped = True
        self._event_loop.stop()

    def _run(self):
        # Drain thread-posted messages before returning to the event loop;
        # the loop exits once stop() flips _stopped.
        self._stopped = False
        while not self._stopped:
            if self._posted:
                self._message_cb(self._posted.popleft())
                continue
            self._event_loop.run(self._on_data)

    def _on_data(self, data):
        # Feed raw bytes to the streaming unpacker; dispatch every complete
        # message, stopping when the unpacker needs more input.
        self._unpacker.feed(data)
        while True:
            try:
                debug('waiting for message...')
                msg = next(self._unpacker)
                debug('received message: %s', msg)
                self._message_cb(msg)
            except StopIteration:
                debug('unpacker needs more data...')
                break