Ejemplo n.º 1
0
class SReader():
    """ Define an asyncio msgpack stream decoder. """

    def __init__(self, reader, writer):
        """ Pass in a stream reader to unmarshall msgpack objects from. """
        self.reader = reader
        self.writer = writer
        # Custom ext-type decoder hook; make_decoder() is defined elsewhere.
        self.decoder = make_decoder()
        self.unpacker = Unpacker(ext_hook=self.decoder, encoding="utf8")
        # FIFO of fully decoded objects awaiting delivery via get().
        self.obj_buf = []


    @asyncio.coroutine
    def get(self):
        """ The co-routine providing objects. """
        # Keep reading from the transport until at least one complete
        # msgpack object has been decoded; then hand objects out FIFO.
        while len(self.obj_buf) == 0:
            buf = yield from self.reader.read(1000)

            self.unpacker.feed(buf)
            for o in self.unpacker:
                self.obj_buf.append(o)

        return self.obj_buf.pop(0)


    def put(self, obj):
        """ Write an object to the channel. """
        self.writer.write(encode(obj))
Ejemplo n.º 2
0
class MsgpackProtocol(asyncio.Protocol):
    """asyncio protocol decoding a msgpack-RPC-style request stream.

    Incoming bytes are fed to a streaming Unpacker; each complete message
    is validated and dispatched to a registered route handler.
    """

    def __init__(self, routes):
        # Mapping of method name -> handler used by routing().
        self.__routes = routes
        # NOTE: despite the name, this attribute holds a streaming
        # *Unpacker* (decoder); the name is kept for compatibility.
        self.packer = Unpacker()
        # Strong references to in-flight response tasks (see routing()).
        self._tasks = set()

    def connection_made(self, transport):
        peername = transport.get_extra_info('peername')
        print('Connection from {}'.format(peername))
        self.transport = transport
        # Greet the peer with a notification: [type=2, method, params].
        self.transport.write(packb([2, 'peername', peername]))

    def data_received(self, data):
        # Feed the raw chunk and dispatch every complete message in it.
        self.packer.feed(data)
        for msg in self.packer:
            assert_request(msg)
            self.routing(msg)

    def routing(self, cmd):
        """Dispatch request ``cmd`` == [type, msgid, method, params]."""
        assert cmd[2] in self.__routes
        task = asyncio.ensure_future(response(cmd[1], self.transport,
                                              self.__routes[cmd[2]], cmd[3]))
        # BUG FIX: the original dropped the task reference immediately
        # (unused local `t`), so the event loop could garbage-collect the
        # pending task mid-flight. Hold a reference until it completes.
        self._tasks.add(task)
        task.add_done_callback(self._tasks.discard)

    def eof_received(self):
        # Returning True keeps the transport open for half-closed peers.
        return True
Ejemplo n.º 3
0
def test3():
    """Fetch one metric's raw series from Redis and convert it to a list
    of ``{'x': timestamp, 'y': value}`` points within the start/end
    window.  Returns a ``(json_body, http_status)`` pair.

    BUG FIX: the original built ``point`` once, *before* ``datapoint``
    existed (a NameError at runtime) and never refreshed it inside the
    filter loops; each datapoint must produce its own point dict.
    """
    start = 0
    end = 10

    metric = "marion.channel-0"

    raw_series = REDIS_CONN.get(settings.FULL_NAMESPACE + metric)
    if not raw_series:
        resp = json.dumps({'results': 'Error: No metric by that name'})
        return resp, 404

    unpacker = Unpacker(use_list=False)
    unpacker.feed(raw_series)

    def _point(dp):
        # Build the chart point for a single decoded (timestamp, value).
        return {'x': dp[0], 'y': dp[1]}

    if (start is None) and (end is not None):
        timeseries = [_point(dp) for dp in unpacker if dp[0] < int(end)]
    elif (start is not None) and (end is None):
        timeseries = [_point(dp) for dp in unpacker if dp[0] > int(start)]
    elif (start is not None) and (end is not None):
        timeseries = [_point(dp) for dp in unpacker
                      if int(start) < dp[0] < int(end)]
    else:
        timeseries = [_point(dp) for dp in unpacker]

    resp = json.dumps({'results': timeseries})
    return resp, 200
Ejemplo n.º 4
0
class ClientProtocol(asyncio.Protocol):
    """Client side of a msgpack-RPC connection.

    Outgoing requests are tagged with an incrementing id; response
    messages (type == 1) resolve the future registered under that id.
    """

    def __init__(self):
        self._cpt = -1
        self.packer = Unpacker()
        self._responses = dict()

    def connection_made(self, transport):
        print("connected")
        self.transport = transport

    def request(self, name, args, f):
        """Send request ``name(args)``; ``f`` is the future to resolve."""
        print("send request")
        self._cpt += 1
        self._responses[self._cpt] = f
        payload = packb([0, self._cpt, name, args])
        self.transport.write(payload)


    def data_received(self, data):
        self.packer.feed(data)
        for message in self.packer:
            # Only response messages (type 1) carry a msgid to resolve.
            if message[0] != 1:
                continue
            self._responses[message[1]].set_result(message)

    def connection_lost(self, exc):
        pass
Ejemplo n.º 5
0
def anomalies():
    """Collect all anomaly records from the Redis ring and return them as
    a JSONP ``handle_data([...])`` payload, always with HTTP status 200.

    BUG FIX: ``dict.iteritems()`` is Python-2-only and raises
    AttributeError on Python 3; ``items()`` works on both.
    """
    resp = 'handle_data([])'
    try:
        analyzer_key_node = REDIS_BACKENDS.get_node(settings.ANALYZER_ANOMALY_KEY)
        anomaly_keys = RING.run('smembers', settings.ANALYZER_ANOMALY_KEY)
        anomalies = {}
        if not anomaly_keys:
            logger.info("No anomaly key found!")
            return resp, 200
        for key in list(anomaly_keys):
            raw_anomalies = RING.run('get', key)
            if not raw_anomalies:
                # Stale member: the value expired, so drop it from the set.
                logger.info("Can't get anomalies for key %s, removing it from set" % key)
                RING.run('srem', settings.ANALYZER_ANOMALY_KEY, key)
                continue
            unpacker = Unpacker(use_list=False)
            unpacker.feed(raw_anomalies)
            for item in unpacker:
                anomalies.update(item)
        anomaly_list = [[value, anom] for anom, value in anomalies.items()]
        if len(anomaly_list) > 0:
            anomaly_list.sort(key=operator.itemgetter(1))
            resp = 'handle_data(%s)' % anomaly_list
    except Exception as e:
        logger.error("Error getting anomalies: %s" % str(e))
    return resp, 200
Ejemplo n.º 6
0
def _in_window(ts, start, end):
    # One comparison helper replacing four near-identical filter branches;
    # bounds are exclusive, matching the original comparisons.
    if start is not None and ts <= int(start):
        return False
    if end is not None and ts >= int(end):
        return False
    return True


def data():
    """Return time-series data for the requested metric(s) as JSON.

    Query args:
        metric: single channel name; all eight channels when omitted.
        start / end: optional unix-timestamp bounds (exclusive).

    Returns a ``(json_body, http_status)`` tuple.

    BUG FIXES: ``"Error: " + e`` raised TypeError (str + Exception) —
    now uses ``str(e)``; the duplicate, unreachable second ``except``
    clause was removed.
    """
    metric = request.args.get('metric', None)
    start = request.args.get('start', None)
    end = request.args.get('end', None)

    if metric is None:
        metrics = ['channel-0', 'channel-1', 'channel-2', 'channel-3',
                   'channel-4', 'channel-5', 'channel-6', 'channel-7']
    else:
        metrics = [metric]

    try:
        all_channels_data = []
        for metric in metrics:
            raw_series = REDIS_CONN.get(settings.FULL_NAMESPACE + metric)
            if not raw_series:
                resp = json.dumps({'results': 'Error: No metric by that name'})
                return resp, 404

            unpacker = Unpacker(use_list=False)
            unpacker.feed(raw_series)
            timeseries = [{'x': dp[0], 'y': dp[1]}
                          for dp in unpacker
                          if _in_window(dp[0], start, end)]

            all_channels_data.append({'key': metric, 'values': timeseries})

        resp = json.dumps({'results': all_channels_data})
        return resp, 200

    except Exception as e:
        error = "Error: " + str(e)
        resp = json.dumps({'results': error})
        return resp, 500
Ejemplo n.º 7
0
def test_incorrect_type_nested_map():
    """read_map_header() on a top-level array must raise."""
    unpacker = Unpacker()
    unpacker.feed(packb([{"a": "b"}]))
    raised = False
    try:
        unpacker.read_map_header()
    except UnexpectedTypeException:
        raised = True
    assert raised, "should raise exception"
Ejemplo n.º 8
0
def test_correct_type_nested_array():
    """read_array_header() on a top-level map must raise."""
    unpacker = Unpacker()
    unpacker.feed(packb({"a": ["b", "c", "d"]}))
    raised = False
    try:
        unpacker.read_array_header()
    except UnexpectedTypeException:
        raised = True
    assert raised, "should raise exception"
Ejemplo n.º 9
0
def mpdecode(iterable):
    """Lazily decode a stream of msgpack chunks into Python objects."""
    decoder = Unpacker(encoding='utf8')
    for piece in iterable:
        decoder.feed(piece)
        # A chunk may complete zero, one, or many objects; emit whatever
        # is ready after each feed.
        yield from decoder
Ejemplo n.º 10
0
def test_incorrect_type_array():
    """read_array_header() on a scalar must raise."""
    unpacker = Unpacker()
    unpacker.feed(packb(1))
    raised = False
    try:
        unpacker.read_array_header()
    except UnexpectedTypeException:
        raised = True
    assert raised, 'should raise exception'
Ejemplo n.º 11
0
def test_correct_type_nested_array():
    """read_array_header() on a top-level map must raise."""
    unpacker = Unpacker()
    unpacker.feed(packb({'a': ['b', 'c', 'd']}))
    raised = False
    try:
        unpacker.read_array_header()
    except UnexpectedTypeException:
        raised = True
    assert raised, 'should raise exception'
Ejemplo n.º 12
0
def test_incorrect_type_nested_map():
    """read_map_header() on a top-level array must raise."""
    unpacker = Unpacker()
    unpacker.feed(packb([{'a': 'b'}]))
    raised = False
    try:
        unpacker.read_map_header()
    except UnexpectedTypeException:
        raised = True
    assert raised, 'should raise exception'
Ejemplo n.º 13
0
		def __init__(self, cb):
			# cb: user callback invoked for every msgpack list this
			# Unpacker subclass decodes (wired in via list_hook below).
			self.cb = cb

			def listhook(obj):
				# Forward each decoded list to the user callback.
				return self.cb(obj)
			self.listhook = listhook

			# NOTE(review): enclosing class is outside this view —
			# presumably a subclass of Unpacker; confirm at the class header.
			Unpacker.__init__(self, list_hook=self.listhook)
Ejemplo n.º 14
0
def test_auto_max_array_len():
    """A container longer than max_buffer_size permits must be rejected."""
    packed = b'\xde\x00\x06zz'

    with pytest.raises(UnpackValueError):
        unpackb(packed, raw=False)

    streaming = Unpacker(max_buffer_size=5, raw=False)
    streaming.feed(packed)
    with pytest.raises(UnpackValueError):
        streaming.unpack()
Ejemplo n.º 15
0
    def setUp(self):
        """Decode an erase-flash-page request, dropping the version word."""
        address = 0xfa1afe1
        device = "LivewareProblem"

        raw_packet = encode_erase_flash_page(address, device)

        decoder = Unpacker()
        decoder.feed(raw_packet)
        decoded = list(decoder)
        # First decoded element is the command-set version; keep the rest.
        self.command = decoded[1:]
Ejemplo n.º 16
0
def test_write_bytes_multi_buffer():
    """unpack(f.write) must stream raw bytes across several read buffers.

    BUG FIX: the original used ``(5) * 100`` (== 500), whose msgpack
    encoding is only 3 bytes and fits in a single 3-byte buffer, so the
    multi-buffer path was never exercised.  Use a value whose encoding
    spans multiple read_size=3 buffers.
    """
    long_val = (2 ** 32) * 100
    expected = packb(long_val)
    unpacker = Unpacker(six.BytesIO(expected), read_size=3, max_buffer_size=3)

    f = six.BytesIO()
    unpacked = unpacker.unpack(f.write)
    assert unpacked == long_val
    assert f.getvalue() == expected
Ejemplo n.º 17
0
def unpack_gen(file, size):
    """Yield msgpack objects decoded from `file`, read `size` bytes at a time."""
    unpacker = Unpacker()
    while True:
        chunk = file.read(size)
        if not chunk:
            break
        unpacker.feed(chunk)
        yield from unpacker
Ejemplo n.º 18
0
 def get_edge(self, filenum, offset):
     """Read and decode the msgpack record at `offset` of edge file `filenum`.

     File objects are cached per filenum so repeated lookups avoid
     reopening the same part file.
     NOTE(review): `encoding` below is a free variable — presumably a
     module-level constant; confirm it is defined where this class lives.
     """
     if filenum in self.open_file_cache:
         fileobj = self.open_file_cache[filenum]
     else:
         filename = 'part_%02d.msgpack' % filenum
         fileobj = open(os.path.join(self.edge_dir, filename), 'rb')
         self.open_file_cache[filenum] = fileobj
     # Seek directly to the record; one unpack() returns one object.
     fileobj.seek(offset)
     unpacker = Unpacker(fileobj, encoding=encoding)
     return unpacker.unpack()
Ejemplo n.º 19
0
 def mpack_handler(self, data, sock):
     """Dispatch msgpack messages from `data`, then keep draining `sock`.

     Loops until the peer closes the connection (empty recv).
     """
     unpacker = Unpacker()
     unpacker.feed(data)
     while 1:
         for msg in unpacker:
             self.on_message(msg)
         # Renamed from `next`, which shadowed the builtin next().
         chunk = sock.recv(1000000)
         if not chunk:
             break
         unpacker.feed(chunk)
Ejemplo n.º 20
0
def test_auto_max_map_len():
    """max_map_len is derived from max_buffer_size (len(packed)==6 -> 3)."""
    packed = b'\xde\x00\x04zzz'

    with pytest.raises(UnpackValueError):
        unpackb(packed, raw=False)

    streaming = Unpacker(max_buffer_size=6, raw=False)
    streaming.feed(packed)
    with pytest.raises(UnpackValueError):
        streaming.unpack()
Ejemplo n.º 21
0
def test_foobar():
    """Stream-decode individual positive fixints with a tiny read size."""
    unpacker = Unpacker(read_size=3)
    unpacker.feed(b'foobar')
    # Each byte of b'foobar' decodes as one positive fixint.
    for expected in b'foobar':
        assert unpacker.unpack() == expected
    try:
        leftover = unpacker.unpack()
        print(("Oops!", leftover))
        assert 0
    except StopIteration:
        assert 1
    else:
        assert 0
    unpacker.feed(b'foo')
    unpacker.feed(b'bar')

    count = 0
    for got, expected in zip(unpacker, b'foobarbaz'):
        assert got == expected
        count += 1
    # Only six bytes were fed, so iteration stops after 'foobar'.
    assert count == len(b'foobar')
def test_foobar():
    """Stream-decode fixints with a tiny read size.

    BUG FIX (py3 port): the original was Python 2 — `print` statement,
    `str` arguments to feed() (py3 Unpacker.feed requires bytes), and
    nose's assert_equal.  Plain asserts keep it importable anywhere.
    """
    unpacker = Unpacker(read_size=3)
    unpacker.feed(b'foobar')
    assert unpacker.unpack() == ord('f')
    assert unpacker.unpack() == ord('o')
    assert unpacker.unpack() == ord('o')
    assert unpacker.unpack() == ord('b')
    assert unpacker.unpack() == ord('a')
    assert unpacker.unpack() == ord('r')
    try:
        o = unpacker.unpack()
        print("Oops!", o)
        assert 0
    except StopIteration:
        assert 1
    else:
        assert 0
    unpacker.feed(b'foo')
    unpacker.feed(b'bar')

    k = 0
    for o, e in zip(unpacker, 'foobarbaz'):
        assert o == ord(e)
        k += 1
    assert k == len('foobar')
Ejemplo n.º 23
0
def test_foobar():
    """Stream-decode fixints with a tiny read size.

    BUG FIXES (py3 port): `print` statement was Python-2-only; feed()
    was given a `str` where py3 requires bytes; and `ord(e)` fails on
    Python 3 because iterating bytes already yields ints.
    """
    unpacker = Unpacker(read_size=3)
    unpacker.feed(b"foobar")
    assert unpacker.unpack() == ord(b"f")
    assert unpacker.unpack() == ord(b"o")
    assert unpacker.unpack() == ord(b"o")
    assert unpacker.unpack() == ord(b"b")
    assert unpacker.unpack() == ord(b"a")
    assert unpacker.unpack() == ord(b"r")
    try:
        o = unpacker.unpack()
        print("Oops!", o)
        assert 0
    except StopIteration:
        assert 1
    else:
        assert 0
    unpacker.feed(b"foo")
    unpacker.feed(b"bar")

    k = 0
    for o, e in zip(unpacker, b"foobarbaz"):
        assert o == e
        k += 1
    assert k == len(b"foobar")
Ejemplo n.º 24
0
    def test_has_correct_protocol_version(self):
        """
        Checks that the command encoding function works correctly.
        """
        raw_packet = encode_command(command_code=10)

        unpacker = Unpacker()
        unpacker.feed(raw_packet)

        # First decoded element is the protocol version; rest is ignored.
        version, *_ = list(unpacker)
        self.assertEqual(2, version)
Ejemplo n.º 25
0
    def setUp(self):
        """Decode a write-flash request, discarding the version word."""
        address = 0xdeadbeef
        data = bytes(range(4))
        device = "dummy"

        raw_packet = encode_write_flash(data, address, device)

        decoder = Unpacker()
        decoder.feed(raw_packet)
        decoded = list(decoder)
        # Drop the command-set version header, keep the command itself.
        self.command = decoded[1:]
Ejemplo n.º 26
0
class MsgpackStream(object):

    """Two-way msgpack stream that wraps a event loop byte stream.

    This wraps the event loop interface for reading/writing bytes and
    exposes an interface for reading/writing msgpack documents.
    """

    def __init__(self, event_loop):
        """Wrap `event_loop` on a msgpack-aware interface."""
        self.loop = event_loop
        # Packer/Unpacker pair shared for the lifetime of the connection.
        self._packer = Packer(encoding='utf-8',
                              unicode_errors=unicode_errors_default)
        self._unpacker = Unpacker()
        # Callback invoked with each parsed message while run() is active.
        self._message_cb = None

    def threadsafe_call(self, fn):
        """Wrapper around `BaseEventLoop.threadsafe_call`."""
        self.loop.threadsafe_call(fn)

    def send(self, msg):
        """Queue `msg` for sending to Nvim."""
        debug('sent %s', msg)
        self.loop.send(self._packer.pack(msg))

    def run(self, message_cb):
        """Run the event loop to receive messages from Nvim.

        While the event loop is running, `message_cb` will be called whenever
        a message has been successfully parsed from the input stream.
        """
        self._message_cb = message_cb
        self.loop.run(self._on_data)
        # Drop the callback once the loop exits so stale data can't fire it.
        self._message_cb = None

    def stop(self):
        """Stop the event loop."""
        self.loop.stop()

    def close(self):
        """Close the event loop."""
        self.loop.close()

    def _on_data(self, data):
        # Feed the raw chunk, then drain every complete message.
        # StopIteration from next() means the unpacker needs more bytes.
        self._unpacker.feed(data)
        while True:
            try:
                debug('waiting for message...')
                msg = next(self._unpacker)
                debug('received message: %s', msg)
                self._message_cb(msg)
            except StopIteration:
                debug('unpacker needs more data...')
                break
Ejemplo n.º 27
0
def testArraySize(sizes=(0, 5, 50, 1000)):
    """Round-trip arrays of several sizes through a streaming Unpacker.

    BUG FIX: the default argument was a mutable list; a tuple default
    avoids the shared-mutable-default pitfall and accepts the same calls.
    """
    bio = BytesIO()
    packer = Packer()
    for size in sizes:
        # Write the array header, then each element individually.
        bio.write(packer.pack_array_header(size))
        for i in range(size):
            bio.write(packer.pack(i))

    bio.seek(0)
    unpacker = Unpacker(bio, use_list=1)
    for size in sizes:
        assert unpacker.unpack() == list(range(size))
Ejemplo n.º 28
0
 def mpack_handler(self, data, sock):
     """Handle an initial msgpack buffer, then keep draining the socket."""
     # default chunk size of memory buffer is 32MB
     RECV_SIZE = 32 * 1024 * 1024
     unpacker = Unpacker()
     unpacker.feed(data)
     while True:
         for msg in unpacker:
             self.on_message(msg)
         chunk = sock.recv(RECV_SIZE)
         if not chunk:
             break
         unpacker.feed(chunk)
Ejemplo n.º 29
0
 def call(self, name, args):
     """Synchronous RPC: send a request and return the matching response.

     Event messages (type 2) received while waiting are queued on
     self.events instead of being returned.
     """
     self._cpt += 1
     request = packb([0, self._cpt, name, args])
     self._socket.sendall(request)
     unpacker = Unpacker()
     while True:
         unpacker.feed(self._socket.recv(2048))
         for reply in unpacker:
             if reply[0] == 2:
                 self.events.put(reply)
             else:
                 return reply
Ejemplo n.º 30
0
    def handle(self):
        """Serve one connection: decode requests, dispatch each message."""
        unpacker = Unpacker()
        # Bind hot attributes to locals once, outside the receive loop.
        dispatch = self.dispatcher.dispatch
        _send_response = self._send_response

        while True:
            chunk = self.request.recv(4096)
            if not chunk:
                break
            unpacker.feed(chunk)
            for message in unpacker:
                dispatch(message, _send_response)
Ejemplo n.º 31
0
def testPackUnicode():
    """Unicode strings must round-trip through packb/unpackb and Unpacker."""
    samples = ["", "abcd", ["defgh"], "Русский текст"]
    for sample in samples:
        assert unpackb(packb(sample), use_list=1, raw=False) == sample
        blob = Packer().pack(sample)
        decoded = Unpacker(BytesIO(blob), raw=False, use_list=1).unpack()
        assert decoded == sample
Ejemplo n.º 32
0
def test_read_map_header():
    """read_map_header() yields the pair count; entries follow one by one."""
    unpacker = Unpacker()
    unpacker.feed(packb({"a": "A"}))
    assert unpacker.read_map_header() == 1
    assert unpacker.unpack() == "a"
    assert unpacker.unpack() == "A"
    raised = False
    try:
        unpacker.unpack()
    except OutOfData:
        raised = True
    assert raised, "should raise exception"
Ejemplo n.º 33
0
def test_issue124():
    """Objects split across feed() boundaries must decode once completed."""
    unpacker = Unpacker()
    unpacker.feed(b'\xa1?\xa1!')
    assert list(unpacker) == [b'?', b'!']
    assert list(unpacker) == []
    unpacker.feed(b"\xa1?\xa1")
    # The second string is still incomplete; only the first comes out.
    assert list(unpacker) == [b'?']
    assert list(unpacker) == []
    unpacker.feed(b"!")
    assert list(unpacker) == [b'!']
    assert list(unpacker) == []
Ejemplo n.º 34
0
def test_issue124():
    """Objects split across feed() boundaries must decode once completed."""
    unpacker = Unpacker()
    unpacker.feed(b"\xa1?\xa1!")
    assert list(unpacker) == ["?", "!"]
    assert list(unpacker) == []
    unpacker.feed(b"\xa1?\xa1")
    # The second string is still incomplete; only the first comes out.
    assert list(unpacker) == ["?"]
    assert list(unpacker) == []
    unpacker.feed(b"!")
    assert list(unpacker) == ["!"]
    assert list(unpacker) == []
Ejemplo n.º 35
0
def test_read_map_header():
    """read_map_header() yields the pair count; entries follow one by one."""
    unpacker = Unpacker()
    unpacker.feed(packb({'a': 'A'}))
    assert unpacker.read_map_header() == 1
    assert unpacker.unpack() == b'a'
    assert unpacker.unpack() == b'A'
    raised = False
    try:
        unpacker.unpack()
    except StopIteration:
        raised = True
    assert raised, 'should raise exception'
Ejemplo n.º 36
0
def verify_holt_winters(metric_name):
    """Count Holt-Winters deviants over a metric's full series.

    Merges the cached series from redis (refreshed from the kyotocabinet
    file when the newest cached point is stale) with the most recent
    redis data, re-caches the merged series, and returns the deviant
    count from holtWintersDeviants().

    BUG FIX: ``dict.has_key()`` was removed in Python 3; use the ``in``
    operator instead.
    """
    HOLT_CACHE_DURATION = 1800
    HOLT_WINTERS_COUNT = 4
    CABINET = "/opt/skyline/src/cabinet"
    full_holt_series = []
    known_metrics = {}
    recent_holt_time = time() - HOLT_CACHE_DURATION

    db = DB()

    # Bail out with the default count if the cabinet file cannot be read.
    if not db.open(CABINET + "/" + metric_name + ".kct",
                   DB.OREADER | DB.ONOLOCK):
        return HOLT_WINTERS_COUNT

    seen_holt = redis_conn.get('holt_' + metric_name)

    # We've put a holt_ record in redis for this metric
    if seen_holt is not None:
        full_holt_series = unpackb(seen_holt)
        # The last item in the series was seen > HOLT_CACHE_DURATION ago
        if full_holt_series[-1][0] < recent_holt_time:
            full_holt_series = get_holt_from_cabinet(full_holt_series, db)
    else:
        full_holt_series = get_holt_from_cabinet(full_holt_series, db)

    # Index timestamps already present so redis data is appended only once.
    for value in full_holt_series:
        known_metrics[str(value[0])] = 1

    db.close()

    # Add the last FULL_DURATION to the cabinet data for any missing items
    raw_metric = redis_conn.mget(metric_name)
    for i, local_metric in enumerate(raw_metric):
        unpacker = Unpacker(use_list=False)
        unpacker.feed(local_metric)
        potential_new = list(unpacker)
        for value in potential_new:
            if str(value[0]) not in known_metrics:
                full_holt_series.append((float(value[0]), float(value[1])))

    redis_conn.set('holt_' + metric_name, packb(full_holt_series))

    count = holtWintersDeviants(full_holt_series)
    return count
Ejemplo n.º 37
0
def testPackUnicode():
    """Unicode round-trip using the legacy encoding='utf-8' API."""
    samples = ["", "abcd", ["defgh"], "Русский текст"]
    for sample in samples:
        assert unpackb(packb(sample, encoding='utf-8'), use_list=1,
                       encoding='utf-8') == sample
        blob = Packer(encoding='utf-8').pack(sample)
        decoded = Unpacker(BytesIO(blob), encoding=str('utf-8'),
                           use_list=1).unpack()
        assert decoded == sample
Ejemplo n.º 38
0
    def __init__(self, fmt, stream):
        """Create MessagePack decoder.

        :param fmt: Format descriptor this decoder was created for.
        :param stream: Stream
        :type stream: file or io.IOBase
        """
        self.format = fmt
        self.stream = stream

        # Streaming unpacker reads directly from the file-like object.
        self.unpacker = Unpacker(self.stream, encoding='utf-8')
Ejemplo n.º 39
0
    def __init__(self, bns, client):
        """Per-client session thread for the Blacknet server.

        :param bns: owning server instance providing config, logger,
            blacklist, SSL context and session parameters.
        :param client: the freshly accepted client socket.
        """
        super(BlacknetServerThread, self).__init__()

        # Message-type -> handler dispatch table for the client protocol.
        handler = {
            BlacknetMsgType.HELLO: self.handle_hello,
            BlacknetMsgType.CLIENT_NAME: self.handle_client_name,
            BlacknetMsgType.SSH_CREDENTIAL: self.handle_ssh_credential,
            BlacknetMsgType.SSH_PUBLICKEY: self.handle_ssh_publickey,
            BlacknetMsgType.PING: self.handle_ping,
            BlacknetMsgType.GOODBYE: self.handle_goodbye,
        }
        self.handler = handler
        self.started = False

        self.database = BlacknetDatabase(bns.config, bns.logger)
        self.__blacklist = bns.blacklist
        self.__client = None
        self.__connect_lock = Lock()
        self.__cursor = None
        self.__logger = bns.logger
        self.__mysql_error = 0
        self.__session_interval = bns.session_interval
        # Streaming msgpack codec pair for this connection.
        self.__unpacker = Unpacker(encoding='utf-8')
        self.__packer = Packer(encoding='utf-8')
        self.__dropped_count = 0
        self.__attempt_count = 0
        # Lookup caches (attacker / session / key) to limit DB round-trips.
        self.__atk_cache = {}
        self.__ses_cache = {}
        self.__key_cache = {}
        self.__test_mode = bns.test_mode

        # UNIX-domain clients are local; anything else gets wrapped in SSL.
        peer = client.getpeername()
        self.__peer_ip = peer[0] if peer else "local"
        self.__use_ssl = (client.family != socket.AF_UNIX)

        client.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
        if self.__use_ssl:
            client = bns.ssl_context.wrap_socket(client, server_side=True)
        self.__client = client

        self.name = self.peername
        self.log_info("starting session (SSL: %s)" % self.__use_ssl)
Ejemplo n.º 40
0
def testPackUnicode():
    """Unicode strings survive a pack/unpack round trip (six variant)."""
    samples = [
        six.u(""), six.u("abcd"), [six.u("defgh")], six.u("Русский текст"),
        ]
    for sample in samples:
        assert unpackb(packb(sample, encoding='utf-8'), use_list=1,
                       encoding='utf-8') == sample
        blob = Packer(encoding='utf-8').pack(sample)
        decoded = Unpacker(BytesIO(blob), encoding='utf-8', use_list=1).unpack()
        assert decoded == sample
Ejemplo n.º 41
0
def testPackUnicode():
    """Unicode round-trip (six-based variant including a tuple sample)."""
    samples = [
        six.u(""), six.u("abcd"), (six.u("defgh"),), six.u("Русский текст"),
        ]
    for sample in samples:
        assert_equal(
            unpackb(packb(sample, encoding='utf-8'), encoding='utf-8'),
            sample)
        blob = Packer(encoding='utf-8').pack(sample)
        assert_equal(Unpacker(BytesIO(blob), encoding='utf-8').unpack(),
                     sample)
def test_unpack_tell():
    """Unpacker.tell() must report the stream offset after each object."""
    stream = io.BytesIO()
    messages = [2**i - 1 for i in range(65)]
    messages += [-(2**i) for i in range(1, 64)]
    messages += [
        b'hello', b'hello' * 1000,
        list(range(20)),
        {i: bytes(i) * i for i in range(10)},
        {i: bytes(i) * i for i in range(32)},
    ]
    # Record the offset right after packing each message.
    offsets = []
    for message in messages:
        pack(message, stream)
        offsets.append(stream.tell())
    stream.seek(0)
    unpacker = Unpacker(stream)
    for message, offset in zip(messages, offsets):
        assert next(unpacker) == message
        assert unpacker.tell() == offset
Ejemplo n.º 43
0
def socket_incoming_connection(socket, address):
    """Pump msgpack messages from one client socket into the receive queue."""
    logger.debug('connected %s', address)
    sockets[address] = socket

    unpacker = Unpacker(encoding='utf-8')
    while True:
        chunk = socket.recv(4096)
        if not chunk:
            # Empty recv means the peer closed the connection.
            logger.debug('closed connection %s', address)
            break

        unpacker.feed(chunk)
        for msg in unpacker:
            receive.put(InMsg(msg, address))
            logger.debug('got socket msg: %s', msg)

    sockets.pop(address)
Ejemplo n.º 44
0
def test_unpack_array_header_from_file():
    """read_array_header() gives the length; elements follow one by one."""
    f = BytesIO(packb([1, 2, 3, 4]))
    unpacker = Unpacker(f)
    assert unpacker.read_array_header() == 4
    for expected in (1, 2, 3, 4):
        assert unpacker.unpack() == expected
    with raises(OutOfData):
        unpacker.unpack()
Ejemplo n.º 45
0
def test_maxbuffersize():
    """feed() past max_buffer_size must raise BufferFull."""
    # read_size larger than max_buffer_size is rejected at construction.
    nose.tools.assert_raises(ValueError,
                             Unpacker,
                             read_size=5,
                             max_buffer_size=3)
    unpacker = Unpacker(read_size=3, max_buffer_size=3)
    unpacker.feed(b'fo')
    nose.tools.assert_raises(BufferFull, unpacker.feed, b'ob')
    unpacker.feed(b'o')
    assert next(unpacker) == ord('f')
    unpacker.feed(b'b')
    assert next(unpacker) == ord('o')
    assert next(unpacker) == ord('o')
    assert next(unpacker) == ord('b')
Ejemplo n.º 46
0
def merge_attached_breadcrumbs(mpack_breadcrumbs, data):
    """
    Merges breadcrumbs attached in the ``__sentry-breadcrumbs`` attachment(s).
    """
    # Ignore empty or oversized attachments outright.
    size = mpack_breadcrumbs.size
    if size == 0 or size > MAX_MSGPACK_BREADCRUMB_SIZE_BYTES:
        return

    # Decode every breadcrumb in the attachment; a malformed attachment
    # is logged and skipped rather than failing the event.
    try:
        unpacker = Unpacker(mpack_breadcrumbs)
        breadcrumbs = list(unpacker)
    except (ValueError, UnpackException, ExtraData) as e:
        minidumps_logger.exception(e)
        return

    if not breadcrumbs:
        return

    # No existing crumbs: the attachment becomes the whole list.
    current_crumbs = data.get("breadcrumbs")
    if not current_crumbs:
        data["breadcrumbs"] = breadcrumbs
        return

    # Newest timestamped crumb from each list, used to decide ordering.
    current_crumb = next(
        (
            c
            for c in reversed(current_crumbs)
            if isinstance(c, dict) and c.get("timestamp") is not None
        ),
        None,
    )
    new_crumb = next(
        (
            c
            for c in reversed(breadcrumbs)
            if isinstance(c, dict) and c.get("timestamp") is not None
        ),
        None,
    )

    # cap the breadcrumbs to the highest count of either file
    cap = max(len(current_crumbs), len(breadcrumbs))

    # Older list goes first so the trailing slice keeps the newest crumbs.
    if current_crumb is not None and new_crumb is not None:
        if dp.parse(current_crumb["timestamp"]) > dp.parse(new_crumb["timestamp"]):
            data["breadcrumbs"] = breadcrumbs + current_crumbs
        else:
            data["breadcrumbs"] = current_crumbs + breadcrumbs
    else:
        data["breadcrumbs"] = current_crumbs + breadcrumbs

    data["breadcrumbs"] = data["breadcrumbs"][-cap:]
Ejemplo n.º 47
0
 def connect(self):
     """Open the control connection and schedule the receive loop.

     BUG FIX: ``asyncio. async (...)`` is invalid on Python 3.7+, where
     ``async`` became a reserved keyword (SyntaxError); the supported
     replacement is ``asyncio.ensure_future``.
     """
     self.state = None
     self.reader, self.writer = yield from asyncio.open_connection(
         "gimel", 8000)
     self.writer.write(
         packb({
             "type": "connect",
             "username": "******",
             "password": "",
             "async": True
         }))
     print("Connected.")
     self.writer.write(packb({"type": "activate"}))
     self.unpacker = Unpacker()
     # Schedule the background receive/decode loop.
     asyncio.ensure_future(self.receiveandunpack())
     loop.call_soon(self.run)
     #self.move("up")  # or "down" or "halt"
     self.writer.write(packb({"type": "shout", "foo": "bar"}))
     self.writer.write(packb({"type": "get_state"}))
     loop.call_later(5, lambda: self.go_to_level(8))
     loop.call_later(10, lambda: self.go_to_level(4))
Ejemplo n.º 48
0
def test_unpacker_hook_refcnt():
    """The Unpacker must hold (and release) references to its hooks."""
    result = []

    def hook(x):
        result.append(x)
        return x

    # Baseline refcount before the Unpacker takes its own references.
    basecnt = sys.getrefcount(hook)

    up = Unpacker(object_hook=hook, list_hook=hook)

    # The unpacker holds at least one reference per hook slot.
    assert sys.getrefcount(hook) >= basecnt + 2

    up.feed(packb([{}]))
    up.feed(packb([{}]))
    assert up.unpack() == [{}]
    assert up.unpack() == [{}]
    # Hooks fire inside-out: the map hook before the enclosing list hook.
    assert result == [{}, [{}], {}, [{}]]

    del up

    # Destroying the unpacker must return the refcount to its baseline.
    assert sys.getrefcount(hook) == basecnt
Ejemplo n.º 49
0
def test_correct_type_nested_array():
    """read_array_header() on a top-level map must raise."""
    unpacker = Unpacker()
    unpacker.feed(packb({'a': ['b', 'c', 'd']}))
    raised = False
    try:
        unpacker.read_array_header()
    except UnexpectedTypeException:
        raised = True
    assert raised, 'should raise exception'
Ejemplo n.º 50
0
def test_incorrect_type_array():
    """read_array_header() on a scalar must raise."""
    unpacker = Unpacker()
    unpacker.feed(packb(1))
    raised = False
    try:
        unpacker.read_array_header()
    except UnexpectedTypeException:
        raised = True
    assert raised, "should raise exception"
Ejemplo n.º 51
0
def test_incorrect_type_nested_map():
    """read_map_header() on a top-level array must raise."""
    unpacker = Unpacker()
    unpacker.feed(packb([{"a": "b"}]))
    raised = False
    try:
        unpacker.read_map_header()
    except UnexpectedTypeException:
        raised = True
    assert raised, "should raise exception"
Ejemplo n.º 52
0
def test_correct_type_nested_array():
    """read_array_header() on a top-level map must raise."""
    unpacker = Unpacker()
    unpacker.feed(packb({"a": ["b", "c", "d"]}))
    raised = False
    try:
        unpacker.read_array_header()
    except UnexpectedTypeException:
        raised = True
    assert raised, "should raise exception"
Ejemplo n.º 53
0
def test_incorrect_type_nested_map():
    """read_map_header() on a top-level array must raise."""
    unpacker = Unpacker()
    unpacker.feed(packb([{'a': 'b'}]))
    raised = False
    try:
        unpacker.read_map_header()
    except UnexpectedTypeException:
        raised = True
    assert raised, 'should raise exception'
Ejemplo n.º 54
0
def test_write_bytes():
    """unpack()/skip() accept a write callback receiving the raw bytes."""
    unpacker = Unpacker()
    unpacker.feed(b'abc')

    sink = six.BytesIO()
    assert unpacker.unpack(sink.write) == ord('a')
    assert sink.getvalue() == b'a'

    sink = six.BytesIO()
    assert unpacker.skip(sink.write) is None
    assert sink.getvalue() == b'b'

    # Without a callback, skip() consumes the object silently.
    sink = six.BytesIO()
    assert unpacker.skip() is None
    assert sink.getvalue() == b''
Ejemplo n.º 55
0
def tile_contents(tile, table):
    """
    Generator yielding each item in a gzipped msgpack format file.

    TODO: This should be generalised? Perhaps move into formatter classes?
    """
    path = 'tiles/osm/%s/%d/%d/%d.msgpack.gz' % \
        (table, tile.z, tile.x, tile.y)

    # BufferedReader keeps the gzip stream efficient for the unpacker.
    with BufferedReader(gzip.open(path, 'rb')) as gz:
        yield from Unpacker(file_like=gz)
Ejemplo n.º 56
0
def tile_contents(tile, table, extension):
    """
    Generator yielding each item in a msgpack format file.

    TODO: This should be generalised? Perhaps move into formatter classes?
    """
    path = 'tiles/osm/%s/%d/%d/%d%s' % \
        (table, tile.z, tile.x, tile.y, extension)

    with open(path, 'rb') as source:
        yield from Unpacker(file_like=source)
Ejemplo n.º 57
0
def test_foobar():
    """Stream-decode individual positive fixints with a tiny read size."""
    unpacker = Unpacker(read_size=3)
    unpacker.feed(b'foobar')
    # Each byte of b'foobar' decodes as one positive fixint.
    for expected in b'foobar':
        assert unpacker.unpack() == expected
    raised = False
    try:
        unpacker.unpack()
    except StopIteration:
        raised = True
    assert raised, "should raise exception"

    unpacker.feed(b'foo')
    unpacker.feed(b'bar')

    count = 0
    for got, expected in zip(unpacker, 'foobarbaz'):
        assert got == ord(expected)
        count += 1
    # Only six bytes were fed, so iteration stops after 'foobar'.
    assert count == len(b'foobar')
Ejemplo n.º 58
0
def test_auto_max_array_len():
    """A container longer than max_buffer_size permits must be rejected."""
    packed = b'\xde\x00\x06zz'

    with pytest.raises(UnpackValueError):
        unpackb(packed, raw=False)

    streaming = Unpacker(max_buffer_size=5, raw=False)
    streaming.feed(packed)
    with pytest.raises(UnpackValueError):
        streaming.unpack()
Ejemplo n.º 59
0
    def __iter__(self):
        """Iterate endlessly over all objects sent by the producer

        Internally, this method uses a receiving buffer that is lost if
        interrupted (GeneratorExit). If this buffer was not empty, the queue
        is left in a inconsistent state and this method can't be called again.

        So the correct way to split a loop is to first get an iterator
        explicitly:
            iq = iter(queue)
            for x in iq:
                if ...:
                    break
            for x in iq:
                ...
        """
        unpacker = Unpacker(use_list=False, raw=True)
        # Bind hot attributes to locals once: this loop runs forever.
        feed = unpacker.feed
        max_size = self._max_size
        array = self._array
        pos = self._pos
        size = self._size
        lock, get_lock, put_lock = self._locks
        # NOTE(review): `left` appears unused in this method.
        left = 0
        while 1:
            # Drain every already-decoded object before touching the buffer.
            for data in unpacker:
                yield data
            # Wait until the producer has published at least one byte.
            while 1:
                with lock:
                    p = pos.value
                    s = size.value
                if s:
                    break
                get_lock.acquire()
            e = p + s
            # Feed the readable region, handling ring-buffer wraparound.
            if e < max_size:
                feed(array[p:e])
            else:
                feed(array[p:])
                e -= max_size
                feed(array[:e])
            # Publish the new read position and shrink the used size.
            with lock:
                pos.value = e
                n = size.value
                size.value = n - s
            # If the buffer was completely full, wake a blocked producer.
            if n == max_size:
                put_lock.acquire(0)
                put_lock.release()
Ejemplo n.º 60
0
def test_auto_max_map_len():
    # len(packed) == 6 -> max_map_len == 3
    packed = b'\xde\x00\x04zzz'

    with pytest.raises(UnpackValueError):
        unpackb(packed, raw=False)

    streaming = Unpacker(max_buffer_size=6, raw=False)
    streaming.feed(packed)
    with pytest.raises(UnpackValueError):
        streaming.unpack()