Example #1
0
def encode_command(command_code, *arguments):
    """Build the wire frame for one command.

    The frame is the protocol version, the command code and the argument
    list, each msgpack-packed and concatenated into a single bytes object.
    """
    packer = Packer(use_bin_type=True)
    payload = list(arguments)
    return (packer.pack(COMMAND_SET_VERSION)
            + packer.pack(command_code)
            + packer.pack(payload))
def test_get_buffer():
    """Packer.getbuffer() must expose the same bytes packb() would emit."""
    pkr = Packer(autoreset=0, use_bin_type=True)
    pkr.pack([1, 2])
    sink = BytesIO()
    sink.write(pkr.getbuffer())
    assert sink.getvalue() == packb([1, 2], use_bin_type=True)
Example #3
0
def test_get_buffer():
    """With autoreset disabled, getbuffer() holds the packed payload."""
    reference = packb([1, 2], use_bin_type=True)
    pkr = Packer(autoreset=0, use_bin_type=True)
    pkr.pack([1, 2])
    out = BytesIO()
    out.write(pkr.getbuffer())
    assert out.getvalue() == reference
Example #4
0
class MessagePackEncoder(object):
    """MessagePack Encoder.

    Serializes generators of items onto an output stream, in the
    collection structure requested by the format descriptor.
    """

    def __init__(self, fmt, stream, **kwargs):
        """Create MessagePack Encoder.

        :param fmt: Format descriptor (provides the collection structure)
        :param stream: Output stream
        :type stream: file or io.IOBase
        :param kwargs: Extra options forwarded to :class:`Packer`
        """
        self.format = fmt
        self.stream = stream
        self.packer = Packer(**kwargs)

    def encode(self, items):
        """Encode items according to the configured structure.

        :param items: Items
        :type items: generator
        :raises ValueError: when the structure is not a known mode
        """
        structure = self.format.structure
        if structure == MessagePackCollectionStructure.Dictionary:
            return self._encode_dictionary(items)
        if structure == MessagePackCollectionStructure.List:
            return self._encode_list(items)
        if structure == MessagePackCollectionStructure.Objects:
            return self._encode_objects(items)
        raise ValueError('Invalid encoder mode: %s' %
                         (self.format.structure, ))

    def _encode_dictionary(self, items):
        """Encode :code:`items` as one packed dictionary blob.

        :param items: Items
        :type items: generator
        """
        self.stream.write(self.packer.pack(DictionaryEmitter(items)))

    def _encode_list(self, items):
        """Encode :code:`items` to list (not supported yet).

        :param items: Items
        :type items: generator
        """
        raise NotImplementedError

    def _encode_objects(self, items):
        """Encode :code:`items` as a concatenated stream of objects.

        :param items: Items
        :type items: generator
        """
        for _, item in items:
            self.stream.write(self.packer.pack(item))
def testMapSize(sizes=[0, 5, 50, 1000]):
    """Maps written via pack_map_header round-trip at several sizes."""
    buf = BytesIO()
    pkr = Packer()
    for n in sizes:
        buf.write(pkr.pack_map_header(n))
        for k in range(n):
            buf.write(pkr.pack(k))      # key
            buf.write(pkr.pack(k * 2))  # value

    buf.seek(0)
    unp = Unpacker(buf)
    for n in sizes:
        assert unp.unpack() == {k: k * 2 for k in range(n)}
Example #6
0
def testMapSize(sizes=[0, 5, 50, 1000]):
    """Each map-header + entries sequence unpacks back to the expected dict."""
    stream = BytesIO()
    packer = Packer()
    for count in sizes:
        stream.write(packer.pack_map_header(count))
        for key in range(count):
            stream.write(packer.pack(key))      # key
            stream.write(packer.pack(key * 2))  # value

    stream.seek(0)
    unpacker = Unpacker(stream)
    for count in sizes:
        expected = dict((key, key * 2) for key in range(count))
        assert unpacker.unpack() == expected
Example #7
0
def test_manualreset(sizes=[0, 5, 50, 1000]):
    """With autoreset off the buffer accumulates until reset() clears it."""
    pkr = Packer(autoreset=False)
    for n in sizes:
        pkr.pack_array_header(n)
        for value in range(n):
            pkr.pack(value)

    unp = Unpacker(BytesIO(pkr.bytes()), use_list=1)
    for n in sizes:
        assert unp.unpack() == list(range(n))

    pkr.reset()
    assert pkr.bytes() == b''
def test_manualreset(sizes=[0, 5, 50, 1000]):
    """Accumulated arrays unpack in order; reset() empties the buffer."""
    packer = Packer(autoreset=False)
    for count in sizes:
        packer.pack_array_header(count)
        for item in range(count):
            packer.pack(item)

    stream = BytesIO(packer.bytes())
    unpacker = Unpacker(stream, use_list=1)
    for count in sizes:
        expected = list(range(count))
        assert unpacker.unpack() == expected

    packer.reset()
    assert packer.bytes() == b''
    def __init__(self, mod_conf, pub_endpoint, serialize_to):
        """Bind a zmq PUB socket and select the serializer.

        :param mod_conf: module configuration passed to BaseModule
        :param pub_endpoint: zmq endpoint the PUB socket binds on
        :param serialize_to: either "msgpack" or "json"
        """
        from zmq import Context, PUB

        BaseModule.__init__(self, mod_conf)
        self.pub_endpoint = pub_endpoint
        self.serialize_to = serialize_to
        logger.info("[Zmq Broker] Binding to endpoint " + self.pub_endpoint)

        # Binding in init() is unreliable: it can run several times and
        # leave the address already in use, so bind here instead.
        self.context = Context()
        self.s_pub = self.context.socket(PUB)
        self.s_pub.bind(self.pub_endpoint)

        # Pick the serialization callable matching the configured method.
        if self.serialize_to == "msgpack":
            from msgpack import Packer

            packer = Packer(default=encode_monitoring_data)
            self.serialize = lambda msg: packer.pack(msg)
        elif self.serialize_to == "json":
            self.serialize = lambda msg: json.dumps(msg, cls=SetEncoder)
        else:
            raise Exception("[Zmq Broker] No valid serialization method defined (Got " + str(self.serialize_to) + ")!")
Example #10
0
    def test_packer_unpacker(self):
        """One Packer/Unpacker pair round-trips mixed values in order."""
        stream = BytesIO()
        pkr = Packer()
        for value in (1, '2', {}):
            stream.write(pkr.pack(value))
        stream.seek(0)
        unp = Unpacker(stream)

        self.assertEqual(1, unp.unpack())
        self.assertEqual('2', unp.unpack())
        self.assertTrue(isinstance(unp.unpack(), dict))
Example #11
0
    def _schedule(self, batch, timestamp):
        """
        Row - portion of the queue for each partition id created at some point in time
        Row Key - partition id + score interval + random_str
        Column Qualifier - discrete score (first three digits after dot, e.g. 0.001_0.002, 0.002_0.003, ...)
        Value - QueueCell msgpack blob

        Where score is mapped from 0.0 to 1.0
        score intervals are
          [0.01-0.02)
          [0.02-0.03)
          [0.03-0.04)
         ...
          [0.99-1.00]
        random_str - the time when links was scheduled for retrieval, microsecs

        :param batch: iterable of Request objects
        :param timestamp: value written to the ``f:t`` column of each row
        :return:
        """
        def get_interval(score, resolution):
            # Map a score in [0.0, 1.0] onto a half-open interval of width
            # `resolution`; scores outside that range are rejected.
            if score < 0.0 or score > 1.0:
                raise OverflowError

            i = int(score / resolution)
            if i % 10 == 0 and i > 0:
                i = i - 1  # last interval is inclusive from right
            return (i * resolution, (i + 1) * resolution)

        # Microsecond clock value used as a (mostly) unique row-key suffix.
        random_str = int(time() * 1E+6)
        data = dict()  # row key -> list of (score, queue item) tuples
        # NOTE(review): batch actually yields (request, score) pairs, not
        # bare Request objects as the docstring says.
        for request, score in batch:
            domain = request.meta[b'domain']
            fingerprint = request.meta[b'fingerprint']
            key = self.partitioner.get_key(request)
            partition_id = self.partitioner.partition(key)
            # A numeric domain is reused directly as the host hash,
            # otherwise the partition key is CRC32-hashed.
            host_crc32 = domain if type(domain) == int else get_crc32(key)
            item = (unhexlify(fingerprint), host_crc32,
                    self.encoder.encode_request(request), score)
            score = 1 - score  # because of lexicographical sort in HBase
            rk = "%d_%s_%d" % (partition_id, "%0.2f_%0.2f" %
                               get_interval(score, 0.01), random_str)
            data.setdefault(rk, []).append((score, item))

        table = self.connection.table(self.table_name)
        with table.batch(transaction=True) as b:
            for rk, tuples in six.iteritems(data):
                # Group the row's items per discrete-score column qualifier.
                obj = dict()
                for score, item in tuples:
                    column = 'f:%0.3f_%0.3f' % get_interval(score, 0.001)
                    obj.setdefault(column, []).append(item)

                # Concatenate the msgpack blobs of each column into one cell.
                final = dict()
                packer = Packer()
                for column, items in six.iteritems(obj):
                    stream = BytesIO()
                    for item in items:
                        stream.write(packer.pack(item))
                    final[column] = stream.getvalue()
                final[b'f:t'] = str(timestamp)
                b.put(rk, final)
Example #12
0
    def test_packer_unpacker(self):
        """Values packed back-to-back unpack in the same order."""
        buf = BytesIO()
        pkr = Packer()
        buf.write(pkr.pack(1))
        buf.write(pkr.pack('2'))
        buf.write(pkr.pack({}))
        buf.seek(0)
        unpacker = Unpacker(buf)

        first = unpacker.unpack()
        self.assertEqual(1, first)

        second = unpacker.unpack()
        self.assertEqual('2', second)

        third = unpacker.unpack()
        self.assertTrue(isinstance(third, dict))
Example #13
0
    def _schedule(self, batch):
        """
        Row - portion of the queue for each partition id created at some point in time
        Row Key - partition id + score interval + timestamp
        Column Qualifier - discrete score (first three digits after dot, e.g. 0.001_0.002, 0.002_0.003, ...)
        Value - QueueCell msgpack blob

        Where score is mapped from 0.0 to 1.0
        score intervals are
          [0.01-0.02)
          [0.02-0.03)
          [0.03-0.04)
         ...
          [0.99-1.00]
        timestamp - the time when links was scheduled for retrieval.

        :param batch: list of tuples(score, fingerprint, domain, url)
        :return:
        """
        def get_interval(score, resolution):
            # Map a score in [0.0, 1.0] onto a half-open interval of width
            # `resolution`; scores outside that range are rejected.
            if score < 0.0 or score > 1.0:
                raise OverflowError

            i = int(score / resolution)
            if i % 10 == 0 and i > 0:
                i = i - 1  # last interval is inclusive from right
            return (i * resolution, (i + 1) * resolution)

        # Microsecond clock value used as the row-key timestamp suffix.
        timestamp = int(time() * 1E+6)
        data = dict()  # row key -> list of (score, queue item) tuples
        for score, fingerprint, domain, url in batch:
            if type(domain) == dict:
                partition_id = self.partitioner.partition(domain['name'], self.partitions)
                host_crc32 = get_crc32(domain['name'])
            elif type(domain) == int:
                # Numeric domains are already a hash; partition on that value.
                partition_id = self.partitioner.partition_by_hash(domain, self.partitions)
                host_crc32 = domain
            else:
                raise TypeError("domain of unknown type.")
            item = (unhexlify(fingerprint), host_crc32, url, score)
            score = 1 - score  # because of lexicographical sort in HBase
            rk = "%d_%s_%d" % (partition_id, "%0.2f_%0.2f" % get_interval(score, 0.01), timestamp)
            data.setdefault(rk, []).append((score, item))

        table = self.connection.table(self.table_name)
        with table.batch(transaction=True) as b:
            # NOTE(review): dict.iteritems is Python 2 only.
            for rk, tuples in data.iteritems():
                # Group the row's items per discrete-score column qualifier.
                obj = dict()
                for score, item in tuples:
                    column = 'f:%0.3f_%0.3f' % get_interval(score, 0.001)
                    obj.setdefault(column, []).append(item)

                # Concatenate the msgpack blobs of each column into one cell.
                final = dict()
                packer = Packer()
                for column, items in obj.iteritems():
                    stream = BytesIO()
                    for item in items:
                        stream.write(packer.pack(item))
                    final[column] = stream.getvalue()
                b.put(rk, final)
Example #14
0
    def __init__(self, mod_conf, pub_endpoint, serialize_to):
        """Set up the zmq PUB socket and the configured serializer.

        :param mod_conf: module configuration passed to BaseModule
        :param pub_endpoint: zmq endpoint the PUB socket binds on
        :param serialize_to: either "msgpack" or "json"
        """
        from zmq import Context, PUB

        BaseModule.__init__(self, mod_conf)
        self.pub_endpoint = pub_endpoint
        self.serialize_to = serialize_to
        logger.info("[Zmq Broker] Binding to endpoint " + self.pub_endpoint)

        # Bind here: init() may be invoked several times, which would leave
        # the address already in use.
        self.context = Context()
        self.s_pub = self.context.socket(PUB)
        self.s_pub.bind(self.pub_endpoint)

        # Choose the serialization callable from the configured method.
        if self.serialize_to == "msgpack":
            from msgpack import Packer

            mp_packer = Packer(default=encode_monitoring_data)
            self.serialize = lambda msg: mp_packer.pack(msg)
        elif self.serialize_to == "json":
            self.serialize = lambda msg: json.dumps(msg, cls=SetEncoder)
        else:
            raise Exception(
                "[Zmq Broker] No valid serialization method defined (Got " +
                str(self.serialize_to) + ")!")
Example #15
0
def main(name):
    """Publish periodic heartbeat telemetry on the aircomm socket.

    Spawns the CPU, power-monitor and GPS reader threads, then broadcasts a
    heartbeat (voltage, current, load, memory usage, criticality and, when a
    GPS fix is available, position) once per second, forever.

    :param name: component name used to build the socket map
    """
    global socket_map, gps_lock, font, caution_written
    socket_map = generate_map(name)
    gps_lock = Lock()

    # Background reader threads; daemonized so they die with the process.
    for worker in (cpuavg, pmreader, gps):
        t = Thread(target=worker)
        t.daemon = True
        t.start()

    socket = generate_map('aircomm_app')['out']
    packer = Packer(use_single_float=True)
    while True:
        try:
            data = [BCAST_NOFW, HEARTBEAT, int(voltage * 10), int(current * 10),
                    int(load), mem_used(), critical]
            with gps_lock:
                # Append position only with a 2D (or better) fix; gps_data
                # may not exist yet, hence the best-effort guard.
                try:
                    if gps_data.fix >= 2:
                        data += [gps_data.lon, gps_data.lat]
                except Exception:
                    pass
            socket.send(packer.pack(data))
        except Exception:
            # Best-effort broadcast loop: never let one failure kill it.
            # (Original used Python-2-only `except Exception, e` syntax.)
            pass
        sleep(1.0)
Example #16
0
def testPackUnicode():
    """Unicode strings survive a pack/unpack round trip.

    Uses the msgpack >= 1.0 API: packing str is utf-8 by default and
    ``raw=False`` restores str decoding, replacing the removed ``encoding``
    keyword arguments the original relied on.
    """
    test_data = ["", "abcd", ["defgh"], "Русский текст"]
    for td in test_data:
        re = unpackb(packb(td), use_list=1, raw=False)
        assert re == td
        packer = Packer()
        data = packer.pack(td)
        re = Unpacker(BytesIO(data), raw=False, use_list=1).unpack()
        assert re == td
Example #17
0
def testPackUnicode():
    """str values round-trip through both module- and object-level APIs."""
    samples = ["", "abcd", ["defgh"], "Русский текст"]
    for sample in samples:
        assert unpackb(packb(sample), use_list=1, raw=False) == sample
        blob = Packer().pack(sample)
        decoded = Unpacker(BytesIO(blob), raw=False, use_list=1).unpack()
        assert decoded == sample
Example #18
0
def testPackUnicode():
    """Unicode data round-trips with utf-8 encoding on both code paths."""
    samples = [six.u(""), six.u("abcd"), [six.u("defgh")], six.u("Русский текст")]
    for sample in samples:
        decoded = unpackb(packb(sample, encoding="utf-8"),
                          use_list=1, encoding="utf-8")
        assert_equal(decoded, sample)
        blob = Packer(encoding="utf-8").pack(sample)
        assert_equal(Unpacker(BytesIO(blob), encoding="utf-8", use_list=1).unpack(),
                     sample)
Example #19
0
def testPackUnicode():
    """Both packb/unpackb and Packer/Unpacker preserve unicode payloads."""
    for sample in ("", "abcd", ["defgh"], "Русский текст"):
        assert unpackb(packb(sample), use_list=1, raw=False) == sample
        packed = Packer().pack(sample)
        assert Unpacker(BytesIO(packed), raw=False, use_list=1).unpack() == sample
Example #20
0
class MsgpackStream(object):

    """Two-way msgpack stream that wraps a event loop byte stream.

    This wraps the event loop interface for reading/writing bytes and
    exposes an interface for reading/writing msgpack documents.
    """

    def __init__(self, event_loop):
        """Wrap `event_loop` on a msgpack-aware interface."""
        self.loop = event_loop
        self._packer = Packer(unicode_errors=unicode_errors_default)
        self._unpacker = Unpacker(unicode_errors=unicode_errors_default)
        self._message_cb = None  # set only while run() is active

    def threadsafe_call(self, fn):
        """Wrapper around `BaseEventLoop.threadsafe_call`."""
        self.loop.threadsafe_call(fn)

    def send(self, msg):
        """Queue `msg` for sending to Nvim."""
        self.loop.send(self._packer.pack(msg))

    def run(self, message_cb):
        """Run the event loop to receive messages from Nvim.

        While the event loop is running, `message_cb` will be called whenever
        a message has been successfully parsed from the input stream.
        """
        self._message_cb = message_cb
        self.loop.run(self._on_data)
        self._message_cb = None

    def stop(self):
        """Stop the event loop."""
        self.loop.stop()

    def close(self):
        """Close the event loop."""
        self.loop.close()

    def _on_data(self, data):
        """Feed raw bytes to the unpacker and dispatch complete messages."""
        self._unpacker.feed(data)
        while True:
            try:
                msg = next(self._unpacker)
            except StopIteration:
                # The unpacker needs more bytes before another message is
                # complete.
                break
            # Dispatch outside the try so a StopIteration raised by the
            # callback is not mistaken for "need more data".
            self._message_cb(msg)
Example #21
0
def testPackUnicode():
    """Unicode strings round-trip without the removed `encoding` kwargs.

    msgpack 1.0 dropped the ``encoding`` parameter: packing str is utf-8 by
    default and ``raw=False`` restores str decoding on the way out.
    """
    test_data = ["", "abcd", ["defgh"], "Русский текст"]
    for td in test_data:
        re = unpackb(packb(td), use_list=1, raw=False)
        assert re == td
        packer = Packer()
        data = packer.pack(td)
        re = Unpacker(BytesIO(data), raw=False,
                      use_list=1).unpack()
        assert re == td
Example #22
0
class MsgpackStream(object):

    """Two-way msgpack stream that wraps a event loop byte stream.

    This wraps the event loop interface for reading/writing bytes and
    exposes an interface for reading/writing msgpack documents.
    """

    def __init__(self, event_loop):
        """Wrap `event_loop` on a msgpack-aware interface."""
        self.loop = event_loop
        # NOTE(review): `encoding` is the legacy msgpack kwarg (deprecated in
        # 0.5, removed in 1.0); this code targets an older msgpack release.
        self._packer = Packer(encoding='utf-8',
                              unicode_errors=unicode_errors_default)
        self._unpacker = Unpacker()
        self._message_cb = None  # set only while run() is active

    def threadsafe_call(self, fn):
        """Wrapper around `BaseEventLoop.threadsafe_call`."""
        self.loop.threadsafe_call(fn)

    def send(self, msg):
        """Queue `msg` for sending to Nvim."""
        debug('sent %s', msg)
        self.loop.send(self._packer.pack(msg))

    def run(self, message_cb):
        """Run the event loop to receive messages from Nvim.

        While the event loop is running, `message_cb` will be called whenever
        a message has been successfully parsed from the input stream.
        """
        self._message_cb = message_cb
        self.loop.run(self._on_data)
        self._message_cb = None

    def stop(self):
        """Stop the event loop."""
        self.loop.stop()

    def close(self):
        """Close the event loop."""
        self.loop.close()

    def _on_data(self, data):
        # Feed raw bytes into the streaming unpacker, then dispatch every
        # message that is now complete; StopIteration means "need more data".
        self._unpacker.feed(data)
        while True:
            try:
                debug('waiting for message...')
                msg = next(self._unpacker)
                debug('received message: %s', msg)
                self._message_cb(msg)
            except StopIteration:
                debug('unpacker needs more data...')
                break
Example #23
0
 def handle(self):
     """Serve msgpack requests on the client socket until it disconnects."""
     pkr = Packer(use_bin_type=True)
     unp = Unpacker(raw=False, max_buffer_size=10 * 1024 * 1024)
     while True:
         chunk = self.cli_sock.recv(1024)
         if not chunk:
             break
         unp.feed(chunk)
         for request in unp:
             self.cli_sock.sendall(pkr.pack(self.process(request)))
Example #24
0
def testPackUnicode():
    """utf-8 encoded unicode values survive both round-trip APIs."""
    samples = [
        six.u(""), six.u("abcd"), [six.u("defgh")], six.u("Русский текст"),
        ]
    for sample in samples:
        decoded = unpackb(packb(sample, encoding='utf-8'),
                          use_list=1, encoding='utf-8')
        assert decoded == sample
        blob = Packer(encoding='utf-8').pack(sample)
        assert Unpacker(BytesIO(blob), encoding='utf-8', use_list=1).unpack() == sample
Example #25
0
def testPackUnicode():
    """Module-level and object-level APIs agree on unicode round-trips."""
    for td in (six.u(""), six.u("abcd"), [six.u("defgh")], six.u("Русский текст")):
        assert unpackb(packb(td, encoding='utf-8'), use_list=1, encoding='utf-8') == td
        data = Packer(encoding='utf-8').pack(td)
        assert Unpacker(BytesIO(data), encoding='utf-8', use_list=1).unpack() == td
def testPackUnicode():
    """Legacy packs/unpacks helpers preserve unicode values."""
    samples = [
        "", "abcd", ("defgh",), "Русский текст",
        ]
    for sample in samples:
        roundtrip = unpacks(packs(sample, encoding='utf-8'), encoding='utf-8')
        assert_equal(roundtrip, sample)
        blob = Packer(encoding='utf-8').pack(sample)
        assert_equal(Unpacker(BytesIO(blob), encoding='utf-8').unpack(), sample)
Example #27
0
def testPackUnicode():
    """unpackb and Unpacker both restore utf-8 unicode payloads."""
    for td in (six.u(""), six.u("abcd"), (six.u("defgh"),), six.u("Русский текст")):
        assert_equal(unpackb(packb(td, encoding='utf-8'), encoding='utf-8'), td)
        packed = Packer(encoding='utf-8').pack(td)
        assert_equal(Unpacker(BytesIO(packed), encoding='utf-8').unpack(), td)
def testPackUnicode():
    """Unicode strings and nested containers survive packing round trips."""
    samples = [
        six.u(""), six.u("abcd"), (six.u("defgh"),), six.u("Русский текст"),
        ]
    for sample in samples:
        roundtrip = unpackb(packb(sample, encoding='utf-8'), encoding='utf-8')
        assert_equal(roundtrip, sample)
        stream = BytesIO(Packer(encoding='utf-8').pack(sample))
        assert_equal(Unpacker(stream, encoding='utf-8').unpack(), sample)
Example #29
0
def testArraySize(sizes=[0, 5, 50, 1000]):
    """Arrays written via pack_array_header round-trip at several sizes."""
    buf = BytesIO()
    pkr = Packer()
    for n in sizes:
        buf.write(pkr.pack_array_header(n))
        for value in range(n):
            buf.write(pkr.pack(value))

    buf.seek(0)
    unp = Unpacker(buf, use_list=1)
    for n in sizes:
        assert unp.unpack() == list(range(n))
Example #30
0
def log_events():
    """Archive raw SUB-socket events into a timestamped msgpack file."""
    sock = ctx.socket(zmq.SUB)
    sock.bind("inproc://raw_events")
    sock.setsockopt(zmq.SUBSCRIBE, "")
    pkr = Packer()
    with open('tweets.%d.msgpack' % int(time.time()), 'wb') as f:
        # intentionally writing the raw bytes and not parsing it here
        while True:
            msg = sock.recv()
            LOG.debug('event received: %d bytes', len(msg))
            record = {'time' : int(time.time()), 'event' : msg}
            f.write(pkr.pack(record))
            f.flush()
Example #31
0
def testArraySize(sizes=[0, 5, 50, 1000]):
    """Header-then-elements arrays unpack back to the expected lists."""
    stream = BytesIO()
    packer = Packer()
    for count in sizes:
        stream.write(packer.pack_array_header(count))
        for item in range(count):
            stream.write(packer.pack(item))

    stream.seek(0)
    unpacker = Unpacker(stream, use_list=1)
    for count in sizes:
        expected = list(range(count))
        assert unpacker.unpack() == expected
Example #32
0
class File(object):
    """Append-only writer of msgpack-framed argument tuples."""

    def __init__(self, io):
        self.io = io
        self.packer = Packer(use_bin_type=True)

    def write(self, *args):
        """Pack the positional arguments as one tuple and write them."""
        # Python 2 `buffer` objects are copied to bytes before packing.
        normalized = tuple(bytes(a) if isinstance(a, buffer) else a
                           for a in args)
        self.io.write(self.packer.pack(normalized))

    def close(self):
        """Flush and close the underlying io object."""
        self.io.flush()
        self.io.close()
Example #33
0
class File(object):
    """Write msgpack-framed argument tuples to an underlying io object."""

    def __init__(self, io):
        self.io = io
        self.packer = Packer(use_bin_type=True)

    def write(self, *args):
        """Pack the positional arguments as one tuple and write them."""
        self.io.write(self.packer.pack(args))

    def flush(self):
        """Flush the underlying io object."""
        self.io.flush()

    def close(self):
        """Close the underlying io object."""
        self.io.close()
Example #34
0
 def request(self, command, **params):
     """Send one backdoor command and return the first decoded response."""
     pkr = Packer(use_bin_type=True)
     unp = Unpacker(raw=False, max_buffer_size=10 * 1024 * 1024)
     request = BackdoorRequest(command, params)
     LOG.debug(f'backdoor client sending request {request}')
     self.sock.sendall(pkr.pack(request.to_dict()))
     while True:
         chunk = self.sock.recv(1024)
         if not chunk:
             break
         unp.feed(chunk)
         for payload in unp:
             response = BackdoorResponse(payload['ok'], payload['content'])
             LOG.debug(f'backdoor client received response {response}')
             return response
Example #35
0
class MsgpackWriter(object):
    """Serialize EXML documents to a binary stream using msgpack."""

    def __init__(self, f):
        # f: binary file-like object the packed output is written to
        self.f = f
        self.packer = Packer()

    def pack(self, obj):
        """Pack a single object and write it straight to the output."""
        self.f.write(self.packer.pack(obj))

    def write_document(self, doc):
        """
        Writes a complete EXML document

        The document is a 3-element msgpack array: the 'exml1' magic tag,
        the schema map, and the chunk list.

        :param doc Document: the document to write
        """
        packer = self.packer
        self.f.write(
            packer.pack_array_header(3) +
            packer.pack('exml1'))
        self.write_schema(doc)
        self.write_chunks(doc)

    def write_schema(self, doc):
        """
        Writes the schema part of an EXML document

        Emitted as a map of layer name -> schema dict, with the terminal
        schema stored under the 'word' key.

        :param doc: the document to write
        :type doc Document:
        """
        d = {}
        d['word'] = schema_to_dict(doc.t_schema)
        for schema in doc.schemas:
            d[schema.name] = schema_to_dict(schema)
        self.pack(d)

    def write_chunks(self, doc):
        """Write the chunk list: currently one whole-document chunk."""
        self.f.write(self.packer.pack_array_header(1))
        self.write_chunk(doc, 0, None)

    def write_chunk(self, doc, start, end):
        """Write one chunk as a [terminals, markables] pair.

        NOTE(review): `start` and the computed `end` are never used below --
        the whole document is always written; confirm before relying on them.
        """
        if end is None:
            end = len(doc.words)
        self.f.write(self.packer.pack_array_header(2))
        # write terminals
        self.pack(objects_to_packed(doc, doc.w_objs, doc.t_schema, False))
        self.write_markables(doc)

    def write_markables(self, doc):
        """Write markable layers as a map of layer name -> packed objects."""
        # Group markables by layer, visiting start positions in order.
        markables_by_layer = defaultdict(list)
        for posn in sorted(doc.markables_by_start):
            for (mlevel, obj) in doc.markables_by_start[posn]:
                markables_by_layer[mlevel.name].append(obj)
        # Only layers that actually contain markables are emitted.
        ne_levels = []
        for schema in doc.schemas:
            if schema.name not in markables_by_layer:
                continue
            ne_levels.append(schema.name)
        self.f.write(self.packer.pack_map_header(len(ne_levels)))
        for schema in doc.schemas:
            if schema.name not in markables_by_layer:
                continue
            objs = markables_by_layer[schema.name]
            self.pack(schema.name)
            self.pack(objects_to_packed(doc, objs, schema, True))
Example #36
0
 def persist(self, dictionary):
     """Write `dictionary`, msgpack-encoded, to the configured file."""
     config = self.app.config
     pkr = Packer()
     with open(config['PERSISTENCE_FILE'], 'wb') as out:
         out.write(pkr.pack(dictionary))
Example #37
0
def test_bad_hook():
    """Pack an unsupported type (complex) without a default hook."""
    packer = Packer()
    packed = packer.pack([3, 1 + 2j])
    unpacked = unpacks(packed)
Example #38
0
class BlacknetServerThread(Thread):
    """ Server thread handling blacknet client connections """
    def __init__(self, bns, client):
        """Set up a per-client session thread for the blacknet server.

        :param bns: server object providing config, logger, blacklist,
            session interval, SSL context and test-mode flag
        :param client: accepted client socket (UNIX, or TCP to be SSL-wrapped)
        """
        super(BlacknetServerThread, self).__init__()

        # Dispatch table: message type -> bound handler method.
        handler = {
            BlacknetMsgType.HELLO: self.handle_hello,
            BlacknetMsgType.CLIENT_NAME: self.handle_client_name,
            BlacknetMsgType.SSH_CREDENTIAL: self.handle_ssh_credential,
            BlacknetMsgType.SSH_PUBLICKEY: self.handle_ssh_publickey,
            BlacknetMsgType.PING: self.handle_ping,
            BlacknetMsgType.GOODBYE: self.handle_goodbye,
        }
        self.handler = handler
        self.started = False

        self.database = BlacknetDatabase(bns.config, bns.logger)
        self.__blacklist = bns.blacklist
        self.__client = None
        self.__connect_lock = Lock()
        self.__cursor = None
        self.__logger = bns.logger
        self.__mysql_error = 0
        self.__session_interval = bns.session_interval
        # NOTE(review): the `encoding` kwarg is legacy msgpack API, removed
        # in msgpack 1.0 -- this requires an older msgpack release.
        self.__unpacker = Unpacker(encoding='utf-8')
        self.__packer = Packer(encoding='utf-8')
        self.__dropped_count = 0
        self.__attempt_count = 0
        # Per-attacker / per-session / per-key caches to avoid DB lookups.
        self.__atk_cache = {}
        self.__ses_cache = {}
        self.__key_cache = {}
        self.__test_mode = bns.test_mode

        # UNIX sockets have no peer address; treat them as "local".
        peer = client.getpeername()
        self.__peer_ip = peer[0] if peer else "local"
        self.__use_ssl = (client.family != socket.AF_UNIX)

        client.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
        if self.__use_ssl:
            client = bns.ssl_context.wrap_socket(client, server_side=True)
        self.__client = client

        self.name = self.peername
        self.log_info("starting session (SSL: %s)" % self.__use_ssl)

    def __del__(self):
        """Best-effort cleanup: drop the client link, then the database."""
        self.disconnect()
        self.database.disconnect()

    def disconnect(self):
        """Shut down and close the client socket exactly once (lock-guarded)."""
        self.__connect_lock.acquire()
        if self.__client:
            self.log_info("stopping session")
            try:
                self.__client.shutdown(socket.SHUT_RDWR)
            except socket.error:
                # Peer may already be gone; closing is all that matters.
                pass
            self.__client.close()
            self.__client = None
        self.__connect_lock.release()

    @property
    def peername(self):
        """Common name from the client TLS certificate, or "unknown"."""
        client = self.__client
        result = "unknown"
        if self.__use_ssl and client:
            cert = client.getpeercert()
            if 'subject' in cert:
                for rdn in cert['subject']:
                    if rdn[0][0] == "commonName":
                        result = rdn[0][1]
        return result

    def handle_sensor(self, client):
        """Main receive loop: decode messages and dispatch them by type."""
        running = True

        while running:
            try:
                buf = client.recv(8192)
            except socket.error as e:
                self.log_warning("socket error: %s" % e)
                break

            # An empty read means the peer closed the connection.
            if not buf:
                break
            self.__unpacker.feed(buf)

            # Each message is a (msgtype, data) pair; a handler returning
            # False ends the session loop.
            for (msgtype, data) in self.__unpacker:
                if msgtype in self.handler:
                    running = self.handler[msgtype](data)
                else:
                    self.handle_unknown(msgtype, data)
            self.database.commit()
        self.disconnect()

    def run(self):
        """Thread entry point: mark the session started and serve the client."""
        self.started = True

        try:
            self.handle_sensor(self.__client)
        except Exception as e:
            self.log_warning("sensor exception: %s" % e)

    @property
    def cursor(self):
        """Database cursor, created lazily and cached on the instance."""
        if not self.__cursor:
            self.__cursor = self.database.cursor()
        return self.__cursor

    def log(self, message, level=BLACKNET_LOG_DEFAULT):
        """Log `message` tagged with this session's name and peer IP."""
        if self.__logger:
            tag = "%s (%s)" % (self.name, self.__peer_ip)
            self.__logger.write("%s: %s" % (tag, message), level)

    def log_error(self, message):
        """Log `message` at error level."""
        self.log(message, BLACKNET_LOG_ERROR)

    def log_warning(self, message):
        """Log `message` at warning level."""
        self.log(message, BLACKNET_LOG_WARNING)

    def log_info(self, message):
        """Log `message` at info level."""
        self.log(message, BLACKNET_LOG_INFO)

    def log_debug(self, message):
        """Log `message` at debug level."""
        self.log(message, BLACKNET_LOG_DEBUG)

    def __mysql_retry(self, function, *args):
        """Call `function(*args)`, retrying after MySQL errors.

        Drops the cursor and connection between attempts (so the next try
        reconnects) and re-raises the last error after
        BLACKNET_DATABASE_RETRIES failed attempts.
        """
        saved_exception = None

        for retry in range(BLACKNET_DATABASE_RETRIES):
            try:
                res = function(*args)
                self.__mysql_error = 0
                return res
            except MySQLError as e:
                # Log each distinct error code only once in a row.
                if self.__mysql_error != e.args[0]:
                    self.__mysql_error = e.args[0]
                    self.log_warning("MySQL: %s" % e)

                self.__cursor = None
                self.database.disconnect()
                saved_exception = e
        raise saved_exception

    ## -- Message handling functions -- ##
    def handle_unknown(self, msgtype, data):
        """Log receipt of an unsupported message type."""
        self.log_error("unknown msgtype %u" % msgtype)

    def handle_hello(self, data):
        """Validate the protocol hello; drop the session on mismatch."""
        if data == BLACKNET_HELLO:
            return True
        self.log_error(
            "client reported buggy hello (got %s, expected %s)" %
            (data, BLACKNET_HELLO))
        return False

    def handle_ping(self, data):
        """Reply to a PING probe with a PONG message; keep the session alive."""
        if self.__client:
            self.log_debug("responding to ping request.")
            reply = [BlacknetMsgType.PONG, None]
            self.__client.send(self.__packer.pack(reply))
        return True

    def handle_goodbye(self, data):
        """Acknowledge GOODBYE; returning False ends the session."""
        client = self.__client
        if client:
            client.send(self.__packer.pack([BlacknetMsgType.GOODBYE, None]))
        return False

    def handle_client_name(self, data):
        """Rename this sensor when the client advertises a new name."""
        if data == self.name:
            return True
        self.log_info("changing client name to %s" % data)
        self.name = data
        return True

    def __add_ssh_attacker(self, data):
        """Insert or refresh the attacker row for this event.

        Keeps a per-connection cache of (first_seen, last_seen) keyed by the
        attacker id and widens the stored activity window when the event
        falls outside of it.  Returns the attacker id.
        """
        cursor = self.cursor

        ip = data['client']
        time = data['time']
        atk_id = blacknet_ip_to_int(ip)

        if atk_id not in self.__atk_cache:
            res = cursor.check_attacker(atk_id)
            if res is None:
                locid = cursor.get_locid(atk_id)
                if locid == BLACKNET_DEFAULT_LOCID:
                    # Typo fix: message previously read "gelocation".
                    self.log_info("no geolocation for client %s" % ip)
                dns = blacknet_gethostbyaddr(ip)
                args = (atk_id, ip, dns, time, time, locid, 0)
                cursor.insert_attacker(args)
                (first_seen, last_seen) = (time, time)
            else:
                (first_seen, last_seen) = res
            self.__atk_cache[atk_id] = (first_seen, last_seen)
        else:
            (first_seen, last_seen) = self.__atk_cache[atk_id]

        # Widen the observed activity window when the event falls outside it.
        if first_seen and time < first_seen:
            self.__atk_cache[atk_id] = (time, last_seen)
            cursor.update_attacker_first_seen(atk_id, time)

        if last_seen and time > last_seen:
            self.__atk_cache[atk_id] = (first_seen, time)
            cursor.update_attacker_last_seen(atk_id, time)

        return atk_id

    def __add_ssh_session(self, data, atk_id):
        """Find or create the SSH session row for this attacker and sensor."""
        cursor = self.cursor
        sensor = self.name
        event_time = data['time']

        if atk_id in self.__ses_cache:
            (ses_id, last_seen) = self.__ses_cache[atk_id]
        else:
            found = cursor.check_session(atk_id, sensor)
            (ses_id, last_seen) = (0, 0) if found is None else found

        # A new session starts once the inactivity interval has elapsed.
        if event_time > last_seen + self.__session_interval:
            ses_id = cursor.insert_session(
                (atk_id, event_time, event_time, sensor))
        else:
            cursor.update_session_last_seen(ses_id, event_time)
        self.__ses_cache[atk_id] = (ses_id, event_time)

        return ses_id

    def __add_ssh_attempt(self, data, atk_id, ses_id):
        """Record a single authentication attempt and return its id."""
        cursor = self.cursor
        # Pubkey authentications carry no password; dict.get yields None then.
        password = data.get('passwd')
        args = (atk_id, ses_id, data['user'], password, self.name,
                data['time'], data['version'])
        return cursor.insert_attempt(args)

    def __add_ssh_pubkey(self, data, att_id):
        """Store the public key (if unseen) and link it to the attempt."""
        cursor = self.cursor
        fingerprint = data['kfp']

        if fingerprint in self.__key_cache:
            key_id = self.__key_cache[fingerprint]
        else:
            existing = cursor.check_pubkey(fingerprint)
            if existing is None:
                key_id = cursor.insert_pubkey(
                    (data['ktype'], data['kfp'], data['k64'], data['ksize']))
            else:
                key_id = existing
            self.__key_cache[fingerprint] = key_id

        cursor.insert_attempts_pubkeys(att_id, key_id)
        return key_id

    def check_blacklist(self, data):
        """Raise when the (sensor, user) pair is blacklisted."""
        user = data['user']
        if not self.__blacklist.has(self.peername, user):
            return
        msg = 'blacklisted user %s from %s using %s' % (
            user, data['client'], data['version'])
        self.log_info(msg)
        raise Exception(msg)

    def __handle_ssh_common(self, data):
        """Shared SSH pipeline: blacklist check, then attacker/session/attempt rows."""
        if self.__test_mode:
            # Test mode pins the source address to a known value.
            data['client'] = '1.0.204.42'

        self.check_blacklist(data)
        atk_id = self.__mysql_retry(self.__add_ssh_attacker, data)
        ses_id = self.__mysql_retry(self.__add_ssh_session, data, atk_id)
        att_id = self.__mysql_retry(
            self.__add_ssh_attempt, data, atk_id, ses_id)
        return (atk_id, ses_id, att_id)

    def handle_ssh_credential(self, data):
        """Process a password authentication event; always keep the link alive."""
        try:
            # The returned ids are unused here; only success/failure matters.
            self.__handle_ssh_common(data)
        except Exception as e:
            self.log_info("credential error: %s" % e)
            self.__dropped_count += 1
        else:
            self.__attempt_count += 1
        return True

    def handle_ssh_publickey(self, data):
        """Process a public-key authentication event; always keep the link alive."""
        try:
            # Only the attempt id is needed to attach the public key.
            (_, _, att_id) = self.__handle_ssh_common(data)
            self.__mysql_retry(self.__add_ssh_pubkey, data, att_id)
        except Exception as e:
            self.log_info("pubkey error: %s" % e)
            self.__dropped_count += 1
        else:
            self.__attempt_count += 1
        return True
class MsgpackStream(object):

    """Two-way msgpack stream that wraps a event loop byte stream.

    This wraps the event loop interface for reading/writing bytes and
    exposes an interface for reading/writing msgpack documents.
    """

    def __init__(self, event_loop):
        """Wrap `event_loop` on a msgpack-aware interface."""
        self._event_loop = event_loop
        # Messages injected from other threads via post(); drained by _run()
        # before blocking on the event loop again.
        self._posted = deque()
        self._packer = Packer(use_bin_type=True)
        self._unpacker = Unpacker()
        # Callback invoked for every parsed message; set only while run()
        # is active.
        self._message_cb = None
        self._stopped = False

    def post(self, msg):
        """Post `msg` to the read queue of the `MsgpackStream` instance.

        Use the event loop `interrupt()` method to push msgpack objects from
        other threads.
        """
        self._posted.append(msg)
        # Wake the event loop so _run() notices the posted message promptly.
        self._event_loop.interrupt()

    def send(self, msg):
        """Queue `msg` for sending to Nvim."""
        debug('sent %s', msg)
        self._event_loop.send(self._packer.pack(msg))

    def run(self, message_cb):
        """Run the event loop to receive messages from Nvim.

        While the event loop is running, `message_cb` will be called whenever
        a message has been successfully parsed from the input stream.
        """
        self._message_cb = message_cb
        self._run()
        self._message_cb = None

    def stop(self):
        """Stop the event loop."""
        self._stopped = True
        self._event_loop.stop()

    def _run(self):
        # Loop until stop() flips the flag; posted messages take priority
        # over reading new bytes from the event loop.
        self._stopped = False
        while not self._stopped:
            if self._posted:
                self._message_cb(self._posted.popleft())
                continue
            self._event_loop.run(self._on_data)

    def _on_data(self, data):
        # Feed raw bytes to the streaming unpacker and dispatch every
        # complete msgpack document it can decode.
        self._unpacker.feed(data)
        while True:
            try:
                debug('waiting for message...')
                msg = next(self._unpacker)
                debug('received message: %s', msg)
                self._message_cb(msg)
            except StopIteration:
                # Not enough buffered bytes for another complete message.
                debug('unpacker needs more data...')
                break
Exemple #40
0
class BlacknetClient(BlacknetSSLInterface):
    """ Holds all the underlying protocol exchanges with BlacknetMasterServer. """

    def __init__(self, config, logger=None):
        """Build a honeypot-side client from `config`; `logger` is optional."""
        BlacknetSSLInterface.__init__(self, config, 'honeypot')
        self.__logger = logger
        self.__server_hostname = None
        self.__server_address = None
        self.__server_socket = None
        self.__server_error = False
        self.__client_name = None
        # RLock: the _server_socket property can be re-entered via
        # _send_handshake -> _send -> _server_socket.
        self.__connect_lock = RLock()
        self.__send_lock = Lock()
        # NOTE(review): the 'encoding' argument only exists in msgpack < 1.0;
        # confirm the pinned msgpack version before upgrading.
        self.__packer = Packer(encoding='utf-8')
        self.__unpacker = Unpacker(encoding='utf-8')

    def __del__(self):
        # Best-effort clean shutdown when the client is garbage collected.
        self.disconnect()

    def log(self, message, level=BLACKNET_LOG_DEFAULT):
        """Log through the configured logger, or stdout when there is none."""
        if self.__logger:
            self.__logger.write("Honeypot: %s" % message, level)
        else:
            sys.stdout.write("%s\n" % message)
            sys.stdout.flush()

    def log_error(self, message):
        """Log message at error level."""
        self.log(message, BLACKNET_LOG_ERROR)

    def log_info(self, message):
        """Log message at info level."""
        self.log(message, BLACKNET_LOG_INFO)

    def log_debug(self, message):
        """Log message at debug level."""
        self.log(message, BLACKNET_LOG_DEBUG)

    @property
    def server_hostname(self):
        """TLS hostname of the master server (cached from ssl_config)."""
        if not self.__server_hostname:
            self.__server_hostname = self.ssl_config[2]
        return self.__server_hostname

    @property
    def server_is_sockfile(self):
        """True when the configured server address is a UNIX socket path."""
        if not self._server_sockfile:
            self._server_sockfile = (not isinstance(self.server_address,
                                                    tuple))
        return self._server_sockfile

    @property
    def server_address(self):
        """Server address: a (host, port) tuple or a UNIX socket path."""
        if not self.__server_address:
            self.__server_address = self.__get_server_address()
        return self.__server_address

    def __get_server_address(self):
        """Parse the 'server' config entry into a path or (host, port)."""
        if self.has_config('server'):
            server = self.get_config('server').strip()
        else:
            server = "%s:%s" % (BLACKNET_SSL_DEFAULT_ADDRESS,
                                BLACKNET_SSL_DEFAULT_PORT)

        # An absolute path denotes a UNIX socket file.
        if server.startswith('/'):
            return server

        addr = server.split(':')
        address = addr[0]
        port = BLACKNET_SSL_DEFAULT_PORT
        if len(addr) > 1:
            try:
                port = int(addr[1])
            except ValueError as e:
                # Keep the default port on a malformed value.
                self.log_error("address port: %s" % e)
        return (address, port)

    @property
    def client_name(self):
        """Optional sensor name advertised to the server."""
        if not self.__client_name and self.has_config('name'):
            self.__client_name = self.get_config('name')
        return self.__client_name

    @property
    def _server_socket(self):
        """Connected (and handshaked) server socket, created on demand.

        Fix: the connect lock is now held via a `with` block so it is
        always released, even on unexpected exceptions.
        """
        send_handshake = False

        with self.__connect_lock:
            try:
                if not self.__server_socket:
                    self.__server_socket = self._connect()
                    send_handshake = True
                    if self.__server_error:
                        self.log_info("client reconnected successfully")
                    else:
                        self.log_info("client connected successfully")
                    self.__server_error = False
            except BaseException:
                # Remember the failure so reconnect logging stays quiet.
                self.__server_error = True
                raise

        # Handshake is sent after releasing the critical section; the RLock
        # allows re-entry when _send() fetches this property again.
        if send_handshake:
            self._send_handshake()
        return self.__server_socket

    def _connect(self):
        """ Connect to the BlacknetMasterServer (without explicit locking) """

        tries = BLACKNET_CLIENT_CONN_RETRIES

        while tries:
            try:
                if self.server_is_sockfile:
                    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
                else:
                    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.connect(self.server_address)

                # Set keep-alive parameters to automatically close connection on error.
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
                if not self.server_is_sockfile:
                    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE,
                                    15)
                    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL,
                                    30)
                    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT,
                                    BLACKNET_CLIENT_CONN_RETRIES)
                break
            except socket.error as e:
                # Only log the first failure to avoid flooding on reconnects.
                if tries == BLACKNET_CLIENT_CONN_RETRIES and not self.__server_error:
                    self.log_error("socket error: %s" % e)
                tries -= 1
                if not tries:
                    raise

        if not self.server_is_sockfile:
            # TLS wrapping only applies to network sockets.
            sock = self.ssl_context.wrap_socket(
                sock, server_hostname=self.server_hostname)
        return sock

    def disconnect(self, goodbye=True):
        """Close the server connection, optionally exchanging GOODBYE first.

        Fixes: the connect lock is released even if close() raises, and the
        bare `except:` no longer swallows KeyboardInterrupt/SystemExit.
        """
        with self.__connect_lock:
            if self.__server_socket:
                if goodbye:
                    try:
                        self._send_goodbye()
                        self._recv_goodbye()
                    except Exception:
                        # Best effort only: the peer may already be gone.
                        pass

                try:
                    self.__server_socket.shutdown(socket.SHUT_RDWR)
                except socket.error:
                    pass
                self.__server_socket.close()
                self.__server_socket = None

    def reload(self):
        """Re-read configuration; reconnect when the server address changed."""
        super(BlacknetClient, self).reload()
        self.__client_name = None
        self.__server_hostname = None

        new_server_address = self.__get_server_address()
        if self.__server_address and self.__server_address != new_server_address:
            self.__server_address = new_server_address
            self.disconnect()

    def _recv_goodbye(self):
        """Wait briefly for the server's GOODBYE acknowledgement."""
        try:
            sock = self._server_socket
            acceptable = select.select([sock], [], [],
                                       BLACKNET_CLIENT_GOODBYE_TIMEOUT)
            if acceptable[0]:
                buf = self._server_socket.recv(4096)
                self.__unpacker.feed(buf)

                for (msgtype, data) in self.__unpacker:
                    # This is the only message type we can receive here.
                    if msgtype == BlacknetMsgType.GOODBYE:
                        self.log_debug(
                            "client received goodbye acknowledgement.")
            else:
                self.log_info(
                    "client did not receive goodbye from server, quitting.")
        except Exception as e:
            self.log_error("goodbye error: %s" % e)

    def _send(self, msgtype, message=None):
        """Pack and send a single [msgtype, message] frame.

        socket.sendall() loops internally until the whole frame is written,
        replacing the previous manual partial-send loop.
        """
        sock = self._server_socket
        sock.sendall(self.__packer.pack([msgtype, message]))

    def _send_handshake(self):
        """Send HELLO and, when configured, the client name."""
        self._send(BlacknetMsgType.HELLO, BLACKNET_HELLO)
        if self.client_name:
            self._send(BlacknetMsgType.CLIENT_NAME, self.client_name)

    def _send_goodbye(self):
        """Notify the server that we are about to disconnect."""
        self._send(BlacknetMsgType.GOODBYE)

    def _send_retry(self, msgtype, message, tries=2):
        """Send a message, reconnecting and retrying on failure.

        Fix: errors were previously swallowed silently; they are now logged,
        though still never propagated to the caller.
        """
        while tries > 0:
            with self.__send_lock:
                try:
                    self._send(msgtype, message)
                    tries = 0
                except Exception as e:
                    self.log_error("send error: %s" % e)
                    self.disconnect(goodbye=False)
                    tries -= 1

    def send_ssh_credential(self, data):
        """Push an SSH password attempt to the server."""
        self._send_retry(BlacknetMsgType.SSH_CREDENTIAL, data)

    def send_ssh_publickey(self, data):
        """Push an SSH public-key attempt to the server."""
        self._send_retry(BlacknetMsgType.SSH_PUBLICKEY, data)

    def send_ping(self):
        """Send a PING and wait for PONG; disconnect when unanswered.

        Fix: the send lock is held via `with` so it is released even when
        an unexpected exception escapes the handler.
        """
        answered = False

        with self.__send_lock:
            try:
                self._send(BlacknetMsgType.PING)

                sock = self._server_socket
                acceptable = select.select([sock], [], [],
                                           BLACKNET_CLIENT_PING_TIMEOUT)
                if acceptable[0]:
                    buf = self._server_socket.recv(4096)
                    self.__unpacker.feed(buf)

                    for (msgtype, data) in self.__unpacker:
                        # This is the only message type we can receive here.
                        if msgtype == BlacknetMsgType.PONG:
                            self.log_debug(
                                "client received pong acknowledgement.")
                            answered = True
                else:
                    self.log_info(
                        "client did not receive pong from server, disconnecting.")
            except Exception as e:
                self.log_error("pong error: %s" % e)
            if not answered:
                self.disconnect(goodbye=False)
Exemple #41
0
 def serialize(cls, objs: Iterable[dict], fobj: IO[bytes]):
     """Pack each object with msgpack and append the bytes to `fobj`."""
     packer = Packer(use_bin_type=True)
     for record in objs:
         fobj.write(packer.pack(byteify_attachments(record)))
Exemple #42
0
    def _schedule(self, batch):
        """
        Row - portion of the queue for each partition id created at some point in time
        Row Key - partition id + score interval + timestamp
        Column Qualifier - discrete score (first three digits after dot, e.g. 0.001_0.002, 0.002_0.003, ...)
        Value - QueueCell msgpack blob

        Where score is mapped from 0.0 to 1.0
        score intervals are
          [0.01-0.02)
          [0.02-0.03)
          [0.03-0.04)
         ...
          [0.99-1.00]
        timestamp - the time when links was scheduled for retrieval.

        :param batch: list of tuples(score, fingerprint, domain, url)
        :return:
        """
        def get_crc32(name):
            # crc32 needs bytes; encode non-bytes input lazily.
            return crc32(name) if type(name) is str else crc32(
                name.encode('utf-8', 'ignore'))

        def get_interval(score, resolution):
            # Map a score in [0, 1] to its half-open interval; the last
            # interval is closed from the right.
            if score < 0.0 or score > 1.0:
                raise OverflowError

            i = int(score / resolution)
            if i % 10 == 0 and i > 0:
                i = i - 1  # last interval is inclusive from right
            return (i * resolution, (i + 1) * resolution)

        timestamp = int(time() * 1E+6)
        data = dict()
        for score, fingerprint, domain, url in batch:
            if type(domain) == dict:
                partition_id = self.partitioner.partition(
                    domain['name'], self.partitions)
                host_crc32 = get_crc32(domain['name'])
            elif type(domain) == int:
                partition_id = self.partitioner.partition_by_hash(
                    domain, self.partitions)
                host_crc32 = domain
            else:
                raise TypeError("domain of unknown type.")
            item = (unhexlify(fingerprint), host_crc32, url, score)
            score = 1 - score  # because of lexicographical sort in HBase
            rk = "%d_%s_%d" % (partition_id, "%0.2f_%0.2f" %
                               get_interval(score, 0.01), timestamp)
            data.setdefault(rk, []).append((score, item))

        table = self.connection.table(self.table_name)
        with table.batch(transaction=True) as b:
            # Fix: dict.iteritems() is Python 2 only; items() works on both.
            for rk, tuples in data.items():
                obj = dict()
                for score, item in tuples:
                    column = 'f:%0.3f_%0.3f' % get_interval(score, 0.001)
                    obj.setdefault(column, []).append(item)

                final = dict()
                packer = Packer()
                for column, items in obj.items():
                    stream = BytesIO()
                    for item in items:
                        stream.write(packer.pack(item))
                    final[column] = stream.getvalue()
                b.put(rk, final)
Exemple #43
0
def _encode_ext(obj):
    """Encode `obj` as a msgpack extension type (codes limited to 0-127).

    The isinstance chain is ordered by observed frequency:
    # of tuples > # of UniqueNames > # of numpy arrays > the rest.
    Raises RuntimeError for unsupported types.
    """
    packer = Packer(**_packer_args)
    if isinstance(obj, tuple):
        # TODO: save as msgpack array without converting to list first
        # restored as a tuple
        return ExtType(12, packer.pack(list(obj)))
    if isinstance(obj, _UniqueName):
        return ExtType(0, _encode_unique_name(obj))
    if isinstance(obj, numpy.ndarray):
        # handle numpy array subclasses
        return ExtType(1, packer.pack(_encode_ndarray(obj)))
    if isinstance(obj, complex):
        # restored as a tuple
        return ExtType(2, packer.pack([obj.real, obj.imag]))
    if isinstance(obj, set):
        # TODO: save as msgpack array without converting to list first
        return ExtType(3, packer.pack(list(obj)))
    if isinstance(obj, frozenset):
        # TODO: save as msgpack array without converting to list first
        return ExtType(4, packer.pack(list(obj)))
    if isinstance(obj, OrderedDict):
        # TODO: save as msgpack array without converting to list first
        return ExtType(5, packer.pack(list(obj.items())))
    if isinstance(obj, deque):
        # TODO: save as msgpack array without converting to list first
        return ExtType(6, packer.pack(list(obj)))
    if isinstance(obj, datetime):
        return ExtType(7, packer.pack(obj.isoformat()))
    if isinstance(obj, timedelta):
        # restored as a tuple
        return ExtType(8,
                       packer.pack([obj.days, obj.seconds, obj.microseconds]))
    if isinstance(obj, Image.Image):
        return ExtType(9, _encode_image(obj))
    # Fix: numpy.bool8 was removed in NumPy 1.24; it was an alias of
    # numpy.bool_, so (numpy.number, numpy.bool_) covers the same types.
    if isinstance(obj, (numpy.number, numpy.bool_)):
        # handle numpy scalar subclasses
        return ExtType(10, packer.pack(_encode_numpy_number(obj)))
    if isinstance(obj, FinalizedState):
        return ExtType(11, packer.pack(obj.data))
    if isinstance(obj, timezone):
        # TODO: save as msgpack array without converting to list first
        # restored as a tuple
        return ExtType(13, packer.pack(list(obj.__getinitargs__())))
    if isinstance(obj, (ndarray_int, ndarray_float, ndarray_complex)):
        return ExtType(14, packer.pack(list(obj.__reduce__()[1])))

    raise RuntimeError("Can't convert object of type: %s" % type(obj))
Exemple #44
0
class Server(object):
    def __init__(self, port, key):
        """Set up the listening socket, crypto key and worker machinery.

        :param port: TCP port to listen on (localhost only)
        :param key: shared AES key (str), encoded to bytes for PyCryptodome
        """
        self._CHUNK_SIZE = 2048
        self._shouldRun = True

        self._port = port
        self._key = key.encode()
        self._server = self._createServerSocket()

        # select() fd sets: server + client feed inputs, client feeds outputs.
        self._inputs = [self._server]
        self._outputs = []

        # Only one client is served at a time.
        self._client = None
        self._clientAddress = None

        self._packer = Packer()
        self._unpacker = Unpacker()

        # AES-CFB cipher pair, created when a client connects.
        self._encoder = None
        self._decoder = None

        self._logger = logging.getLogger(__name__).getChild("Server")

        self._messageDispatcher = MessageDispatcher()
        self._workerPool = WorkerPool()
        self._taskArchive = TaskArchive()

    def start(self):
        self._workerPool.start()
        self._logger.info("Ready")
        while self._shouldRun:
            readable, writable, inError = select.select(
                self._inputs, self._outputs, self._inputs)

            self._handleReadable(readable)
            self._handleWritable(writable)
            self._handleError(inError)

    def stop(self):
        self._logger.debug("Shutting down.")
        self._shouldRun = False
        self._server.close()
        if self._client:
            self._client.close()
        self._workerPool.stop()

    def _createServerSocket(self):
        """Create a localhost listening socket with address reuse enabled."""
        listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Allow quick restarts without waiting for TIME_WAIT to expire.
        listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        listener.bind(("localhost", self._port))
        listener.listen(1)
        return listener

    def _handleReadable(self, readable):
        try:
            for readableSocket in readable:
                if readableSocket is self._server:
                    self._logger.info("Client connecting")
                    if not self._client:
                        self._logger.info("Accepting client")
                        self._acceptClient()
                    else:
                        self._logger.info(
                            "Client already connected, rejecting new client..")
                        self._rejectClient()
                else:
                    self._readClientData(readableSocket)
        except ConnectionResetError as e:
            self._handleDisconnect(self._client, e)

    def _readClientData(self, client):
        data = client.recv(self._CHUNK_SIZE)
        if data:
            if client not in self._outputs:
                self._outputs.append(client)
            decrypted = self._decoder.decrypt(data)
            self._unpacker.feed(decrypted)
            self._processIncomingMessages()
        else:
            self._handleDisconnect(client)

    def _handleDisconnect(self, client, error=""):
        self._logger.info(
            f"Client disconnected {error}. Cleaning up connections and resetting tasks."
        )
        if client in self._outputs:
            self._outputs.remove(client)
        if client in self._inputs:
            self._inputs.remove(client)
        if client:
            client.close()
        self._client = None
        self._clientAddress = None
        self._taskArchive.clearAllTasks()

    def _processIncomingMessages(self):
        """Wrap every decoded message and hand it to the dispatcher."""
        for raw in self._unpacker:
            self._messageDispatcher.dispatchIncomingMessage(NetworkMessage(raw))

    def _acceptClient(self):
        """Accept the pending connection and start the encrypted session."""
        self._client, self._clientAddress = self._server.accept()
        self._inputs.append(self._client)
        self._logger.debug("Client connected")
        self._sendSessionKey()

    def _sendSessionKey(self):
        """Create the AES-CFB cipher pair and share the IV with the client.

        The IV is sent in the clear together with an encrypted copy so the
        client can verify its own cipher setup.
        NOTE(review): sending the encrypted IV leaks a known-plaintext pair;
        confirm this is acceptable for the threat model.
        """
        self._encoder = AES.new(self._key, AES.MODE_CFB)
        # The decoder must reuse the encoder's IV so both directions match.
        self._decoder = AES.new(self._key, AES.MODE_CFB, iv=self._encoder.iv)
        self._logger.debug(f"Setting up session with key: {self._encoder.iv}")
        encoded = self._encoder.encrypt(self._encoder.iv)
        packed = self._packer.pack({
            "iv": self._encoder.iv,
            "encodeTest": encoded
        })
        self._client.sendall(packed)
        self._logger.debug("Session data sent!")

    def _rejectClient(self):
        """Accept and immediately close a second, unwanted client connection."""
        client, _ = self._server.accept()
        client.close()

    def _handleWritable(self, writable):
        """Encrypt and send one queued outgoing message per writable socket.

        Sleeps briefly when the outgoing queue is empty to avoid busy-looping.
        """
        try:
            for s in writable:
                try:
                    msg_obj = self._messageDispatcher.outgoing_message_queue.get_nowait(
                    )  # TODO: refactor this a bit later.
                    serialized = self._packer.pack(msg_obj.raw)
                    encrypted = self._encoder.encrypt(serialized)
                    s.sendall(encrypted)
                    self._messageDispatcher.outgoing_message_queue.task_done(
                    )  # TODO: refactor this a bit later.
                except Empty:
                    time.sleep(1)
        except OSError as e:
            self._handleDisconnect(self._client, e)

    def _handleError(self, inError):
        """Remove and close sockets reported in select()'s exceptional set."""
        for s in inError:
            self._logger.error(f"Some error in handleError {s}")
            if s in self._inputs:
                self._inputs.remove(s)
            if s in self._outputs:
                self._outputs.remove(s)
            s.close()