Example #1
from queue import SimpleQueue
from threading import Thread

from confluent_kafka import Consumer


class StreamReader(Thread):
    MAX_SHUTDOWN_COUNTER = 6
    POLL_TIMEOUT = 1.0

    def __init__(self, server, topic, group_id):
        self._consumer = Consumer({
            'bootstrap.servers': server,
            'group.id': group_id,
            'auto.offset.reset': 'earliest'
        })
        self._consumer.subscribe([topic])
        self._shutdown_counter = 0
        self._queue = SimpleQueue()
        super().__init__()

    def fetch(self):
        while True:
            msg = self._consumer.poll(StreamReader.POLL_TIMEOUT)
            if msg is None or msg.error():
                # count misses and errors; stop after MAX_SHUTDOWN_COUNTER
                self._shutdown_counter += 1
                if self._shutdown_counter == StreamReader.MAX_SHUTDOWN_COUNTER:
                    break
            else:
                yield msg.value().decode('utf-8')

    def run(self):
        for message in self.fetch():
            self._queue.put_nowait(message)
        self._queue.put_nowait(None)

    def queue(self):
        return self._queue
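
A minimal usage sketch for the reader above; the broker address, topic, and group id are placeholders, and the confluent-kafka package plus a running broker are assumed:

reader = StreamReader('localhost:9092', 'my-topic', 'my-group')
reader.start()
queue = reader.queue()
while True:
    message = queue.get()
    if message is None:  # run() enqueues None once the stream shuts down
        break
    print(message)
reader.join()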
Example #2
from queue import SimpleQueue


def breadth_first_spelunking(player, graph):
    breadcrumbs = SimpleQueue()
    visited_rooms = set()
    breadcrumbs.put_nowait([player.current_room.id])
    # retrace path as long as there are breadcrumbs in the queue
    while not breadcrumbs.empty():
        path = breadcrumbs.get_nowait()
        last_room = path[-1]
        # Check if we hit an unexplored room
        if last_room not in visited_rooms:
            # Log it
            visited_rooms.add(last_room)
            # Check exits
            for room_exit in graph[last_room]:
                # Return path if we find an unexplored exit
                if graph[last_room][room_exit] == "?":
                    return path
                # Otherwise, keep on truckin'
                else:
                    breadcrumb_path = list(path)
                    breadcrumb_path.append(graph[last_room][room_exit])
                    breadcrumbs.put_nowait(breadcrumb_path)
    # when we run out of breadcrumbs in our queue, every reachable room
    # has been explored, so return an empty list as we exit the loop
    return list()
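
For reference, a sketch of the adjacency format the function expects — a dict of room ids to {direction: neighbour-or-'?'} maps — with a hypothetical player stub:

from types import SimpleNamespace

graph = {
    0: {'n': 1, 's': '?'},  # room 0 has an unexplored southern exit
    1: {'s': 0, 'e': 2},
    2: {'w': 1},
}
player = SimpleNamespace(current_room=SimpleNamespace(id=1))
print(breadth_first_spelunking(player, graph))  # -> [1, 0]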
Example #3
    def __init__(self,
                 create_func: Callable,
                 create_args: Optional[Tuple] = None,
                 create_kwargs: Optional[Dict] = None,
                 min_size: int = 1,
                 max_size: int = 4,
                 name: Optional[str] = None) -> None:
        assert create_func, 'A create function must be provided'
        assert 1 <= min_size <= max_size <= 32, (
            f'Pool size out of range: min={min_size}, max={max_size}')
        if create_args is None:
            create_args = ()
        if create_kwargs is None:
            create_kwargs = {}
        create_func = functools.partial(create_func, *create_args,
                                        **create_kwargs)
        pool = PoolQueue()
        # pre-populate the pool with min_size connections
        for conn in [create_func() for _ in range(min_size)]:
            pool.put_nowait(conn)
        self._create_func = create_func
        self._pool = pool
        self._size = min_size
        self._closed = False
        self._lock = threading.Lock()
        self.min_size = min_size
        self.max_size = max_size
        self.name = name
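
The class name is not shown in this excerpt; assuming it is a connection pool (called ConnectionPool here purely for illustration), construction might look like:

import sqlite3

pool = ConnectionPool(create_func=sqlite3.connect,
                      create_args=(':memory:',),
                      min_size=2,
                      max_size=4,
                      name='demo')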
Example #4
def processed_reader(func, items_to_read, support_files):  # pragma: no cover

    process_pool = []

    slots = 8
    reply_queue = SimpleQueue()

    send_queue = SimpleQueue()
    for item_index in range(slots):
        if item_index < len(items_to_read):
            send_queue.put(items_to_read[item_index])

    for i in range(slots):
        # note: despite the variable names, these workers are threads
        process = threading.Thread(
            target=_inner_process,
            args=(func, send_queue, reply_queue),
        )
        process.daemon = True
        process.start()
        process_pool.append(process)

    process_start_time = time.time()
    item_index = slots

    while any(p.is_alive() for p in process_pool):
        try:
            while True:
                records = reply_queue.get_nowait()
                if records == b"END OF RECORDS":
                    break
                # `json` must be a deserializing callable here
                # (e.g. json.loads), not the stdlib json module
                yield from map(json, records)
            if item_index < len(items_to_read):
                # we use this mechanism to throttle reading blobs so we
                # don't exhaust memory
                send_queue.put_nowait(items_to_read[item_index])
                item_index += 1
            else:
                send_queue.put_nowait(TERMINATE_SIGNAL)

        except Empty:  # nosec
            if time.time() - process_start_time > MAXIMUM_SECONDS_PROCESSES_CAN_RUN:
                logging.error(
                    f"Sending TERMINATE to long running multi-processed processes after {MAXIMUM_SECONDS_PROCESSES_CAN_RUN} seconds total run time"
                )
                break
        except GeneratorExit:
            logging.error("GENERATOR EXIT DETECTED")
            break

    for process in process_pool:
        process.join()
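
The _inner_process worker is not shown in this excerpt; a minimal hypothetical implementation consistent with the protocol above (pull an item, push a batch of serialized records, emit b"END OF RECORDS" after each item, exit on TERMINATE_SIGNAL) might look like:

def _inner_process(func, send_queue, reply_queue):  # hypothetical sketch
    while True:
        item = send_queue.get()
        if item == TERMINATE_SIGNAL:
            break
        reply_queue.put(list(func(item)))   # one batch of records per item
        reply_queue.put(b"END OF RECORDS")  # batch delimiter the reader looks for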
Example #5
class StatsReporter(threading.Thread):
    def __init__(self, report_interval: int):
        super().__init__()
        self.report_interval = report_interval
        self.stop = threading.Event()
        self.stats_queue = SimpleQueue()

    def run(self):
        while not self.stop.wait(self.report_interval):
            pool_batch_stats = defaultdict(list)
            while not self.stats_queue.empty():
                pool_uid, batch_stats = self.stats_queue.get()
                pool_batch_stats[pool_uid].append(batch_stats)

            total_processed_batches = sum(
                len(pool_stats) for pool_stats in pool_batch_stats.values())
            logger.info(
                f'Processed {total_processed_batches} batches in last {self.report_interval} seconds:'
            )
            for pool_uid, pool_stats in pool_batch_stats.items():
                total_batches = len(pool_stats)
                total_examples = sum(batch_stats.batch_size
                                     for batch_stats in pool_stats)
                avg_batch_size = mean(batch_stats.batch_size
                                      for batch_stats in pool_stats)
                total_time = sum(batch_stats.processing_time
                                 for batch_stats in pool_stats)
                batches_per_second = total_batches / total_time
                # invert the rate when it drops below one so the printed
                # number actually matches its unit
                batch_performance = (
                    f'{batches_per_second:.2f} batches/s'
                    if batches_per_second > 1 else
                    f'{total_time / total_batches:.2f} s/batch')

                examples_per_second = total_examples / total_time
                example_performance = (
                    f'{examples_per_second:.2f} examples/s'
                    if examples_per_second > 1 else
                    f'{total_time / total_examples:.2f} s/example')

                logger.info(
                    f'{pool_uid}: '
                    f'{total_batches} batches ({batch_performance}), '
                    f'{total_examples} examples ({example_performance}), '
                    f'avg batch size {avg_batch_size:.2f}')

    def report_stats(self, pool_uid, batch_size, processing_time):
        batch_stats = BatchStats(batch_size, processing_time)
        self.stats_queue.put_nowait((pool_uid, batch_stats))
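
BatchStats is not defined in this excerpt; a hedged usage sketch with a namedtuple stand-in:

from collections import namedtuple

BatchStats = namedtuple('BatchStats', ['batch_size', 'processing_time'])

reporter = StatsReporter(report_interval=60)
reporter.start()
reporter.report_stats('pool-1', batch_size=32, processing_time=0.8)
# later: signal run() to exit at its next wake-up and wait for it
reporter.stop.set()
reporter.join()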
Example #6
class ThreadedFunction(Thread):
    def __init__(self, function: Callable) -> None:
        super().__init__(daemon=True)

        self.__stop_event = Event()
        self.__function = function
        self.__call_queue = Queue()
        self.__result_map: Dict[UUID, Result] = dict()

    def run(self) -> None:
        while not self.__stop_event.is_set():
            item = self.__call_queue.get()
            if item is None:  # sentinel pushed by stop() to unblock get()
                break
            uuid, args, kwargs = item
            # a single call covers every combination of positional and
            # keyword arguments, including the empty ones
            self.__result_map[uuid].resolve(self.__function(*args, **kwargs))

    def stop(self) -> None:
        self.__stop_event.set()
        # unblock the worker if it is waiting on an empty queue
        self.__call_queue.put_nowait(None)
        Thread.join(self)

    def __call__(self, *args, **kwargs) -> Any:
        uuid = uuid4()
        result = Result()
        # register the Result before queueing the call so the worker
        # can never observe an unknown uuid
        self.__result_map[uuid] = result
        self.__call_queue.put_nowait((uuid, args, kwargs))
        return result

    def __del__(self) -> None:
        self.stop()
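
Result is not part of the snippet; a minimal hypothetical stand-in matching the resolve() call above can be built on threading.Event:

import threading

class Result:  # hypothetical sketch, not the original class
    def __init__(self):
        self._event = threading.Event()
        self._value = None

    def resolve(self, value):
        self._value = value
        self._event.set()

    def get(self, timeout=None):
        # block until the worker thread resolves the value
        self._event.wait(timeout)
        return self._value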
Example #7
class Consumer:
    def __init__(self,
                 broker,
                 schema_registry,
                 topic,
                 logging_enabled=False,
                 groupId="asgardConsumerGroup",
                 autocommit=True):
        """
        Initialiser for Confluent Consumer using AvroConsumer. 
        Each consumer can only be subscribed to one topic 
        Parameters
        ----------
        broker: str
            The URL of the broker (example: 'localhost:9092')
        schema_registry: str
            The URL of the confluent Schema Registry endpoint (example: 'http://localhost:8081')
        topic: str
            The topic to subscribe too
        logger: Logger object, Optional
            The logger object which will be used to log messages if provided
        groupId: str, Optional
            An optional groupId which can be used to loadbalance consumers default is "asgard"
        """
        """self.__consumer = AvroConsumer(
            {
                "bootstrap.servers": broker,
                "group.id": groupId,
                "schema.registry.url": schema_registry,
                "enable.auto.commit": autocommit 
            }
        )"""
        self.__consumer = KafkaConsumer({
            "bootstrap.servers": broker,
            "group.id": groupId,
            "enable.auto.commit": autocommit,
            "auto.offset.reset": "latest"
        })
        self.autocommit = autocommit
        if not autocommit:
            self.consumed_messages = SimpleQueue()
        self.__consumer.subscribe([topic])
        if logging_enabled:
            self.logger = logging.getLogger(__name__)
        else:
            self.logger = None

    def consume(self):
        """
        Method to consume and return a message if one exists and it can be deserialized

        Returns
        -------
        str
            The received message payload as a string
        None
            No message has been received or an error has occurred
        """
        msg = None
        try:
            msg = self.__consumer.poll(1)
        except SerializerError as e:
            self.__log_msg("Message deserialization has failed {}: {}".format(
                msg, e),
                           "See the following stack trace",
                           f"{traceback.format_exc()}",
                           delimeter="\n",
                           level="ERROR")
        except RuntimeError as e:
            self.__log_msg(
                "The consumer has been closed and cannot recieve messages",
                level="ERROR")
        except Exception as e:
            self.__log_msg("An unkown error has occured {}".format(e),
                           "See the following stack trace",
                           f"{traceback.format_exc()}",
                           delimeter="\n",
                           level="ERROR")
        if msg is not None:
            if msg.error():
                self.__log_msg("Consumer error: {}".format(msg.error()),
                               level="ERROR")
            else:
                if not self.autocommit:
                    self.consumed_messages.put_nowait(msg)
                return json.loads(msg.value().decode()).get("payload")

    def __enter__(self):
        return self.__consumer

    def __exit__(self, *args):
        self.close()

    def __log_msg(
        self,
        *messages,
        level="NOTSET",
        delimeter=" ",
    ):
        levels = {
            "CRITICAL": logging.CRITICAL,
            "ERROR": logging.ERROR,
            "WARNING": logging.WARNING,
            "INFO": logging.INFO,
            "DEBUG": logging.DEBUG,
            "NOTSET": logging.NOTSET
        }
        msg = delimeter.join(messages)
        if self.logger is not None:
            if level not in levels:
                raise ValueError(
                    f"level {level} is not valid must be one of {list(levels.keys())}"
                )
            self.logger.log(levels[level], msg)
        else:
            print(f"{level}: {msg}")

    def commit(self, asynchronous=True):
        if not self.autocommit and not self.consumed_messages.empty():
            msg = self.consumed_messages.get_nowait()
            self.__consumer.commit(msg, asynchronous=asynchronous)

    def close(self):
        """
        Close the consumer, Once called this object cannot be reused
        """
        self.__consumer.close()
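
A hedged usage sketch; the broker, registry, and topic values are placeholders and a reachable Kafka cluster is assumed:

consumer = Consumer('localhost:9092', 'http://localhost:8081', 'my-topic',
                    autocommit=False)
try:
    while True:
        payload = consumer.consume()
        if payload is not None:
            print(payload)
            consumer.commit()
finally:
    consumer.close()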
Example #8
class ThreadPoolExecutor(ThreadPoolExecutorBase):
    __slots__ = (
        "__futures",
        "__pool",
        "__tasks",
        "__write_lock",
        "__thread_events",
    )

    def __init__(
        self,
        max_workers: int = max((cpu_count(), 4)),
        loop: AbstractEventLoop = None,
    ) -> None:
        """"""
        if loop:
            warnings.warn(DeprecationWarning("loop argument is obsolete"))

        self.__futures = set()  # type: typing.Set[Future[Any]]

        self.__thread_events = set()  # type: typing.Set[threading.Event]
        self.__tasks = SimpleQueue()  # type: SimpleQueue[Optional[WorkItem]]
        self.__write_lock = threading.RLock()

        pools = set()
        for idx in range(max_workers):
            pools.add(self._start_thread(idx))

        self.__pool = frozenset(
            pools)  # type: typing.FrozenSet[threading.Thread]

    def _start_thread(self, idx: int) -> threading.Thread:
        event = threading.Event()
        self.__thread_events.add(event)

        thread = threading.Thread(
            target=self._in_thread,
            name="[%d] Thread Pool" % idx,
            args=(event, ),
        )

        thread.daemon = True
        thread.start()
        return thread

    def _in_thread(self, event: threading.Event) -> None:
        while True:
            work_item = self.__tasks.get()

            if work_item is None:
                break

            try:
                if work_item.loop.is_closed():
                    log.warning(
                        "Event loop is closed. Call %r skipped",
                        work_item.func,
                    )
                    continue

                work_item()
            except asyncio.CancelledError:
                break
            finally:
                del work_item

        event.set()

    def submit(self, fn: F, *args: Any,
               **kwargs: Any) -> Future:  # type: ignore
        """
        Submit blocking function to the pool
        """
        if fn is None or not callable(fn):
            raise ValueError("First argument must be callable")

        loop = asyncio.get_event_loop()

        with self.__write_lock:
            future = loop.create_future()  # type: asyncio.Future[Any]
            self.__futures.add(future)
            future.add_done_callback(self.__futures.remove)

            self.__tasks.put_nowait(
                WorkItem(
                    func=fn,
                    args=args,
                    kwargs=kwargs,
                    future=future,
                    loop=loop,
                ))

            return future

    def shutdown(self, wait: bool = True) -> None:
        for _ in self.__pool:
            self.__tasks.put_nowait(None)

        for f in filter(lambda x: not x.done(), self.__futures):
            f.set_exception(ThreadPoolException("Pool closed"))

        if not wait:
            return

        while not all(e.is_set() for e in self.__thread_events):
            time.sleep(0)

    def _adjust_thread_count(self) -> None:
        raise NotImplementedError

    def __del__(self) -> None:
        self.shutdown()
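
A minimal asyncio sketch of submitting a blocking call to this executor and awaiting its future; WorkItem (not shown) is assumed to run the function and resolve the future thread-safely:

import asyncio
import time


async def main():
    executor = ThreadPoolExecutor(max_workers=2)
    try:
        # time.sleep runs in a pool thread; the loop stays responsive
        result = await executor.submit(time.sleep, 0.1)
        print('done', result)
    finally:
        executor.shutdown()

asyncio.run(main())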
Example #9
class ThreadPoolExecutor(ThreadPoolExecutorBase):
    __slots__ = (
        '__futures',
        '__pool',
        '__tasks',
        '__write_lock',
        '__thread_events',
    )

    def __init__(self, max_workers=max((cpu_count(), 4)), loop=None):
        if loop:
            warnings.warn(DeprecationWarning("loop argument is obsolete"))

        self.__futures = set()

        self.__pool = set()
        self.__thread_events = set()
        self.__tasks = SimpleQueue()
        self.__write_lock = threading.RLock()

        for idx in range(max_workers):
            self.__pool.add(self._start_thread(idx))

        self.__pool = frozenset(self.__pool)

    def _start_thread(self, idx):
        event = threading.Event()
        self.__thread_events.add(event)

        thread = threading.Thread(target=self._in_thread,
                                  name="[%d] Thread Pool" % idx,
                                  args=(event, ))

        thread.daemon = True
        thread.start()
        return thread

    def _in_thread(self, event: threading.Event):
        while True:
            work_item = self.__tasks.get()

            if work_item is None:
                break

            try:
                if work_item.loop.is_closed():
                    log.warning("Event loop is closed. Call %r skipped",
                                work_item.func)
                    continue

                work_item()
            except asyncio.CancelledError:
                break
            finally:
                del work_item

        event.set()

    def submit(self, fn, *args, **kwargs):
        if fn is None or not callable(fn):
            raise ValueError('First argument must be callable')

        loop = asyncio.get_event_loop()

        with self.__write_lock:
            future = loop.create_future()  # type: asyncio.Future
            self.__futures.add(future)
            future.add_done_callback(self.__futures.remove)

            self.__tasks.put_nowait(
                WorkItem(func=fn,
                         args=args,
                         kwargs=kwargs,
                         future=future,
                         loop=loop))

            return future

    def shutdown(self, wait=True):
        for _ in self.__pool:
            self.__tasks.put_nowait(None)

        for f in filter(lambda x: not x.done(), self.__futures):
            f.set_exception(ThreadPoolException("Pool closed"))

        if not wait:
            return

        while not all(e.is_set() for e in self.__thread_events):
            time.sleep(0)

    def _adjust_thread_count(self):
        raise NotImplementedError

    def __del__(self):
        self.shutdown()
Example #10
class Connection:
    """Describes a connection either from the server to some client or from the client
    to the server

    Attributes:
        connection (socket.socket): how we communicate with the other entity
        address (str): where the entity connected from / where we connected to

        send_queue (queue[bytes]): the packets that we need to send
        rec_queue (queue[Packet]): the packets that they have sent us

        curr_send_packet (optional BytesIO): if we are currently trying to send a message
            to the client, this is the serialized message we are trying to send (that has
            already been removed from the send_queue)
        curr_rec (deque[bytes]): the things that we have in memory received
    """
    def __init__(self, connection: socket.socket, address: str) -> None:
        self.connection = connection
        self.address = address

        self.send_queue = Queue()
        self.rec_queue = Queue()

        self.curr_send_packet: typing.Optional[io.BytesIO] = None
        self.curr_rec = deque()

    def disconnected(self):
        """Returns True if the connection is dead for whatever reason, False otherwise"""
        return self.connection is None

    def update(self):
        """Handles sending and receiving packets in a non-blocking way. Must be called very
        regularly for send() and receive() to actually do anything
        """
        if self.disconnected():
            return

        try:
            self._handle_send()
            self._handle_rec()
        except BlockingIOError:
            pass
        except OSError:
            self.connection = None
            print('[networking.shared] connection lost')
            traceback.print_exc()

    def _handle_send(self):
        if self.curr_send_packet is None:
            if self.send_queue.empty():
                return
            packet_serd = self.send_queue.get_nowait()
            self.curr_send_packet = io.BytesIO()
            self.curr_send_packet.write(
                len(packet_serd).to_bytes(4, 'big', signed=False))
            self.curr_send_packet.write(packet_serd)
            self.curr_send_packet.seek(0, 0)

        for _ in range(128):  # avoid sending more than 512kb in one go
            block = self.curr_send_packet.read(BLOCK_SIZE)
            if not block:
                self.curr_send_packet = None
                return

            amt_sent = self.connection.send(block)
            if amt_sent < len(block):
                self.curr_send_packet.seek(amt_sent - len(block), 1)
                return

    def _try_from_recq(self, amt: int) -> typing.Optional[bytes]:
        """Tries to read the specified number of bytes from the receive queue.
        If this fails to get that many bytes the receive queue is effectively
        unaltered, otherwise the bytes are removed from the receive queue
        and returned"""

        if not self.curr_rec:
            return None
        if len(self.curr_rec) == 1 or len(self.curr_rec[0]) >= amt:
            # happy / most common case
            if len(self.curr_rec[0]) < amt:
                return None
            block = self.curr_rec.popleft()
            if len(block) == amt:
                return block

            self.curr_rec.appendleft(block[amt:])
            return block[:amt]

        result = io.BytesIO()
        curlen = 0
        while self.curr_rec:
            block = self.curr_rec.popleft()
            if curlen + len(block) == amt:
                # another happy / common case
                result.write(block)
                return result.getvalue()

            if curlen + len(block) < amt:
                result.write(block)
                continue

            result.write(block[:amt])
            self.curr_rec.appendleft(block[amt:])
            return result.getvalue()

        # didn't get enough data, but now the curr_rec queue is all merged
        # so we will get the top happy case
        self.curr_rec.appendleft(result.getvalue())
        return None

    def _handle_rec(self):
        for _ in range(128):  # avoid reading more than 512kb in one go
            block = self.connection.recv(BLOCK_SIZE)
            if not block:
                self.connection.close()
                self.connection = None
                break
            self.curr_rec.append(block)
            if len(block) < BLOCK_SIZE:
                break

        for _ in range(8):  # avoid parsing too many packets at once
            lenblock = self._try_from_recq(4)
            if not lenblock:
                return
            explen = int.from_bytes(lenblock, 'big', signed=False)
            block = self._try_from_recq(explen)
            if not block:
                self.curr_rec.appendleft(lenblock)
                return

            packet = ser.deserialize(block)
            if not isinstance(packet, packets.Packet):
                raise ValueError(
                    f'got non-packet {packet} (type={type(packet)})')
            self.rec_queue.put(packet)

    def send(self, packet: packets.Packet):
        """Sends this client the specified packet"""
        if self.disconnected():
            return
        self.send_queue.put_nowait(ser.serialize(packet))

    def send_serd(self, packet_serd: bytes):
        """Sends this client the serialized packet"""
        if self.disconnected():
            return
        self.send_queue.put_nowait(packet_serd)

    def read(self) -> typing.Optional[packets.Packet]:
        """Returns the packet from the client if there is one"""
        return self.rec_queue.get_nowait(
        ) if not self.rec_queue.empty() else None

    def has_pending(self, read=True, write=True) -> bool:
        """Returns True if there are pending sends / receives, False otherwise"""
        if write and not self.send_queue.empty():
            # have things to send still
            return True
        if read and not self.rec_queue.empty():
            # have things that we've parsed but haven't been read() yet
            return True
        if write and self.curr_send_packet is not None:
            # in the middle of sending something
            return True
        if read and self.curr_rec:
            # have things not yet parsed / incomplete
            return True
        return False
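
A rough sketch of driving a Connection with a non-blocking socket; packets, ser, and BLOCK_SIZE come from the surrounding project, and a listening peer at the placeholder address is assumed:

import socket

sock = socket.create_connection(('localhost', 5000))  # placeholder address
sock.setblocking(False)
conn = Connection(sock, 'localhost:5000')

conn.send(packets.Packet())  # hypothetical; real Packet subclasses take project-specific args
while not conn.disconnected():
    conn.update()            # pump sends/receives; call this every tick
    packet = conn.read()
    if packet is not None:
        print('received', packet)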
Example #11
class Watcher():
    @staticmethod
    def __openSerialPort__(port: str, verbose=False) -> SerialPort:
        # `options` is a module-level dict of serial-port settings (not shown)
        serialPort = SerialPort(**options)
        serialPort.port = port
        try:
            serialPort.open()
            if verbose:
                print("Port {0} is opened".format(port))
        except Exception as e:
            print('Problem opening port ' + port)
            print(e)
            sys.exit(-1)
        return serialPort

    @property
    def verbose(self) -> bool:
        return self.__startupArguments__.verbose

    @property
    def kind(self) -> str:
        return str(self.__startupArguments__.kind)

    def log(self, msg):
        console_log("{}:{}".format(self.kind, msg))

    def __init__(self, args: StartupArguments):
        self.__startupArguments__ = args
        self.__msgQueue__ = Queue()
        self.__input__ = self.__openSerialPort__(args.portFrom, args.verbose)
        self.__output__ = self.__openSerialPort__(args.portTo, args.verbose)
        self.__buffer__ = b''
        self.__sequenceEnding__ = False
        self.__parser__ = ParserFactory.createParser(args.kind)
        self.__sio__ = socketio.AsyncClient(reconnection=True)
        self.__nsp__ = '/' + self.kind
        self.__sio__.register_namespace(SocketHubNsp(self.__nsp__))
        self.__halt__ = False

    def __enqueueMessage__(self):
        self.__msgQueue__.put_nowait(self.__buffer__)

    async def __queueWatchCoro__(self) -> dict:
        message = "No data"
        buffer = b''
        try:
            buffer = self.__msgQueue__.get(timeout=5)
            message = self.__parser__.parse(buffer)
        except EmptyQueueException:
            self.log('EmptyQueueException Raised')
        finally:
            msg_type = int(self.__parser__.get_type(message).value)
            data = {'status': message, 'type': msg_type}
            if 'Unknown sequence' in message:
                data['buffer'] = str(buffer)
            return data

    async def __queueWatchTask__(self):
        while 1:
            task = asyncio.ensure_future(self.__queueWatchCoro__())
            data: dict = await task
            if self.verbose:
                self.log(data)
            if self.__sio__.connected:
                try:
                    await self.__sio__.emit('event',
                                            data=data,
                                            namespace=self.__nsp__)
                except Exception as e:
                    console_log('Emit error: ' + str(e))
                    self.__halt__ = True
            else:
                console_log('No connection to hub.')
                self.__halt__ = True

    async def __initializeSocketIOConnection__(self):
        try:
            await self.__sio__.connect('http://localhost:3000/',
                                       transports=['polling'])
        except Exception as e:
            console_log('Connection to hub failed: ' + str(e))
            self.__halt__ = True

    def __queueWatchWorker__(self):
        loop = asyncio.new_event_loop()
        asyncio.ensure_future(self.__queueWatchTask__(), loop=loop)
        self.log('Queue Watch Task is running')
        loop.run_forever()

    def __socketIOWorker__(self):
        loop = asyncio.new_event_loop()
        asyncio.ensure_future(self.__initializeSocketIOConnection__(),
                              loop=loop)
        loop.run_forever()

    def mainloop(self):
        self.log('Initializing...')
        queueWatcherThread = threading.Thread(target=self.__queueWatchWorker__)
        queueWatcherThread.daemon = True
        queueWatcherThread.start()
        socketIOThread = threading.Thread(target=self.__socketIOWorker__)
        socketIOThread.daemon = True
        socketIOThread.start()

        # Await for connection in sync manner
        while not self.__sio__.connected and not self.__halt__:
            pass

        if self.__sio__.connected:
            self.log('Watcher initialized!')
        else:
            self.log("Failed. Rerunning...")
            sys.exit(-1)

        try:
            while not self.__halt__:
                received_byte = self.__input__.read(size=1)
                if received_byte == b'\x02':
                    self.__buffer__ = b''
                elif received_byte == b'\x03':
                    # a message is terminated by two consecutive \x03 bytes
                    if self.__sequenceEnding__:
                        self.__enqueueMessage__()
                        self.__sequenceEnding__ = False
                    else:  # first \x03 of a possible terminator
                        self.__sequenceEnding__ = True
                else:
                    if self.__sequenceEnding__:
                        # a lone \x03 followed by data is not a terminator
                        if self.verbose:
                            self.log('Malformed message received')
                        self.__sequenceEnding__ = False
                    self.__buffer__ += received_byte
                self.__output__.write(received_byte)
                self.__output__.flush()
        except KeyboardInterrupt:
            self.log('Terminating...')

        self.log('Halt')
        sys.exit(-2)
Example #12
    def exec(self):
        logging.info('Start to find requests')
        visited = set()
        session = HTMLSession()
        queue = SimpleQueue()
        queue.put_nowait(self.args['url'])
        visited.add(self.args['url'])
        count = 0
        while not queue.empty():
            count += 1
            if count > self.args['max_page_count']:
                break
            url = queue.get_nowait()
            logging.info('Request on {}'.format(url))
            r = session.get(url, cookies=self.cookies)
            links = r.html.absolute_links
            for link in links:

                def is_exclude():
                    return any(link.startswith(exc)
                               for exc in self.args['exclude'])
                if link.startswith(self.args['url'])\
                        and not is_exclude()\
                        and link not in visited:
                    visited.add(link)
                    queue.put_nowait(link)
            forms = r.html.find('form')
            for form in forms:
                request = {
                    'uuid': randuuid(),
                    'location': url,
                    'url': (urljoin(r.url, form.attrs['action'])
                            if 'action' in form.attrs else url),
                    'method': (form.attrs['method'].upper()
                               if 'method' in form.attrs else 'GET'),
                    'content-type': form.attrs.get(
                        'enctype', 'application/x-www-form-urlencoded'),
                    'fields': {},
                    'form': form,
                }
                inputs = form.find('input')
                for inp in inputs:
                    if 'name' not in inp.attrs:
                        continue
                    typ = inp.attrs.get('type', 'text')
                    name = inp.attrs['name']
                    value = inp.attrs.get('value', '')
                    required = 'required' in inp.attrs
                    if typ == 'radio':
                        if name in request['fields']:
                            request['fields'][name]['values'].append(value)
                        else:
                            request['fields'][name] = {
                                'type': 'radio',
                                'name': name,
                                'required': required,
                                'values': [value]
                            }
                    elif typ == 'checkbox':
                        checked = inp.attrs.get('checked') in ('checked', '')
                        request['fields'][name] = {
                            'type': 'checkbox',
                            'name': name,
                            'required': required,
                            'value': value,
                            'checked': checked
                        }
                    else:
                        request['fields'][name] = {
                            'type': typ,
                            'name': name,
                            'required': required,
                            'default': value
                        }
                textareas = form.find('textarea')
                for textarea in textareas:
                    if 'name' not in textarea.attrs:
                        continue
                    name = textarea.attrs['name']
                    request['fields'][name] = {
                        'type': 'textarea',
                        'name': name,
                        'required': 'required' in textarea.attrs,
                        'default': textarea.text
                    }
                selects = form.find('select')
                for select in selects:
                    if 'name' not in select.attrs:
                        continue
                    name = select.attrs['name']
                    multiple = 'multiple' in select.attrs
                    required = 'required' in select.attrs
                    values = []
                    options = select.find('option')
                    for option in options:
                        if 'value' not in option.attrs:
                            continue
                        values.append(option.attrs['value'])
                    request['fields'][name] = {
                        'type': 'select',
                        'name': name,
                        'multiple': multiple,
                        'required': required,
                        'values': values
                    }
                logging.info('Found form {}'.format(request['uuid']))
                self.requests[request['uuid']] = request
        self.results['requests'] = self.requests
        self.results['urls'] = list(visited)
        logging.info('Found {} forms'.format(len(self.results['requests'])))
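For reference, a hedged sketch of the args mapping this method reads (the crawler is assumed to store it on self.args; the values here are placeholders):

args = {
    'url': 'http://localhost:8000/',  # crawl root; only links under it are followed
    'max_page_count': 100,            # stop after this many pages
    'exclude': ['http://localhost:8000/logout'],  # link prefixes to skip
}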
Example #13
class Producer:

    def __init__(self, *topics, broker, schema_registry, schema, logging_enabled=False):
        """
        Initialization of the Producer, which instantiates an AvroProducer class

        Parameters
        ----------
        broker: str
            The URL of the broker (example: 'localhost:9092')
        schema_registry: str
            The URL of the confluent Schema Registry endpoint (example: 'http://localhost:8081')
        schema: str
            The default AVRO schema to use to serialize messages
        logging_enabled: bool, optional
            If True, messages are logged via a logger named after this module
        topics
            variable length argument list of the string names of topics to produce to
        """
        self.schema = avro.loads(schema)
        self.__producer = AvroProducer(
            {
                "bootstrap.servers": broker,
                "schema.registry.url": schema_registry
            },
            default_key_schema=self.schema
        )
        if logging_enabled:
            self.logger = logging.getLogger(__name__)
        else:
            self.logger = None
        self.topics = topics
        self.produce_flag = True
        self.production_last_stoped = 0
        self.total_time_producing_stoped = 0
        self.__msg_queue = SimpleQueue()
    
    def produce(self, msg, schema=None, callback=None):
        """
        Write a message to confluent kafka using
        the instantiated AvroProducer to serialize

        Parameters
        ----------
        msg: str
            The message to be serialized and sent
        schema: str, Optional
            An optional schema to override the default
            set in the constructor
        callback: Function object, Optional
            An optional callback which will be executed
            whether the producing of the message fails or
            succeeds. This function must take two parameters,
            the first for the error and the second for the message
            (https://docs.confluent.io/current/clients/confluent-kafka-python/#producer)
        """
        # TODO: function partials for better readability ?
        # SOLVED: created dictionary to expand to parameters
        params = {}
        params["value"] = msg
        if schema is not None:
            params["value_schema"] = schema
        else:
            params["value_schema"] = self.schema
        if callback is not None:
            params["on_delivery"] = callback
        for topic in self.topics:
            params["topic"] = topic
            self.__msg_queue.put_nowait(params)
        try:
            while not self.__msg_queue.empty():
                msg = self.__msg_queue.get_nowait()
                self.__producer.produce(**msg)
                self.__producer.flush()
                self.produce_flag = True
                self.production_last_stoped = 0
        
        except SerializerError as e:
            self.__log_msg(
                "Message serialization has failed {}: {} \n".format(msg, e),
                "See the following trace back \n {}".format(traceback.format_exc()),
                level="ERROR"
            )
        except BufferError as e:
            if self.produce_flag or (
                self.production_last_stoped != 0 and
                (time.time() - self.production_last_stoped) >= 3600
            ):
                self.produce_flag = False
                # accumulate downtime from the previous stop before
                # overwriting the timestamp, otherwise the delta is ~0
                if self.production_last_stoped != 0:
                    self.total_time_producing_stoped += (
                        time.time() - self.production_last_stoped)
                self.production_last_stoped = time.time()
                self.__log_msg(
                    "Queue Buffer has reached its maximum capacity, unable to deliver message {}: {}".format(msg,e),
                    "Message production will be shut down until messages can be resent",
                    f"Total time message producing has stopped {self.total_time_producing_stoped}",
                    level="CRITICAL",
                    delimeter="\n"
                )
        except KafkaException as e:
            self.__log_msg(
                "An unknown exception has occured specific to Kafka {}: {}".format(msg, e),
                level="ERROR",
                delimeter=""
            )
        except Exception as e:
            self.__log_msg(
                "An unknown exception has occurred {}: {}".format(msg, e),
                f"See the following traceback {traceback.format_exc()}",
                level="ERROR",
                delimeter="\n"
            )

    def __enter__(self):
        """
        Context Manager for Producer, to allow custom actions for producing messages
        """
        return self.__producer
    
    def __exit__(self, *args):
        """
        On exit producer is flushed
        """
        self.__producer.flush()

    def __log_msg(self, *messages, level="NOTSET", delimeter= " "):
        levels = {
            "CRITICAL": logging.CRITICAL,
            "ERROR": logging.ERROR,
            "WARNING": logging.WARNING,
            "INFO": logging.INFO,
            "DEBUG": logging.DEBUG,
            "NOTSET": logging.NOTSET
        }
        msg = delimeter.join(messages)
        if self.logger is not None:
            if level not in levels:
                raise ValueError(
                    f"level {level} is not valid must be one of {list(levels.keys())}"
                )
            self.logger.log(
                levels[level],
                msg
            )
        else:
            print(f"{level}: {msg}")
Example #14
class Port:
   def __init__(self, parser, portNotOpenException, PortExceptionType = None):
      self.__PortExceptionType = PortExceptionType or type(portNotOpenException)
      self.__autoOpenOnWrite = False
      self.__connectionListeners = set()
      self.__debugRead = False
      self.__debugWrite = False
      self.__errorProcessor = print
      self.__packet = None
      self.__parser = parser
      self.__path = None
      self.__portNotOpenException = portNotOpenException
      self.__queue = SimpleQueue()
      self.__throw = False
   
   @property
   def autoOpenOnWrite(self):
      return self.__autoOpenOnWrite
   
   @autoOpenOnWrite.setter
   def autoOpenOnWrite(self, autoOpenOnWrite):
      self.__autoOpenOnWrite = autoOpenOnWrite
   
   @property
   def debugRead(self):
      return self.__debugRead
   
   @debugRead.setter
   def debugRead(self, debugRead):
      self.__debugRead = debugRead
   
   @property
   def debugWrite(self):
      return self.__debugWrite
   
   @debugWrite.setter
   def debugWrite(self, debugWrite):
      self.__debugWrite = debugWrite
   
   @property
   def errorProcessor(self):
      return self.__errorProcessor
   
   @errorProcessor.setter
   def errorProcessor(self, errorProcessor):
      self.__errorProcessor = errorProcessor
   
   @property
   def parser(self):
      return self.__parser
   
   @property
   def path(self):
      return self.__path
   
   @path.setter
   def path(self, path):
      self.__path = path
   
   @property
   def throw(self):
      return self.__throw
   
   @throw.setter
   def throw(self, throw):
      self.__throw = throw
   
   def Packet(self, **kw):
      return Packet(self.__parser.format, **kw)
   
   def addConnectionListener(self, connectionListener):
      self.__addRemoveConnectionListener(True, connectionListener)
   
   def addQueueItem(self, item):
      self.__queue.put_nowait(item)
   
   def close(self):
      if self.isOpen():
         self._close()
   
   def isOpen(self):
      StaticUtils.notImplemented()
   
   def open(self, path = None, **kw):
      try:
         self._open(path, **kw)
         
         return True
      
      except self.__PortExceptionType as e:
         self._processError(e)
      
      finally:
         self.__throw = False
   
   def packet(self, **kw):
      self.__packet = self.Packet(**kw)
      
      return self
   
   def removeConnectionListener(self, connectionListener):
      self.__addRemoveConnectionListener(False, connectionListener)
   
   def processQueue(self):
      try:
         item = self.__queue.get_nowait()
         
         if isinstance(item, ConnectionEstablished):
            for connectionListener in self.__connectionListeners:
               connectionListener.connectionEstablished(self)
         
         elif isinstance(item, ConnectionLost):
            for connectionListener in self.__connectionListeners:
               connectionListener.connectionLost(self, item.e)
         
         elif isinstance(item, DataReceived):
            self.__parser.parse(item.data)
         
         elif isinstance(item, ProcessError):
            self.__errorProcessor(item.e)
         
         else:
            raise UnknownItem(item)
      
      except Empty:
         pass
   
   def write(self, packet = None, throw = None):
      if packet is None:
         packet = self.__packet
      
      if throw is not None:
         self.__throw = throw
      
      try:
         if self.__debugWrite:
            print(packet)
         
         else:
            if not self.isOpen():
               if self.__autoOpenOnWrite:
                  self._open()
               
               else:
                  raise self.__portNotOpenException
            
            self._write(packet)
         
         return True
      
      except self.__PortExceptionType as e:
         self._processError(e)
      
      finally:
         self.__packet = None
         self.__throw = False
   
   def _close(self):
      StaticUtils.notImplemented()
   
   def _open(self, path = None, **kw):
      StaticUtils.notImplemented()
   
   def _processError(self, e):
      if self.__throw:
         raise e
      
      if self.__errorProcessor:
         self.addQueueItem(ProcessError(e))
   
   def _write(self, packet):
      StaticUtils.notImplemented()
   
   def __addRemoveConnectionListener(self, add, connectionListener):
      StaticUtils.assertInheritance(connectionListener, ConnectionListener, "connectionListener")
      
      getattr(self.__connectionListeners, "add" if add else "remove")(connectionListener)
Example #15
class C2Websocket:
    def __init__(self, url, user_name, connection_type, master_password=None):
        self.url = url
        self.user_name = user_name
        self.connection_type = connection_type
        self.master_password = master_password
        self.recv_queues = {}
        self.send_queue = SimpleQueue()
        self.connection_string = self.connect_to_websocket()

    def get_url_connection(self):
        return '{}?name={}&masterPassword={}&userType={}'.format(
            self.url, self.user_name, self.master_password,
            self.connection_type)

    def connect_to_websocket(self):
        ws = websocket.create_connection(self.get_url_connection())
        ws.settimeout(0.05)
        websocket_thread = threading.Thread(
            target=self._listen_websocket_messages, args=(ws, ))
        websocket_thread.start()
        return self.get_url_connection()

    def send_action(self,
                    action,
                    payload=None,
                    drop_queued_action_responses=False,
                    action_options=None):
        if drop_queued_action_responses:
            self.drop_queued_action_responses(action)
        action_to_send = {KEY_ACTION: action, KEY_PAYLOAD: payload}
        if action_options:
            for action_key, action_value in action_options.items():
                action_to_send[action_key] = action_value
        self.send_queue.put_nowait(json.dumps(action_to_send))

    def send_service(self,
                     client_id,
                     service,
                     payload,
                     drop_queued_service_responses=False):
        if drop_queued_service_responses:
            self.drop_queued_service_responses(ACTION_SEND_MESSAGE, client_id,
                                               service)
        payload[KEY_SERVICE] = service
        self.send_action(ACTION_SEND_MESSAGE,
                         payload,
                         action_options={KEY_SEND_MESSAGE_TO: client_id})

    def recv_action_response(self, action, block=True, timeout=None):
        try:
            return self.recv_queues[action].get(block=block, timeout=timeout)
        except KeyError:
            if block:
                self.recv_queues[action] = SimpleQueue()
                return self.recv_action_response(action, block, timeout)
        except KeyboardInterrupt:
            return

    def recv_service_response(self,
                              client_id,
                              service,
                              block=True,
                              timeout=None):
        try:
            return self.recv_queues[(ACTION_SEND_MESSAGE, client_id,
                                     service)].get(block=block,
                                                   timeout=timeout)
        except KeyError:
            if block:
                self.recv_queues[(ACTION_SEND_MESSAGE, client_id,
                                  service)] = SimpleQueue()
                return self.recv_service_response(client_id, service, block,
                                                  timeout)
        except KeyboardInterrupt:
            return

    def drop_queued_action_responses(self, action):
        self.recv_queues[action] = SimpleQueue()

    def drop_queued_service_responses(self, action, client_id, service):
        self.recv_queues[(action, client_id, service)] = SimpleQueue()

    def _add_service_message_to_recv_queue(self, action, client_id, service,
                                           payload):
        if (action, client_id, service) not in self.recv_queues:
            self.recv_queues[(action, client_id, service)] = SimpleQueue()
        self.recv_queues[(action, client_id, service)].put(payload)

    def _add_action_message_to_recv_queue(self, action, payload):
        if action not in self.recv_queues:
            self.recv_queues[action] = SimpleQueue()
        self.recv_queues[action].put(payload)

    def _listen_websocket_messages(self, ws):
        self.keep_listening = True
        try:
            while self.keep_listening:
                try:
                    message = ws.recv()
                    # print('Recv {} -> {}'.format(self.user_name, message))

                except websocket._exceptions.WebSocketTimeoutException:
                    if not self.send_queue.empty():
                        to_send = self.send_queue.get()
                        ws.send(to_send)
                        # print('Sent {} -> {}'.format(self.user_name, to_send))
                    continue

                try:
                    json_message = json.loads(message)
                except json.decoder.JSONDecodeError as e:
                    continue

                if KEY_ACTION not in json_message:
                    continue
                action = json_message[KEY_ACTION]

                if KEY_PAYLOAD in json_message and KEY_FROM in json_message and KEY_SERVICE in json_message[
                        KEY_PAYLOAD]:
                    self._add_service_message_to_recv_queue(
                        action, json_message[KEY_FROM],
                        json_message[KEY_PAYLOAD][KEY_SERVICE],
                        json_message[KEY_PAYLOAD])
                else:
                    self._add_action_message_to_recv_queue(
                        action, json_message[KEY_PAYLOAD])
            time.sleep(1)
            ws.close()
        except websocket._exceptions.WebSocketConnectionClosedException:
            self.connect_to_websocket()

    def stop(self):
        self.keep_listening = False
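
A hedged usage sketch; the URL, connection type, and action name are placeholders, and the KEY_*/ACTION_* constants are assumed to come from the surrounding module. Note that recv_action_response can raise queue.Empty if nothing arrives within the timeout:

c2 = C2Websocket('ws://localhost:8000/ws', 'operator', 'commander',
                 master_password='secret')
c2.send_action('list_clients')  # hypothetical action name
response = c2.recv_action_response('list_clients', timeout=5)
print(response)
c2.stop()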