Example #1
    async def __run(self, ws):
        """Drains all messages from a WebSocket, sending them to the client's
        listeners.

        :param ws: WebSocket to drain.
        """
        q = anyio.create_queue(0)
        await self.taskgroup.spawn(self._check_runtime, q)

        async for msg in ws:
            if isinstance(msg, CloseConnection):
                break
            elif not isinstance(msg, TextMessage):
                log.warning("Unknown JSON message type: %s", repr(msg))
                continue  # ignore
            msg_json = json.loads(msg.data)
            if not isinstance(msg_json, dict) or 'type' not in msg_json:
                log.error("Invalid event: %s", msg)
                continue
            try:
                await q.put(msg_json)
                await self.process_ws(msg_json)
            finally:
                await q.put(None)
        await q.put(False)
Example #2
    def __init__(self, tg: anyio.abc.TaskGroup, config=None, plugin_namespace=None):
        self.logger = logging.getLogger(__name__)
        self.config = deepcopy(_defaults)
        if config is not None:
            self.config.update(config)
        self._build_listeners_config(self.config)

        self._servers = dict()
        self._init_states()
        self._sessions = dict()
        self._subscriptions = dict()

        self._broadcast_queue = anyio.create_queue(100)
        self._tg = tg
        self._do_retain = self.config.get("retain", True)
        if self._do_retain:
            self._retained_messages = dict()

        # Init plugins manager
        context = BrokerContext(self, self.config)
        if plugin_namespace:
            namespace = plugin_namespace
        else:
            namespace = "distmqtt.broker.plugins"
        self.plugins_manager = PluginManager(tg, namespace, context)
Example #3
async def worker(port, queue, num_concurrent_streams, num_requests_per_stream,
                 num_rounds, message_size, load_type):
    async with purerpc.insecure_channel("localhost", port) as channel:
        stub = GreeterStub(channel)
        if load_type == "unary":
            load_fn = do_load_unary
        elif load_type == "stream":
            load_fn = do_load_stream
        else:
            raise ValueError(f"Unknown load type: {load_type}")
        for _ in range(num_rounds):
            start = time.time()
            task_results = anyio.create_queue(sys.maxsize)
            async with anyio.create_task_group() as task_group:
                for _ in range(num_concurrent_streams):
                    await task_group.spawn(load_fn, task_results, stub,
                                           num_requests_per_stream,
                                           message_size)
            end = time.time()
            rps = num_concurrent_streams * num_requests_per_stream / (end -
                                                                      start)
            queue.put(rps)
            results = []
            for _ in range(num_concurrent_streams):
                results.append(await task_results.get())
            queue.put(results)
        queue.close()
        queue.join_thread()
Example #4
async def worker(port, num_concurrent_streams, num_requests_per_stream,
                 num_rounds, message_size, load_type):
    async with purerpc.insecure_channel("localhost", port) as channel:
        stub = GreeterStub(channel)
        if load_type == "unary":
            load_fn = do_load_unary
        elif load_type == "stream":
            load_fn = do_load_stream
        else:
            raise ValueError(f"Unknown load type: {load_type}")
        for idx in range(num_rounds):
            start = time.time()
            task_results = anyio.create_queue(sys.maxsize)
            async with anyio.create_task_group() as task_group:
                for _ in range(num_concurrent_streams):
                    await task_group.spawn(load_fn, task_results, stub, num_requests_per_stream, message_size)
            end = time.time()

            rps = num_concurrent_streams * num_requests_per_stream / (end - start)

            latencies = []
            for _ in range(num_concurrent_streams):
                latencies.append(await task_results.get())

            print("Round", idx, "rps", rps, "avg latency", 1000 * sum(latencies) / len(latencies))
Example #5
    def __init__(self,
                 plugins_manager: PluginManager,
                 session: Session = None):
        self.logger = logging.getLogger(__name__ + "." +
                                        self.__class__.__name__)
        if session:
            self._init_session(session)
        else:
            self.session = None
        self.stream = None
        self.plugins_manager = plugins_manager
        self._tg = plugins_manager._tg

        self._reader_task = None
        self._sender_task = None
        self._reader_stopped = anyio.create_event()
        self._sender_stopped = anyio.create_event()
        self._send_q = anyio.create_queue(10)

        self._puback_waiters = dict()
        self._pubrec_waiters = dict()
        self._pubrel_waiters = dict()
        self._pubcomp_waiters = dict()

        self._disconnecting = False
        self._disconnect_waiter = None
        self._write_lock = anyio.create_lock()
Example #6
    def __init__(self,
                 tg: anyio.abc.TaskGroup,
                 config=None,
                 plugin_namespace=None):
        self.logger = logging.getLogger(__name__)
        self.config = _defaults
        if config is not None:
            self.config.update(config)
        self._build_listeners_config(self.config)

        self._servers = dict()
        self._init_states()
        self._sessions = dict()
        self._subscriptions = dict()
        self._retained_messages = dict()
        self._broadcast_queue = anyio.create_queue(9999)
        self._tg = tg

        # Init plugins manager
        context = BrokerContext(self)
        context.config = self.config
        if plugin_namespace:
            namespace = plugin_namespace
        else:
            namespace = 'distmqtt.broker.plugins'
        self.plugins_manager = PluginManager(tg, namespace, context)
Example #7
    def __init__(self, conn, command, params, seq, expect_body):
        self._conn = conn
        self._command = command
        self._params = params
        self.seq = seq
        self.q = anyio.create_queue(10000)
        self.expect_body = -expect_body
Example #8
    async def test_queue(self):
        queue = create_queue(1)
        assert queue.empty()
        await queue.put('1')
        assert queue.full()
        assert queue.qsize() == 1
        assert await queue.get() == '1'
        assert queue.empty()
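The test above exercises the basic queue API: `put`, `get`, `empty`, `full` and `qsize`. As a minimal, self-contained sketch of how these pieces usually fit together, here is a producer/consumer pair using the same pre-3.0 anyio API that all of these examples rely on (newer anyio releases replace `create_queue()` with memory object streams); the function names and the `None` stop sentinel are illustrative, not taken from any of the projects above:

import anyio

async def producer(q):
    # enqueue a few items, then a None sentinel so the consumer stops
    for i in range(3):
        await q.put(i)
    await q.put(None)

async def consumer(q):
    # the queue is async-iterable (compare Example #17); stop on the sentinel
    async for item in q:
        if item is None:
            return
        print("got", item)

async def main():
    q = anyio.create_queue(10)  # bounded queue with room for 10 items
    async with anyio.create_task_group() as tg:
        await tg.spawn(producer, q)
        await tg.spawn(consumer, q)

anyio.run(main)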
Example #9
    def __init__(self,
                 plugins_manager: PluginManager,
                 session: Session = None):
        super().__init__(plugins_manager, session)
        self._ping_task = None
        self._pingresp_queue = anyio.create_queue(9999)
        self._subscriptions_waiter = dict()
        self._unsubscriptions_waiter = dict()
        self._disconnect_waiter = None
Example #10
    def __init__(self, plugins_manager):
        self._init_states()
        self._plugins_manager = plugins_manager
        self.remote_address = None
        self.remote_port = None
        self.client_id = None
        self.clean_session = None
        self.will_flag = False
        self.will_message = None
        self.will_qos = None
        self.will_retain = None
        self.will_topic = None
        self.keep_alive = 0
        self.publish_retry_delay = 0
        self.broker_uri = None
        self.username = None
        self.password = None
        self.cafile = None
        self.capath = None
        self.cadata = None
        self._packet_id = 0
        self.parent = 0

        self.logger = logging.getLogger(__name__)

        # Used to store outgoing ApplicationMessage while publish protocol flows
        self.inflight_out = OrderedDict()

        # Used to store incoming ApplicationMessage while publish protocol flows
        self.inflight_in = OrderedDict()

        # Stores messages retained for this session
        self.retained_messages = anyio.create_queue(9999)

        # Stores PUBLISH messages ID received in order and ready for application process
        self._delivered_message_queue = anyio.create_queue(9999)

        # The actual delivery process
        self._delivery_task = None
        self._delivery_stopped = anyio.create_event()

        # The broker we're attached to
        self._broker = None
Example #11
    async def run_sub(self, chips, started: anyio.abc.Event = None):
        """Task handler for processing this output."""
        self.queue = anyio.create_queue(1)
        self.reply_queue = anyio.create_queue(1)

        chip = chips.add(self.chip)
        pin = chip.line(self.pin)
        with pin.open(direction=gpio.DIRECTION_OUTPUT,
                      flags=self.flags) as line:
            if started is not None:
                await started.set()
            while True:
                m = await self.queue.get()
                line.value = 1
                try:
                    await anyio.sleep(self.on_time)
                finally:
                    line.value = 0
                await anyio.sleep(self.off_time)
                await self.reply_queue.put(None)
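The handler above implements a small request/reply handshake over two one-slot queues: every item read from `self.queue` triggers one on/off pulse on the GPIO line, and `None` is pushed to `self.reply_queue` once the on/off cycle has completed. A hypothetical caller (the method name `pulse` is illustrative, not taken from the source) could look like this:

    async def pulse(self):
        # request one pulse from run_sub() and wait until it has completed
        await self.queue.put(None)
        await self.reply_queue.get()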
Example #12
    def __init__(self, grpc_connection: GRPCConnection, stream_id: int,
                 socket: SocketWrapper, grpc_socket: "GRPCSocket"):
        self._stream_id = stream_id
        self._grpc_connection = grpc_connection
        self._grpc_socket = grpc_socket
        self._socket = socket
        self._flow_control_update_event = anyio.create_event()
        self._incoming_events = anyio.create_queue(sys.maxsize)
        self._response_started = False
        self._state = GRPCStreamState.OPEN
        self._start_stream_event = None
        self._end_stream_event = None
Example #13
    def __init__(self, service, host="localhost", port=4304):
        self.service = service
        self.host = host
        self.port = port
        self.stream = None
        self._msg_proto = None
        self.requests = deque()
        self._wqueue = anyio.create_queue(100)
        self._scan_task = None
        self._buses = dict()  # path => bus
        self._scan_lock = anyio.create_lock()
        self._scan_args = {}
Example #14
    async def watch(self):
        """
        An async context manager that returns an iterator for changes of
        this pin.

        Values are (out,level) tuples of bool, with "out" and "high"
        represented as True.
        """
        q = anyio.create_queue(10)
        self.mon.add(q)
        try:
            yield q
        finally:
            self.mon.remove(q)
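The docstring describes `watch()` as an async context manager, but the snippet does not show the `@asynccontextmanager` decorator that this yield-with-cleanup pattern implies, so assume one is applied in the source. A usage sketch under that assumption, where `pin` stands for whatever object exposes `watch()` and `print_changes` is an illustrative name:

    async def print_changes(pin):
        # usage sketch, assuming watch() is wrapped with contextlib.asynccontextmanager
        async with pin.watch() as q:
            async for out, level in q:   # (out, level) tuples of bool
                print("output:", out, "high:", level)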
Example #15
    async def test_get_cancel(self):
        async def task():
            nonlocal local_scope
            async with open_cancel_scope() as local_scope:
                await queue.get()

        local_scope = None
        queue = create_queue(1)
        async with create_task_group() as tg:
            await tg.spawn(task)
            await wait_all_tasks_blocked()
            await local_scope.cancel()
            await queue.put(None)

        assert queue.full()
Example #16
async def _aio_gather_iter_pairs(*aio_tasks):
    """Spawn async tasks and yield with pairs of ids with results."""
    aio_tasks_num = len(aio_tasks)
    task_res_q = create_queue(aio_tasks_num)

    async with all_subtasks_awaited() as task_group:
        for task_id, task in enumerate(aio_tasks):
            await task_group.spawn(
                _send_task_res_to_q,
                task_res_q,
                task_id,
                task,
            )

        for _ in range(aio_tasks_num):
            yield await task_res_q.get()
Example #17
    async def test_get_iter(self):
        async def task():
            nonlocal total
            async for msg in queue:
                if msg is None:
                    return
                else:
                    total += msg

        total = 0
        queue = create_queue(1)
        async with create_task_group() as tg:
            await tg.spawn(task)
            await queue.put(1)
            await queue.put(2)
            await queue.put(3)
            await queue.put(None)

        assert queue.empty()
        assert total == 6
Example #18
    async def run(self,
                  amqp,
                  chips,
                  taskgroup,
                  started: anyio.abc.Event = None):
        """Task handler for processing this output."""
        chip = chips.add(self.chip)
        async with amqp.new_channel() as chan:
            await chan.exchange_declare(self.exch,
                                        self.exch_type,
                                        durable=True)
            pin = chip.line(self.pin)
            with chip.line(self.pin).monitor(gpio.REQUEST_EVENT_BOTH_EDGES,
                                             flags=self.flags) as pin:
                q = anyio.create_queue(0)
                await taskgroup.spawn(self.debouncer, chan, q)
                if started is not None:
                    await started.set()

                logger.debug("Mon started %s %s %d", self.name, self.chip,
                             self.pin)
                async for evt in pin:
                    logger.debug("see %s %s", self.name, evt)
                    await q.put(evt)
Example #19
    async def _reconnect(self):
        await self.service.push_event(ServerDisconnected(self))
        await self._write_task.cancel()
        self._write_task = None
        if self._scan_task is not None:
            await self._scan_task.cancel()
            self._scan_task = None
        await self.stream.close()
        self.stream = None

        backoff = 0.2

        while True:
            try:
                self.stream = await anyio.connect_tcp(self.host, self.port)
            except OSError:
                await anyio.sleep(backoff)
                if backoff < 10:
                    backoff *= 1.5
            except BaseException as exc:
                logger.exception("Owch")
            else:
                self._msg_proto = MessageProtocol(self, is_server=False)
                # re-send messages, but skip those that have been cancelled
                logger.warning("Server %s restarting", self.host)
                ml, self.requests = list(self.requests), deque()
                self._wqueue = anyio.create_queue(100)
                await self.service.push_event(ServerConnected(self))
                v_w = ValueEvent()
                await self.service.nursery.spawn(self._writer, v_w)
                self._write_task = await v_w.get()
                for msg in ml:
                    if not msg.cancelled:
                        await self._wqueue.put(msg)
                await self.service.nursery.spawn(partial(self.start_scan, **self._scan_args))
                return
Example #20
    def create_queue(self, max_size: int) -> anyio.Queue:
        return anyio.create_queue(max_size)
Example #21
    async def __aenter__(slf):
        # note: "slf" is this helper object; "self" presumably refers to the
        # enclosing instance captured from the surrounding scope
        assert self._event_queue is None
        self._event_queue = anyio.create_queue(1000)  # bus events
        return slf
Example #22
    def __init__(self, server):
        self.server = server
        self._recv_q = anyio.create_queue(1)
        self._buffer = b""
Example #23
    async def _task_setup(self):
        assert self._q is None, self._q
        self._q = anyio.create_queue(20)
Example #24
    def __setstate(self, state):
        self.__dict__.update(state)
        # re-create the message queues as fresh, empty instances after restoring state
        self.retained_messages = anyio.create_queue(9999)
        self._delivered_message_queue = anyio.create_queue(9999)
Example #25
    async def __aenter__(self):
        self._q = anyio.create_queue(QSIZE)
        await self.client._subscribe(self)
        return self
Example #26
    async def test_zero_capacity(self):
        """Ensure that max_size=0 creates an infinite capacity queue on all backends."""
        queue = create_queue(0)
        assert not queue.full()
Example #27
    def __init__(self, capacity: int):
        capacity += 1  # Leave room for `Break` item.
        self._queue = anyio.create_queue(capacity)
        self._break = object()  # unique sentinel representing the `Break` item
Example #28
    def open(self):
        self.q = anyio.create_queue(10)
        log.debug("ADD %s", self.event_type)
        self.client.event_listeners.setdefault(
            self.event_type, list()).append(self)
Example #29
    def __init__(self,
                 plugins_manager: PluginManager,
                 session: Session = None):
        super().__init__(plugins_manager, session)
        self._pending_subscriptions = anyio.create_queue(9999)
        self._pending_unsubscriptions = anyio.create_queue(9999)
Example #30
    async def __aenter__(self):
        if self.q is None:
            self.q = anyio.create_queue(self._qlen + 2)
        return await super().__aenter__()