Example #1
    def __init__(
        self,
        transport: Transport,
        handshake: ServerHandshake,
        organization_id: OrganizationID,
        device_id: DeviceID,
        human_handle: Optional[HumanHandle],
        profile: UserProfile,
        public_key: PublicKey,
        verify_key: VerifyKey,
    ):
        super().__init__(transport, handshake)
        self.organization_id = organization_id
        self.profile = profile
        self.device_id = device_id
        self.human_handle = human_handle
        self.public_key = public_key
        self.verify_key = verify_key

        self.event_bus_ctx = None  # Overwritten in BackendApp.handle_client
        self.channels = trio.open_memory_channel(100)
        self.realms = set()
        self.events_subscribed = False

        self.conn_id = self.transport.conn_id
        self.logger = self.transport.logger = self.transport.logger.bind(
            conn_id=self.conn_id,
            handshake_type=self.handshake_type.value,
            organization_id=self.organization_id,
            device_id=self.device_id,
        )
Example #2
 def __init__(self, *args, **kwargs):
     super().__init__(*args, **kwargs)
     self.controller = Controller()
     self.reader = None  # type: asyncio.StreamReader
     self.writer = None  # type: asyncio.StreamWriter
     self._replies_in, self._replies_out = trio.open_memory_channel(10)
     self.controller.reset(True)
Example #3
async def main():
    global attributes, password_options, spinner
    mainspinner = Halo(text='yamit Importing ', spinner='dots')
    mainspinner.start()
    with open(csv_file, 'r') as f:
        c = csv.reader(f, delimiter=',')
        mainspinner.succeed("Fetching attributes...")
        mainspinner.start()
        for row in c:
            attributes = row
            password_options = attributes
            if 'password' in attributes:
                pw_ind = attributes.index('password')
                attributes = attributes[:pw_ind]
            break
        f.seek(0)
        f.close()

    check_atr()
    mainspinner.succeed("Compared attributes to Okta user schema...")
    async with trio.open_nursery() as nursery:
        send_channel, receive_channel = trio.open_memory_channel(0)
        nursery.start_soon(csv_emitter, send_channel)

        spinner.start()
        for i in range(0, N):
            nursery.start_soon(worker, [
                f'/api/v1/users?activate={activate}',
                receive_channel.clone()
            ])
Example #4
 def listen(self, *event_types, buffer_size=10):
     ''' Return an async iterator that iterates over events matching the
     indicated types. '''
     sender, receiver = trio.open_memory_channel(buffer_size)
     for event_type in event_types:
         self.channels[event_type].add(sender)
     return receiver
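
A minimal consumer sketch for the listen() helper above, assuming the surrounding event bus forwards each published event into every registered sender channel; log_connection_events and the event type names are made up for illustration:

async def log_connection_events(bus):
    # listen() hands back the receive end of a trio memory channel
    receiver = bus.listen('connected', 'disconnected', buffer_size=10)
    async with receiver:
        # a MemoryReceiveChannel is itself an async iterator
        async for event in receiver:
            print('event:', event)
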
Example #5
    async def _handle_lifespan(self, scope: Scope, receive: Callable,
                               send: Callable) -> None:
        import trio

        self.app_queues = {
            path: trio.open_memory_channel(MAX_QUEUE_SIZE)
            for path in self.mounts
        }
        self.startup_complete = {path: False for path in self.mounts}
        self.shutdown_complete = {path: False for path in self.mounts}

        async with trio.open_nursery() as nursery:
            for path, app in self.mounts.items():
                nursery.start_soon(
                    invoke_asgi,
                    app,
                    scope,
                    self.app_queues[path][1].receive,
                    partial(self.send, path, send),
                )

            while True:
                message = await receive()
                for channels in self.app_queues.values():
                    await channels[0].send(message)
                if message["type"] == "lifespan.shutdown":
                    break
Example #6
 async def resolve(**dnsquery):
     global stats_requests, stats_names, stat_res
     nonlocal nursery
     stats_requests += 1
     stat_res = dnsquery
     resolvers = [
         rediscache,
         *sorted(nclients, key=lambda nc: stats_time[nc.name] or 1)
     ]
     stats_names = [r.name for r in resolvers]
     # RedisCache and Cloudflare cannot answer type ANY requests
     type_any = dnsquery['type'] == 255
     if type_any:
         resolvers = [
             r for r in resolvers
             if r.name not in ("RedisCache", "cloudflare")
         ]
     sender, receiver = trio.open_memory_channel(len(nclients))
     async with sender, receiver:
         # Staggered startups of resolving tasks on each suitable provider
         nursery.start_soon(resolve_happy, resolvers, dnsquery, nursery,
                            sender.clone())
         fastest = None
         # Timeout for answering downstream requests
         with trio.move_on_after(5 if type_any else 0.95):
             fastest = await receiver.receive()
             sender.send_nowait(fastest)  # Put the fastest back for cacher
         # Cache any received answers
         nursery.start_soon(cacher_task, receiver.clone())
     if fastest:
         statkey = fastest["NameClient"]
         stat_res = fastest
         stats_fastest[statkey] += 1
         return fastest
     raise trio.TooSlowError
Example #7
async def spawn_child_nursery(spawn, shutdown_timeout=math.inf):
    send_channel, receive_channel = trio.open_memory_channel(0)
    async with receive_channel:
        shutdown_trigger = Event()
        spawn(_run_nursery_until_event, send_channel, shutdown_trigger,
              shutdown_timeout)
        return await receive_channel.receive(), shutdown_trigger
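
The _run_nursery_until_event task referenced above is not shown in this example; the following is only a hypothetical sketch of such a counterpart, handing the freshly opened nursery back through the zero-capacity channel and keeping it alive until the shutdown event fires:

# Hypothetical sketch, not the original implementation.
async def _run_nursery_until_event(send_channel, shutdown_trigger, shutdown_timeout):
    async with trio.open_nursery() as nursery:
        async with send_channel:
            # With a zero-capacity channel this blocks until
            # spawn_child_nursery() has received the nursery.
            await send_channel.send(nursery)
        # Keep the nursery open until shutdown is requested ...
        await shutdown_trigger.wait()
        # ... then give the remaining children a grace period before cancelling.
        nursery.cancel_scope.deadline = trio.current_time() + shutdown_timeout
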
Example #8
    async def wrapper(*args, **kwargs):
        send_channel, receive_channel = trio.open_memory_channel(0)
        async with trio.open_nursery() as nursery:

            async def adapter():
                async with send_channel, aclosing(wrapped(*args,
                                                          **kwargs)) as agen:
                    while True:
                        try:
                            # Advance underlying async generator to next yield
                            value = await agen.__anext__()
                        except StopAsyncIteration:
                            break
                        while True:
                            try:
                                # Forward the yielded value into the send channel
                                try:
                                    await send_channel.send(value)
                                except trio.BrokenResourceError:
                                    return
                                break
                            except BaseException:  # pylint: disable=broad-except
                                # If send_channel.send() raised (e.g. Cancelled),
                                # throw the raised exception back into the generator,
                                # and get the next yielded value to forward.
                                try:
                                    value = await agen.athrow(*sys.exc_info())
                                except StopAsyncIteration:
                                    return

            nursery.start_soon(adapter, name=wrapped)
            async with receive_channel:
                yield receive_channel
Example #9
    async def request(self, receiver_node_id: NodeID,
                      message: BaseMessage) -> IncomingMessage:
        response_channels: Tuple[
            SendChannel[IncomingMessage],
            ReceiveChannel[IncomingMessage], ] = trio.open_memory_channel(0)
        response_send_channel, response_receive_channel = response_channels

        async with self.add_response_handler(
                receiver_node_id,
                message.request_id,
        ) as response_subscription:
            outgoing_message = await self.prepare_outgoing_message(
                receiver_node_id, message)
            self.logger.debug(
                "Sending %s to %s with request id %d",
                outgoing_message,
                encode_hex(receiver_node_id),
                message.request_id,
            )
            await self.outgoing_message_send_channel.send(outgoing_message)
            response = await response_subscription.receive()
            self.logger.debug(
                "Received %s from %s with request id %d as response to %s",
                response,
                outgoing_message,
                encode_hex(receiver_node_id),
                message.request_id,
            )
            return response
Example #10
def open_memory_channel(max_items):
    '''Wrapper around trio.open_memory_channel, which patches the send channel
    for queue-like compatibility'''
    send, recv = trio.open_memory_channel(max_items)
    # monkey-patch here for compatibility with a regular queue:
    send.put = send.send
    return TrioMemoryChannelPair(send, recv)
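
A short usage sketch for the shim above, relying on the open_memory_channel wrapper just defined; TrioMemoryChannelPair is assumed here to be a plain (send, recv) named tuple, which the original snippet does not show:

import collections
import trio

# Assumed container; only the factory above appears in the original.
TrioMemoryChannelPair = collections.namedtuple('TrioMemoryChannelPair', 'send recv')

async def demo():
    chan = open_memory_channel(10)
    await chan.send.put('job')        # queue-style alias for chan.send.send()
    print(await chan.recv.receive())  # -> 'job'

trio.run(demo)
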
Example #11
 async def request(self, name, arguments=None):
     sender, receiver = trio.open_memory_channel(0)
     await self.sender.send(
         dict(sender=sender, name=name, arguments=arguments))
     async with receiver:
         async for response in receiver:
             return response
Example #12
    def add_request_handler(
        self,
        message_type: int,
    ) -> ChannelHandlerSubscription[IncomingMessage]:
        if message_type in self.request_handler_send_channels:
            raise ValueError(
                f"Request handler for type {message_type} is already added")

        request_channels: Tuple[
            SendChannel[IncomingMessage],
            ReceiveChannel[IncomingMessage], ] = trio.open_memory_channel(0)
        self.request_handler_send_channels[message_type] = request_channels[0]

        self.logger.debug("Adding request handler for message type %d",
                          message_type)

        def remove() -> None:
            try:
                self.request_handler_send_channels.pop(message_type)
            except KeyError:
                raise ValueError(
                    f"Request handler for type {message_type} has already been removed"
                )
            else:
                self.logger.debug(
                    "Removing request handler for message type %d",
                    message_type)

        return ChannelHandlerSubscription(
            send_channel=request_channels[0],
            receive_channel=request_channels[1],
            remove_fn=remove,
        )
Example #13
def get_orders(
    emsd_uid: tuple[str, str] = None
) -> OrderBook:
    """
    OrderBook singleton factory per actor.

    """
    if emsd_uid is not None:
        # TODO: read in target emsd's active book on startup
        pass

    global _orders

    if _orders is None:
        size = 100
        tx, rx = trio.open_memory_channel(size)
        brx = broadcast_receiver(rx, size)

        # setup local ui event streaming channels for request/resp
        # streaming with EMS daemon
        _orders = OrderBook(
            _to_ems=tx,
            _from_order_book=brx,
        )

    return _orders
Example #14
async def build_cm_and_channel(store_path, pebble):
    send, recv = trio.open_memory_channel(1)
    store = FilesystemStorageProvider(store_path)
    cm = await CertManager.build(
        store, send, pebble.dir_url, "*****@*****.**", verify_ssl=False
    )
    return cm, recv
Example #15
async def run(player_process):

    with trio.CancelScope() as cancel_scope:
        async with trio.open_nursery() as nursery:

            send_channel, receive_channel = trio.open_memory_channel(0)

            idle()
            nursery.start_soon(monitor_occupancy, send_channel)

            while True:
                transition = await receive_channel.receive()
                # print(f"received transition: {transition}")

                if transition == "start":

                    print("starting ...")
                    nursery.start_soon(start_session)

                elif transition == "stop":

                    print("stopping ...")
                    cancel_scope.cancel()
                    terminate(player_process)
                    break
Example #16
    def init(self, threads=1):
        '''Start worker threads'''

        self.trio_token = trio.hazmat.current_trio_token()
        self.to_upload = trio.open_memory_channel(0)
        for _ in range(threads):
            t = threading.Thread(target=self._upload_loop)
            t.start()
            self.upload_threads.append(t)

        self.to_remove = Queue(1000)
        with self.backend_pool() as backend:
            has_delete_multi = backend.has_delete_multi

        if has_delete_multi:
            t = threading.Thread(target=self._removal_loop_multi)
            t.daemon = True  # interruption will do no permanent harm
            t.start()
            self.removal_threads.append(t)
        else:
            for _ in range(20):
                t = threading.Thread(target=self._removal_loop_simple)
                t.daemon = True  # interruption will do no permanent harm
                t.start()
                self.removal_threads.append(t)
Example #17
async def test_receive_channel_clone_and_close():
    s, r = open_memory_channel(10)

    r2 = r.clone()
    r3 = r.clone()

    s.send_nowait(None)
    await r.aclose()
    with r2:
        pass

    with pytest.raises(trio.ClosedResourceError):
        r.clone()

    with pytest.raises(trio.ClosedResourceError):
        r2.clone()

    # Can still send, r3 is still open
    s.send_nowait(None)

    await r3.aclose()

    # But now the receiver is really closed
    with pytest.raises(trio.BrokenResourceError):
        s.send_nowait(None)
Example #18
 async def execf(self, tid, res, fn):
     with self.wg:
         self.resultq[tid], rq = trio.open_memory_channel(0)
         try:
             await self._c.send(
                 dict(tid=tid, res=res, func=pickle_dumps(fn)))
             with optional_cm(trio.fail_after,
                              getattr(res, 'T', -180) +
                              180):  # 3min grace period
                 ok, r = await rq.receive()
         except (trio.ClosedResourceError, trio.EndOfChannel):
             # TODO: dedicated error class?
             ok, r = False, (
                 "",
                 RuntimeError(
                     f"remote {self.name} closed connection unexpectedly"))
         except trio.TooSlowError:
             log_event("lost_or_late_response")
             ok, r = False, (
                 "",
                 trio.TooSlowError(
                     f"remote {self.name} lost track of job {tid}"))
         if ok: self.health = FULL_HEALTH
         del self.resultq[tid]
         return ok, r
Example #19
async def to_process_map_as_completed(
    sync_fn,
    job_aiter,
    cancellable=False,
    limiter=None,
    *,
    task_status,
):
    if limiter is None:
        limiter = trio_parallel.current_default_worker_limiter()
    send_chan, recv_chan = trio.open_memory_channel(0)
    task_status.started(recv_chan)

    async def worker(job_item, task_status):
        # Backpressure: hold limiter for entire task to avoid
        # spawning too many workers
        async with limiter:
            task_status.started()
            result = await trio_parallel.run_sync(
                sync_fn,
                *job_item,
                cancellable=cancellable,
                limiter=trio.CapacityLimiter(1),
            )
            await send_chan.send(result)

    async with send_chan, trio.open_nursery() as nursery:
        async for job_item in job_aiter:
            await nursery.start(worker, job_item)
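
A hypothetical launch sketch for the helper above: nursery.start() returns whatever is passed to task_status.started(), i.e. the receive channel, so results can be consumed as the workers finish. square, jobs and main are made up for illustration; trio_parallel comes from the original snippet:

import trio

def square(x):
    return x * x

async def jobs():
    for i in range(5):
        yield (i,)  # each job item is an argument tuple for sync_fn

async def main():
    async with trio.open_nursery() as nursery:
        results = await nursery.start(to_process_map_as_completed, square, jobs())
        async for result in results:  # ends when the send channel is closed
            print(result)

if __name__ == '__main__':
    trio.run(main)
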
Example #20
    async def refine(self, input, output):
        async with trio.open_nursery() as nursery:
            if input:
                source = input
            elif self.source:
                source = self.source
            else:
                raise RuntimeError('No input provided.')

            send_channel, receive_channel = trio.open_memory_channel(0)

            async def pull_task():
                async with send_channel, aclosing(source) as aiter:
                    async for item in aiter:
                        await send_channel.send(item)

            nursery.start_soon(pull_task)

            while True:
                buffer = []
                try:
                    self._add_item(await receive_channel.receive(), buffer)
                    with trio.move_on_after(self.interval):
                        while True:
                            if len(buffer) == self.max_size:
                                break
                            self._add_item(await receive_channel.receive(),
                                           buffer)
                except trio.EndOfChannel:
                    if buffer:
                        await output(self._process_result(buffer))
                    break
                await output(self._process_result(buffer))
Example #21
    async def main():
        async with trio.open_nursery() as nursery:
            # Get our CTRL-C handler, tunnel, and trio channels running
            nursery.start_soon(control_c_handler, nursery)

            send_channel, receive_channel = trio.open_memory_channel(0)
            async with send_channel, receive_channel:
                # Start the configtunnel listener
                # nursery.start_soon(tunnel.listen, send_channel.clone())
                # Start the database listener
                nursery.start_soon(manager.db.watchConfig, send_channel.clone())

                everything_else = await nursery.start(
                    partial(manager.activate, user="******")
                )
                # Respond to incoming updates
                async for command in receive_channel:
                    print("config command '{!r}' received".format(command))

                    everything_else.cancel()  # clean tasks
                    manager.processList = dict()  # clean listener objects

                    print("Reconfiguring Replay Manager: ", command)
                    everything_else = await nursery.start(
                        partial(
                            manager.activate,
                            updateSniffer="sniff" in command,
                            updateOpenPorts="ports" in command,
                            user=command[-1] if "user" in command else "system",
                        )
                    )
Example #22
    async def request_response_subscription(
        self,
        receiver_node_id: NodeID,
        message: BaseMessage,
        endpoint: Optional[Endpoint] = None,
    ) -> AsyncGenerator[IncomingMessageSubscription, None]:
        if endpoint is None:
            endpoint = await self.get_endpoint_from_node_db(receiver_node_id)

        response_channels: Tuple[
            SendChannel[IncomingMessage],
            ReceiveChannel[IncomingMessage], ] = trio.open_memory_channel(0)
        response_send_channel, response_receive_channel = response_channels

        async with self.add_response_handler(
                receiver_node_id,
                message.request_id,
        ) as response_subscription:
            outgoing_message = OutgoingMessage(
                message=message,
                receiver_node_id=receiver_node_id,
                receiver_endpoint=endpoint,
            )
            self.logger.debug(
                "Sending %s to %s with request id %d",
                outgoing_message,
                encode_hex(receiver_node_id),
                message.request_id,
            )
            await self.outgoing_message_send_channel.send(outgoing_message)
            yield response_subscription
Example #23
 def __init__(self):
     self._motor_left = 0
     self._motor_right = 0
     self._motor_flipper = 0
     self._rover_data_to_memory_channel = {
         i: trio.open_memory_channel(0) for i in ROVER_DATA_ELEMENTS.keys()
     }
Example #24
        async def open(self):
            self.send, self.recv = trio.open_memory_channel(
                max_buffer_size=self.max_nums)

            # Init available numbers
            for i in range(self.max_nums):
                await self.release(i)
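
The release() coroutine called by open() above is not shown, and a matching acquire() is assumed; a plausible pair treats the buffered channel as a pool of free numbers (hypothetical sketch, not the original code):

        # Hypothetical counterparts, for illustration only.
        async def acquire(self):
            # Blocks while every number is checked out.
            return await self.recv.receive()

        async def release(self, num):
            # Returns a number to the pool; the buffer holds max_nums
            # items, so this does not block under normal use.
            await self.send.send(num)
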
Example #25
    def filter(self,
               rule,
               *,
               channel: Optional[trio.MemorySendChannel] = None,
               bufsize=1):
        """Create a filter for incoming messages

        Usage::

            async with router.filter(rule) as receive_channel:
                matching_msg = await receive_channel.receive()

            # OR:
            send_chan, recv_chan = trio.open_memory_channel(1)
            async with router.filter(rule, channel=send_chan):
                matching_msg = await recv_chan.receive()

        If the channel fills up, messages that do not fit are discarded.

        The sending end of the channel is closed when leaving the ``async with``
        block, whether or not it was passed in.

        :param jeepney.MatchRule rule: Catch messages matching this rule
        :param trio.MemorySendChannel channel: Send matching messages here
        :param int bufsize: If no channel is passed in, create one with this size
        """
        if channel is None:
            channel, recv_channel = trio.open_memory_channel(bufsize)
        else:
            recv_channel = None
        return TrioFilterHandle(self._filters, rule, channel, recv_channel)
Example #26
    async def main():

        # make sure it all works within the runtime
        async with tractor.open_root_actor():

            tx, rx = trio.open_memory_channel(1)
            brx = broadcast_receiver(rx, 1)
            cs = trio.CancelScope()

            async def sub_and_recv():
                with cs:
                    async with brx.subscribe() as bc:
                        async for value in bc:
                            print(value)

            async def cancel_and_send():
                await trio.sleep(0.2)
                cs.cancel()
                await tx.send(1)

            async with trio.open_nursery() as n:

                n.start_soon(sub_and_recv)
                await trio.sleep(0.1)
                assert brx._state.recv_ready

                n.start_soon(cancel_and_send)

                # ensure that we don't hang because no-task is now
                # waiting on the underlying receive..
                with trio.fail_after(0.5):
                    value = await brx.receive()
                    print(f'parent: {value}')
                    assert value == 1
Example #27
async def amap(async_fn,
               iterable,
               *,
               max_at_once=None,
               max_per_second=None,
               max_burst=1,
               iterable_is_async="guess",
               capture_outcome=False,
               include_index=False,
               include_value=False,
               max_buffer_size=0):
    send_channel, receive_channel = trio.open_memory_channel(max_buffer_size)
    async with receive_channel:
        async with trio.open_nursery() as nursery:
            nursery.start_soon(
                partial(
                    run_on_each,
                    # Pass through:
                    async_fn,
                    iterable,
                    max_at_once=max_at_once,
                    max_per_second=max_per_second,
                    max_burst=max_burst,
                    iterable_is_async=iterable_is_async,
                    capture_outcome=capture_outcome,
                    include_index=include_index,
                    include_value=include_value,
                    # Not a simple pass-through:
                    send_to=send_channel,
                ))
            await yield_(receive_channel)
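
The await yield_(receive_channel) call suggests this function is wrapped by the async_generator package as an async context manager; under that assumption, a minimal usage sketch (process_url and urls are placeholders):

async def demo():
    async with amap(process_url, urls, max_at_once=5) as receive_channel:
        async for result in receive_channel:
            print(result)
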
Example #28
async def test_wsgi_trio() -> None:
    middleware = TrioWSGIMiddleware(echo_body)
    scope = {
        "http_version": "1.1",
        "method": "GET",
        "path": "/",
        "query_string": b"a=b",
        "raw_path": b"/",
        "scheme": "http",
        "type": "http",
    }
    send_channel, receive_channel = trio.open_memory_channel(1)
    await send_channel.send({"type": "http.request"})

    messages = []

    async def _send(message: dict) -> None:
        nonlocal messages
        messages.append(message)

    await middleware(scope, receive_channel.receive, _send)
    assert messages == [
        {
            "headers": [(b"content-type", b"text/plain; charset=utf-8"), (b"content-length", b"0")],
            "status": 200,
            "type": "http.response.start",
        },
        {"body": bytearray(b""), "type": "http.response.body"},
    ]
Example #29
    def __init__(self, queue_len=None):
        if queue_len is None:
            queue_len = 10000

        # Processing queue
        self._q_send, self._q_recv = trio.open_memory_channel(queue_len)

        # which files to close?
        self._close_files = set()

        # set up
        super().__init__(_TrioSelector())

        # replaced internal data
        self._ready = _Clear()
        self._scheduled = _Clear()
        self._default_executor = TrioExecutor()

        self._orig_signals = {}

        # we do our own timeout handling
        self._timers = []

        # Marker whether the loop is actually running
        self._stopped = trio.Event()
        self._stopped.set()
Example #30
async def api_public_payment_longpolling(payment_hash):
    payment = await get_standalone_payment(payment_hash)

    if not payment:
        return jsonify({"message":
                        "Payment does not exist."}), HTTPStatus.NOT_FOUND
    elif not payment.pending:
        return jsonify({"status": "paid"}), HTTPStatus.OK

    try:
        invoice = bolt11.decode(payment.bolt11)
        expiration = datetime.datetime.fromtimestamp(invoice.date +
                                                     invoice.expiry)
        if expiration < datetime.datetime.now():
            return jsonify({"status": "expired"}), HTTPStatus.OK
    except:
        return jsonify({"message":
                        "Invalid bolt11 invoice."}), HTTPStatus.BAD_REQUEST

    send_payment, receive_payment = trio.open_memory_channel(0)

    print("adding standalone invoice listener", payment_hash, send_payment)
    api_invoice_listeners.append(send_payment)

    async for payment in receive_payment:
        if payment.payment_hash == payment_hash:
            return jsonify({"status": "paid"}), HTTPStatus.OK
Example #31
 def init_endpoints(self) -> TrioChannelPair:
     """Initialize the pusher."""
     if self.maxlen is None:
         maxlen = math.inf
     else:
         maxlen = self.maxlen
     return trio.open_memory_channel(max_buffer_size=maxlen)
Example #32
 def __init__(self, app, vault, update_on_idle=False):
     self.app = app
     self.vault = vault
     self.nursery = None # type: Nursery
     self.update_on_idle = update_on_idle
     self.logger = VaultLoggerAdapter(self.vault, logging.getLogger(__name__))
     send_channel, receive_channel = trio.open_memory_channel(128) # type: Tuple[trio.abc.SendChannel, trio.abc.ReceiveChannel]
     self.file_changes_send_channel = send_channel # type: trio.abc.SendChannel
     self.file_changes_receive_channel = receive_channel # type: trio.abc.ReceiveChannel
Example #33
 def add_receiver(self, max_buffer_size) -> trio.abc.ReceiveChannel:
     """\
     Adds a receiver to this broadcast channel with the given buffer capacity.
     The send end of the receiver is closed when the broadcast channel is closed,
     and if the receive end is closed, it is discarded from the broadcast channel.
     """
     send, receive = trio.open_memory_channel(max_buffer_size)
     self._stack.push_async_exit(send)
     self._send_channels.add(send)
     return receive
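
The send side of this broadcast channel is not shown; given the docstring, a hypothetical fan-out method could look like the following (broadcast() and its error handling are assumptions, not the original code):

 # Hypothetical send side, for illustration only.
 async def broadcast(self, value) -> None:
     for send in list(self._send_channels):
         try:
             await send.send(value)
         except trio.BrokenResourceError:
             # The receive end was closed: discard it, as the docstring describes.
             self._send_channels.discard(send)
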
Example #34
    async def _poll_task(self, *, task_status=trio.TASK_STATUS_IGNORED):
        self.intervals, intervals = trio.open_memory_channel(0)
        task_status.started()

        async with trio.open_nursery() as nursery:
            async def poller(interval, *, task_status=trio.TASK_STATUS_IGNORED):
                with trio.CancelScope() as scope:
                    task_status.started(scope)
                    while True:
                        await self.streamer.send(await self.poll())
                        await trio.sleep(interval)

            @nursery.start_soon
            async def read_interval():
                scope = None
                async for interval in intervals:
                    # cancel the old polling task
                    if scope is not None:
                        scope.cancel()
                        scope = None

                    # start new polling task
                    if interval >= 0:
                        scope = await nursery.start(poller, interval)
Example #35
 def __init__(self, *, update_buffer_size=10) -> None:
     self._updates_in, self._updates_out = trio.open_memory_channel(update_buffer_size)
     self._stack: AsyncExitStack = None
Example #36
 def __init__(self, pool_size):
     self._size = pool_size
     self.send_channel, self.receive_channel = trio.open_memory_channel(pool_size)