示例#1
0
    async def wait_neighbours(self, remote: NodeAPI) -> Tuple[NodeAPI, ...]:
        """Wait for a neighbours packet from the given node.

        Returns the list of neighbours received, excluding ourselves; may be
        shorter than a full bucket if the request times out.
        """
        neighbours: List[NodeAPI] = []
        # Channel pair over which the datagram handler delivers NEIGHBOURS
        # batches for this request.
        send_chan, recv_chan = trio.open_memory_channel[List[NodeAPI]](1)
        with trio.move_on_after(
                constants.KADEMLIA_REQUEST_TIMEOUT) as cancel_scope:
            # Responses to a FIND_NODE request are usually split between multiple
            # NEIGHBOURS packets, so we may have to read from the channel multiple times.
            gen = self.neighbours_channels.receive(remote, send_chan,
                                                   recv_chan)
            # mypy thinks wrapping our generator turns it into something else, so ignore.
            async with aclosing(gen):  # type: ignore
                async for batch in gen:
                    self.logger.debug2(
                        f'got expected neighbours response from {remote}: {batch}'
                    )
                    neighbours.extend(batch)
                    # Stop once we have a full bucket's worth of nodes.
                    if len(neighbours) >= constants.KADEMLIA_BUCKET_SIZE:
                        break
            self.logger.debug2(
                f'got expected neighbours response from {remote}')
        if cancel_scope.cancelled_caught:
            # Timed out before a full bucket arrived; return what we have.
            self.logger.debug2(
                f'timed out waiting for {constants.KADEMLIA_BUCKET_SIZE} neighbours from '
                f'{remote}, got only {len(neighbours)}')
        # Never report the local node as its own neighbour.
        return tuple(n for n in neighbours if n != self.this_node)
示例#2
0
    async def get_data_from_remote(
        self,
        obj,
        trigger_names: Iterable[str] = (),
        triggered_logged_names: Iterable[str] = (),
        logged_names: Iterable[str] = (),
        initial_properties: Iterable[str] = (),
        task_status=TASK_STATUS_IGNORED
    ) -> AsyncContextManager[AsyncGenerator]:
        """Open a server-sent-events stream of property/trigger data for
        ``obj`` from the remote server and yield the event iterator.

        NOTE(review): the body is an async generator (it ``yield``s the
        iterator) even though the annotation says AsyncContextManager —
        presumably a decorator applied elsewhere wraps it; confirm.
        """
        # Describe which triggers/properties of obj we want streamed.
        data = self._get_remote_object_data_data(obj, trigger_names,
                                                 triggered_logged_names,
                                                 logged_names,
                                                 initial_properties)
        data = self.encode(data)

        uri = f'{self.uri}/api/v1/stream/data'
        # Long-lived streaming GET; SSE events arrive on this response.
        response = await self._session.get(
            uri,
            data=data,
            headers={'Content-Type': 'application/json'},
            stream=True)
        raise_for_status(response)

        # aclosing guarantees the event generator is shut down when the
        # caller is done with the iterator.
        async with aclosing(self._generate_sse_events(response,
                                                      task_status)) as aiter:
            yield aiter
示例#3
0
    async def refine(self, input, output):
        """Forward each upstream item to ``output`` and keep re-sending the
        most recent item every ``self.interval`` seconds.

        :param input: upstream async iterable, or None when used as the
            first section (then a default value is required)
        :param output: awaitable callable items are sent to
        :raises RuntimeError: if there is no input and no default value
        """
        if input is None and not self.has_default:
            # pylint: disable=line-too-long
            raise RuntimeError(
                'If Repeat is used as first section,  default value must be provided.'
            )

        async with trio.open_nursery() as nursery:

            async def repeater(item, *, task_status=trio.TASK_STATUS_IGNORED):
                # Send the item once, hand our cancel scope back to the
                # starter, then keep re-sending it every interval tick.
                with trio.CancelScope() as cancel_scope:
                    await output(item)
                    task_status.started(cancel_scope)
                    while True:
                        await trio.sleep(self.interval)
                        await output(item)

            running_repeater = None

            if self.has_default:
                running_repeater = await nursery.start(repeater, self.default)

            if input:
                async with aclosing(input) as aiter:
                    async for item in aiter:
                        # A newer item supersedes the old one: cancel the
                        # previous repeater before starting a fresh one.
                        if running_repeater:
                            running_repeater.cancel()
                        running_repeater = await nursery.start(repeater, item)
                # Input exhausted: stop the last repeater and exit.
                nursery.cancel_scope.cancel()
示例#4
0
File: broker.py  Project: M-o-a-T/qbroker
 async def stream(self, *args, **kwargs):
     """A remote procedure call that returns more than one reply.
     """
     # yield from self.conn.poll(*args, **kwargs)  # py3.7
     async with aclosing(self.conn.stream(*args, **kwargs)) as p:
         async for r in p:
             yield r
示例#5
0
    async def refine(self, input, output):
        """Forward items downstream and keep re-sending the latest one every
        ``self.interval`` seconds until a newer item replaces it.

        :param input: upstream async iterable, or None when this is the
            first section (then a default value is required)
        :param output: awaitable callable items are sent to
        :raises RuntimeError: if there is no input and no default value
        """
        if input is None and not self.has_default:
            # pylint: disable=line-too-long
            raise RuntimeError(
                'If Repeat is used as first section,  default value must be provided.'
            )

        async with trio.open_nursery() as nursery:

            def start_new_repeater(item):
                # Each repeater owns its cancel scope so it can be stopped
                # individually when a newer item arrives.
                cancel_scope = trio.CancelScope()

                async def repeater():
                    with cancel_scope:
                        while True:
                            await trio.sleep(self.interval)
                            await output(item)

                nursery.start_soon(repeater)
                return cancel_scope

            previous_repeater = None

            if self.has_default:
                # Emit the default immediately, then keep repeating it.
                await output(self.default)
                previous_repeater = start_new_repeater(self.default)

            if input:
                async with aclosing(input) as aiter:
                    async for item in aiter:
                        # A newer item supersedes the previous one.
                        if previous_repeater:
                            previous_repeater.cancel()
                        await output(item)
                        previous_repeater = start_new_repeater(item)
示例#6
0
async def main():
    """Bind a temporary queue to the 'direct_logs' exchange for the
    severities named on the command line and print every message received,
    until cancelled."""
    try:
        async with AsyncioConnection(host='localhost') as conn:
            async with conn.get_channel() as chan:
                await chan.exchange_declare('direct_logs', type='direct')
                declared = await chan.queue_declare('', exclusive=True)
                queue = declared.queue

                wanted = sys.argv[1:]
                if not wanted:
                    sys.stderr.write("Usage: %s [info] [warning] [error]\n" %
                                     sys.argv[0])
                    sys.exit(1)

                # One binding per requested severity level.
                for level in wanted:
                    await chan.queue_bind(
                        queue=queue,
                        exchange='direct_logs',
                        routing_key=level,
                    )

                await chan.basic_consume(queue, no_ack=True)
                print(' [x] Waiting for logs. To exit press CTRL+C')

                async with aclosing(chan.delivered_messages()) as incoming:
                    async for msg in incoming:
                        text = msg.body.decode('utf-8')
                        level = msg.delivery_info.routing_key
                        print(" [x] Received %r:%r" % (level, text))
    except asyncio.CancelledError:
        print(' [x] Bye!')
示例#7
0
    async def pipe_spawner_progress(dashboard_user, new_server_name, builder):
        """Poll for the named spawner and forward its progress messages into
        the builder's event stream until the build future completes.

        NOTE(review): polls every 10ms and reaches into JupyterHub
        internals (``_build_future``, ``_spawn_future``,
        ``_generate_progress``) — confirm against the hub version in use.
        """
        while True:

            await sleep(0.01)

            # Stop as soon as the build itself has finished.
            if builder._build_future.done():
                break

            if new_server_name in dashboard_user.spawners and dashboard_user.spawners[new_server_name].pending \
                and dashboard_user.spawners[new_server_name]._spawn_future:

                spawner = dashboard_user.spawners[new_server_name]

                app_log.debug('Found spawner for progress')

                # Relay spawner progress until the build future resolves;
                # aclosing guarantees the progress generator is shut down.
                async with aclosing(
                        iterate_until(
                            builder._build_future,
                            spawner._generate_progress())) as spawnevents:
                    try:
                        async for event in spawnevents:
                            if 'message' in event:
                                builder.add_progress_event({
                                    'progress':
                                    95,
                                    'message':
                                    'Spawner progress: {}'.format(
                                        event['message'])
                                })
                    except CancelledError:
                        pass

                break
示例#8
0
async def fastpurger(e):
    """Delete every message in the chat newer than the replied-to message.

    Triggered by a non-command, non-alphabetic message. Message objects are
    collected in batches of 100 (Telegram's per-call delete limit) and
    removed, then a self-destructing summary is posted and the purge is
    optionally logged.
    """
    import asyncio

    if not e.text[0].isalpha() and e.text[0] not in ("/", "#", "@", "!"):
        chat = await e.get_input_chat()
        msgs = []
        count = 0
        async with aclosing(bot.iter_messages(chat,
                                              min_id=e.reply_to_msg_id)) as h:
            async for m in h:
                # BUG FIX: the original also appended e.reply_to_msg_id on
                # every iteration, corrupting the batch contents and the
                # 100-item chunking; only the iterated message belongs here.
                msgs.append(m)
                count = count + 1
                if len(msgs) == 100:
                    await bot.delete_messages(chat, msgs)
                    msgs = []
        # Flush the final partial batch.
        if msgs:
            await bot.delete_messages(chat, msgs)
        r = await bot.send_message(
            e.chat_id,
            "`Fast purge complete!\n`Purged " + str(count) +
            " messages. **This auto-generated message shall be self destructed in 2 seconds.**",
        )
        if LOGGER:
            await bot.send_message(
                LOGGER_GROUP,
                "Purge of " + str(count) + " messages done successfully.")
        # BUG FIX: time.sleep() blocked the whole event loop for 2 seconds;
        # the async sleep keeps other handlers running during the delay.
        await asyncio.sleep(2)
        await r.delete()
示例#9
0
async def fastpurge(event):
    """Delete all messages newer than the replied-to message, then report.

    Messages are deleted in batches of 100 (Telegram's per-call limit).
    """
    chat = await event.get_input_chat()
    msgs = []
    count = 0
    async with aclosing(
            client.iter_messages(chat, min_id=event.reply_to_msg_id)) as h:
        async for m in h:
            msgs.append(m)
            count = count + 1
            if len(msgs) == 100:
                await client.delete_messages(chat, msgs)
                msgs = []
    # Flush the final partial batch.
    if msgs:
        await client.delete_messages(chat, msgs)
    await client.send_message(
        event.chat_id,
        "`Fast Purge Complete!\n`Purged " + str(count) + " messages.")
    await client.send_message(
        -266765687, "Purge of " + str(count) + " messages done successfully.")
    # NOTE(review): time.sleep blocks the event loop for 2s inside an async
    # handler; asyncio.sleep would be the non-blocking equivalent.
    time.sleep(2)
    i = 1
    # Delete the client's most recent message (from_user value redacted in
    # this scrape); the counter limits the loop to a single message.
    async for message in client.iter_messages(event.chat_id, from_user='******'):
        if i > 1:
            break
        i = i + 1
        await message.delete()
示例#10
0
File: app.py  Project: yesrab/userbot
async def fastpurge(event):
    """Delete every message newer than the replied-to message in batches of
    100 (Telegram's per-call delete limit), then report the count."""
    chat = await event.get_input_chat()
    msgs = []
    count = 0
    async with aclosing(
            client.iter_messages(chat, min_id=event.reply_to_msg_id)) as h:
        async for m in h:
            msgs.append(m)
            count = count + 1
            if len(msgs) == 100:
                await client.delete_messages(chat, msgs)
                msgs = []
    # Flush the final partial batch.
    if msgs:
        await client.delete_messages(chat, msgs)
    await client.send_message(
        event.chat_id, "```Fast Purge Complete!\n```Purged " + str(count) +
        " messages. **This auto-generated message shall be self destructed in 2 seconds.**"
    )
    await client.send_message(
        -1001162835202,
        "Purge of " + str(count) + " messages done successfully.")
    # NOTE(review): time.sleep blocks the event loop; asyncio.sleep would be
    # the non-blocking equivalent here.
    time.sleep(2)
    i = 1
    # Delete the client's most recent message (from_user value redacted in
    # this scrape); the counter limits the loop to one message.
    async for message in client.iter_messages(event.chat_id, from_user='******'):
        if i > 1:
            break
        i = i + 1
        await message.delete()
示例#11
0
async def load_bots(client):
    """Collect ``(bot id, button label)`` pairs from the client's buttons."""
    async with aclosing(iter_buttons(client)) as button_iter:
        return [(get_bot_id(btn), btn.text) async for btn in button_iter]
 async def run(self, ctx: Context):
     """Print a summary line for every feed entry as it is discovered."""
     events = ctx.feed.entry_discovered.stream_events()
     async with aclosing(events) as event_stream:
         async for ev in event_stream:
             print(
                 '------\npublished: {entry.published}\ntitle: {entry.title}\n'
                 'url: {entry.link}'.format(entry=ev.entry))
示例#13
0
File: purge.py  Project: Kritinic/userbot
async def fastpurger(e):
    """Delete every message newer than the replied-to message, unless the
    triggering text starts with a letter or a command prefix."""
    if not e.text[0].isalpha() and e.text[0] != "!" and e.text[
            0] != "/" and e.text[0] != "#" and e.text[0] != "@":
        chat = await e.get_input_chat()
        msgs = []
        count = 0
        async with aclosing(bot.iter_messages(chat,
                                              min_id=e.reply_to_msg_id)) as h:
            async for m in h:
                msgs.append(m)
                count = count + 1
                # Telegram deletes at most 100 messages per call.
                if len(msgs) == 100:
                    await bot.delete_messages(chat, msgs)
                    msgs = []
        # Flush the final partial batch.
        if msgs:
            await bot.delete_messages(chat, msgs)
        await bot.send_message(
            e.chat_id, "`Fast purge complete!\n`Purged " + str(count) +
            " messages. **This auto-generated message shall be self destructed in 2 seconds.**"
        )
        if LOGGER:
            await bot.send_message(
                LOGGER_GROUP,
                "Purge of " + str(count) + " messages done successfully.")
        # NOTE(review): time.sleep blocks the event loop for 2 seconds;
        # asyncio.sleep would be the non-blocking equivalent.
        time.sleep(2)
        i = 1
        # Delete the bot's most recent message (from_user value redacted in
        # this scrape); the counter limits the loop to one message.
        async for message in bot.iter_messages(e.chat_id, from_user='******'):
            if i > 1:
                break
            i = i + 1
            await message.delete()
async def wrapper_mngr(
):
    """Yield a shared cross-framework stream, caching it across tasks.

    On the asyncio-infected side, the first caller opens a channel from the
    asyncio streamer and caches it; later callers receive a broadcast
    receiver clone of the cached feed. On the trio side, the trio streamer
    is cached and yielded directly.
    """
    from tractor.trionics import broadcast_receiver
    global _cached_stream
    in_aio = tractor.current_actor().is_infected_aio()

    if in_aio:
        if _cached_stream:

            from_aio = _cached_stream

            # if we already have a cached feed deliver a rx side clone
            # to consumer
            async with broadcast_receiver(from_aio, 6) as from_aio:
                yield from_aio
                return
        else:
            async with tractor.to_asyncio.open_channel_from(
                aio_streamer,
            ) as (first, from_aio):
                assert not first

                # cache it so next task uses broadcast receiver
                _cached_stream = from_aio

                yield from_aio
    else:
        async with aclosing(trio_streamer()) as stream:
            # cache it so next task uses broadcast receiver
            _cached_stream = stream
            yield stream
示例#15
0
async def main():
    """RPC server: consume numbers from 'rpc_queue', compute fib(n), publish
    the result to each request's reply queue, and ack on success."""
    try:
        async with AsyncioConnection(host='localhost') as connection:
            async with connection.get_channel() as channel:
                await channel.queue_declare('rpc_queue', durable=True)

                # Take only one unacked message at a time.
                await channel.basic_qos(prefetch_count=1)
                await channel.basic_consume('rpc_queue')
                print(' [x] Waiting for messages. To exit press CTRL+C')

                async with aclosing(channel.delivered_messages()) as messages:
                    async for message in messages:
                        body = message.body.decode('utf-8')
                        n = int(body)
                        print(" [x] fib(%s)" % n)
                        response = fib(n)
                        print(" [x] Done: %s" % response)

                        # Echo the request's correlation id so the client
                        # can match the reply to its pending call.
                        reply_message = BasicContent(
                            str(response),
                            properties=BasicProperties(
                                correlation_id=message.properties.
                                correlation_id, ))
                        await channel.basic_publish(
                            reply_message.encode('utf-8'),
                            exchange='',
                            routing_key=message.properties.reply_to)
                        # Ack only after the reply has been published.
                        await channel.basic_ack(
                            delivery_tag=message.delivery_info.delivery_tag)
    except asyncio.CancelledError:
        print(' [x] Bye!')
示例#16
0
File: broker.py  Project: M-o-a-T/qbroker
 async def poll(self, *args, **kwargs):
     """A poll call expects replies from more than one client.
     """
     # yield from self.conn.poll(*args, **kwargs)  # py3.7
     async with aclosing(self.conn.poll(*args, **kwargs)) as p:
         async for r in p:
             yield r
async def test_auth_fail():
    """Connecting with bogus credentials must raise ConnectionRejected."""
    with pytest.raises(ConnectionRejected):
        async with open_bitmex_websocket('testnet', 'abcd1234',
                                         'efgh5678') as socket:
            async with aclosing(socket.listen('position')) as events:
                async for _ in events:
                    assert False
示例#18
0
    async def wrapper_gen(self, *args, **kwargs):
        """Run the wrapped sync generator either inline or through the
        configured executor, yielding an iterator over its values.

        NOTE(review): ``func`` and ``callback`` are closure variables from
        an enclosing decorator scope not visible here.
        """
        executor: Executor = getattr(
            self, 'pymoa_executor', current_executor.get())
        if executor is None:
            async def eat_generator():
                # No executor: drive the sync generator directly, invoking
                # the execute callback per value and yielding control to
                # the event loop between items.
                callback_fn = Executor.get_execute_callback_func(
                    self, callback)
                call_callback = Executor.call_execute_callback_func

                for yield_val in func(self, *args, **kwargs):
                    call_callback(yield_val, callback_fn)
                    yield yield_val
                    await trio.lowlevel.checkpoint()

            gen = eat_generator()
        else:
            if not executor.supports_non_coroutine:
                raise ValueError(
                    f'apply_executor called with normal function "{func}", but '
                    f'Executor "{executor}" only supports async coroutines')

            gen = executor.execute_generator(
                self, func, args, kwargs, callback)

        # Hand the iterator to the caller; aclosing guarantees cleanup.
        async with aclosing(gen) as aiter:
            yield aiter
示例#19
0
    async def wrapper_coro_gen(self, *args, **kwargs):
        """Async-generator counterpart of wrapper_gen: run the wrapped async
        generator inline or via the executor and yield an iterator over its
        values.

        NOTE(review): ``func`` and ``callback`` are closure variables from
        an enclosing decorator scope not visible here.
        """
        executor: Executor = getattr(
            self, 'pymoa_executor', current_executor.get())
        if executor is None:
            async def eat_generator():
                # No executor: iterate the async generator directly and
                # invoke the execute callback for each value.
                callback_fn = Executor.get_execute_callback_func(
                    self, callback)
                call_callback = Executor.call_execute_callback_func

                async for yield_val in func(self, *args, **kwargs):
                    call_callback(yield_val, callback_fn)
                    yield yield_val

            gen = eat_generator()
        else:
            if not executor.supports_coroutine:
                raise ValueError(
                    f'apply_executor called with async coroutine "{func}", but'
                    f' Executor "{executor}" does not support coroutines')

            gen = executor.execute_generator(
                self, func, args, kwargs, callback)

        # Hand the iterator to the caller; aclosing guarantees cleanup.
        async with aclosing(gen) as aiter:
            yield aiter
示例#20
0
    async def _apply_data_from_remote(self, obj, gen):
        """Apply a stream of remote property/trigger updates to ``obj``.

        Keys beginning with ``on_`` are dispatched as events; all other
        keys are set as plain attributes on ``obj``.
        """
        initial = True
        async with aclosing(gen) as aiter:
            async for data in aiter:
                data = data['data']
                if initial:
                    # The first packet may carry a snapshot of initial
                    # property values in addition to the regular payload.
                    initial = False

                    if 'initial_properties' in data:
                        for key, value in data['initial_properties'].items():
                            if key.startswith('on_'):
                                obj.dispatch(key, obj, *value)
                            else:
                                setattr(obj, key, value)

                # NOTE(review): these keys are read unconditionally, so
                # every packet (including the initial one) is assumed to
                # carry them — confirm against the producer side.
                trigger_name = data['logged_trigger_name']
                trigger_value = data['logged_trigger_value']
                props = data['logged_items']

                for key, value in props.items():
                    if key.startswith('on_'):
                        obj.dispatch(key, obj, *value)
                    else:
                        setattr(obj, key, value)

                if trigger_name:
                    if trigger_name.startswith('on_'):
                        obj.dispatch(trigger_name, *trigger_value)
                    else:
                        setattr(obj, trigger_name, trigger_value)
示例#21
0
 async def adapter():
     """Pump values from the wrapped async generator into send_channel.

     If sending raises (e.g. a cancellation), the exception is thrown back
     into the generator so it can unwind or yield a replacement, and
     pumping resumes with whatever it yields next.
     """
     async with send_channel, aclosing(wrapped(*args,
                                               **kwargs)) as agen:
         while True:
             try:
                 # Advance underlying async generator to next yield
                 value = await agen.__anext__()
             except StopAsyncIteration:
                 break
             while True:
                 try:
                     # Forward the yielded value into the send channel
                     try:
                         await send_channel.send(value)
                     except trio.BrokenResourceError:
                         # Receiver side is gone; stop quietly.
                         return
                     break
                 except BaseException:  # pylint: disable=broad-except
                     # If send_channel.send() raised (e.g. Cancelled),
                     # throw the raised exception back into the generator,
                     # and get the next yielded value to forward.
                     try:
                         value = await agen.athrow(*sys.exc_info())
                     except StopAsyncIteration:
                         return
示例#22
0
    async def read(self):
        """
        Read and decode the request body according to its content type.

        Forms (``application/x-www-form-urlencoded`` and ``multipart/form-data``) are automatically
        parsed. If instead the content type matches a known serializer, it is used to deserialize
        the request body. If the content type's main part is ``text``, the body is decoded as
        unicode using the given charset. Otherwise, the binary body is returned as is.

        :return: the decoded body

        """
        if self.content_type == 'application/x-www-form-urlencoded':
            body = await self._body.read()
            return parse_qs(body, strict_parsing=True, encoding=self.charset or 'utf-8',
                            errors='strict')
        elif self.content_type == 'multipart/form-data':
            form = {}
            async with aclosing(self.read_iter()) as stream:
                async for field in stream:
                    form[field.name] = field
        else:
            body = await self._body.read()
            if self.content_type.startswith('text/'):
                return body.decode(self.charset or 'utf-8')
            else:
                return body
示例#23
0
 async def catch_events():
     """Gather events from both signals; return once three have arrived."""
     collected = []
     combined = stream_events(
             [feed.metadata_changed, feed.entry_discovered])
     async with aclosing(combined) as event_stream:
         async for ev in event_stream:
             collected.append(ev)
             if len(collected) == 3:
                 return collected
示例#24
0
async def test_iterate_until(io_loop, deadline, n, delay, expected):
    """iterate_until must yield exactly the items produced before the deadline."""
    deadline_future = schedule_future(io_loop, delay=deadline)

    seen = []
    source = iterate_until(deadline_future, yield_n(n, delay=delay))
    async with aclosing(source) as item_iter:
        async for value in item_iter:
            seen.append(value)
    assert seen == expected
示例#25
0
 async def pull_task(index, source, monitor=False):
     """Pull items from one upstream into ``results[index]``.

     A monitor source only refreshes its slot; a regular source triggers
     an output of the combined tuple once every slot has produced a value
     (or immediately when ``self.partial`` is set). When this source is
     exhausted, the whole nursery is cancelled.
     """
     async with aclosing(weld(nursery, source)) as aiter:
         async for item in aiter:
             results[index] = item
             ready[index] = True
             if not monitor and (self.partial or False not in ready):
                 await output(tuple(results))
     nursery.cancel_scope.cancel()
示例#26
0
async def test_iterate_until(io_loop, deadline, n, delay, expected):
    """Items yielded before the deadline future resolves must match expected."""
    fut = schedule_future(io_loop, delay=deadline)

    received = []
    gen = iterate_until(fut, yield_n(n, delay=delay))
    async with aclosing(gen) as item_stream:
        async for value in item_stream:
            received.append(value)
    assert received == expected
示例#27
0
 async def run(self, ctx):
     """Email an HTML diff whenever the monitored source changes."""
     html_diff = HtmlDiff()
     async with aclosing(ctx.detector.changed.stream_events()) as events:
         async for change in events:
             report = html_diff.make_file(change.old_lines, change.new_lines, context=True)
             await ctx.mailer.create_and_deliver(
                 subject='Change detected in %s' % change.source.url, html_body=report)
             logger.info('Sent notification email')
示例#28
0
 async def asyncIterFeed(self, char, charPos=None):
     """Feed one character (with optional position) into the tokenizer and
     yield any completed token values.

     Characters are queued through ``self._input`` so re-entrant feeds are
     processed strictly in arrival order.
     """
     self._input.append((char, charPos))
     while self._input:
         char, charPos = self._input.pop(0)
         async with aclosing(self._asyncFeed(char, charPos)) as agen:
             async for tok in agen:
                 value = await self.asyncNewToken(tok)
                 # None means the token produced no emittable value.
                 if value is not None:
                     await yield_(value)
示例#29
0
async def fan_out_to_ctxs(
    pub_async_gen_func: typing.Callable,  # it's an async gen ... gd mypy
    topics2ctxs: Dict[str, list],
    packetizer: typing.Callable = None,
) -> None:
    '''
    Request and fan out quotes to each subscribed actor channel.

    :param pub_async_gen_func: async generator factory taking a
        ``get_topics`` callable and yielding ``{topic: data}`` mappings
    :param topics2ctxs: mutable mapping of topic -> subscriber contexts;
        dead subscribers are pruned from it in place
    :param packetizer: optional callable building the outgoing packet from
        ``(topic, data)``; defaults to ``{topic: data}``
    '''
    def get_topics():
        # Live view of the currently subscribed topics.
        return tuple(topics2ctxs.keys())

    agen = pub_async_gen_func(get_topics=get_topics)

    async with aclosing(agen) as pub_gen:

        async for published in pub_gen:

            ctx_payloads: List[Tuple[Context, Any]] = []

            for topic, data in published.items():
                log.debug(f"publishing {topic, data}")

                # build a new dict packet or invoke provided packetizer
                if packetizer is None:
                    packet = {topic: data}

                else:
                    packet = packetizer(topic, data)

                for ctx in topics2ctxs.get(topic, list()):
                    ctx_payloads.append((ctx, packet))

            if not ctx_payloads:
                log.debug(f"Unconsumed values:\n{published}")

            # deliver to each subscriber (fan out)
            if ctx_payloads:
                for ctx, payload in ctx_payloads:
                    try:
                        await ctx.send_yield(payload)
                    except (
                            # That's right, anything you can think of...
                            trio.ClosedResourceError,
                            ConnectionResetError,
                            ConnectionRefusedError,
                    ):
                        # Subscriber went away; drop it from every topic.
                        log.warning(f"{ctx.chan} went down?")
                        for ctx_list in topics2ctxs.values():
                            try:
                                ctx_list.remove(ctx)
                            except ValueError:
                                continue

            if not get_topics():
                # No subscribers remain at all; shut the publisher down.
                log.warning(f"No subscribers left for {pub_gen}")
                break
示例#30
0
async def main():
    """Print the first five instrument updates from the testnet feed."""
    async with open_bitmex_websocket('testnet') as bws:
        received = 0
        async with aclosing(bws.listen('instrument')) as updates:
            async for msg in updates:
                print(f'Received message, symbol: \'{msg["symbol"]}\', timestamp: \'{msg["timestamp"]}\'')
                received += 1
                if received == 5:
                    break
示例#31
0
    async def async_main():
        """Exercise async generator cleanup paths: abandonment triggers a GC
        finalization warning, while aclosing()/exhaustion clean up
        deterministically at the point of use."""
        # GC'ed before exhausted
        with pytest.warns(
                ResourceWarning,
                match="Async generator.*collected before.*exhausted",
        ):
            assert 42 == await example("abandoned").asend(None)
            gc_collect_harder()
        await _core.wait_all_tasks_blocked()
        assert collected.pop() == "abandoned"

        # aclosing() ensures it's cleaned up at point of use
        async with aclosing(example("exhausted 1")) as aiter:
            assert 42 == await aiter.asend(None)
        assert collected.pop() == "exhausted 1"

        # Also fine if you exhaust it at point of use
        async for val in example("exhausted 2"):
            assert val == 42
        assert collected.pop() == "exhausted 2"

        gc_collect_harder()

        # No problems saving the geniter when using either of these patterns
        async with aclosing(example("exhausted 3")) as aiter:
            saved.append(aiter)
            assert 42 == await aiter.asend(None)
        assert collected.pop() == "exhausted 3"

        # Also fine if you exhaust it at point of use
        saved.append(example("exhausted 4"))
        async for val in saved[-1]:
            assert val == 42
        assert collected.pop() == "exhausted 4"

        # Leave one referenced-but-unexhausted and make sure it gets cleaned up
        if buggy_pypy_asyncgens:
            collected.append("outlived run")
        else:
            saved.append(example("outlived run"))
            assert 42 == await saved[-1].asend(None)
            assert collected == []
示例#32
0
async def example(app=None):
    """Broadcast a qbroker ping poll and run ``cb`` on every reply.

    The open broker is published through the module-global ``u`` so other
    coroutines can use it while this one runs.
    """
    async with qbroker.open_broker("example.list_servers", cfg=cfg) as _u:
        global u
        u = _u
        # Give the broker a moment to settle before polling.
        await trio.sleep(1)
        d = {}
        if app is not None:
            d['app'] = app
        async with aclosing(u.poll("qbroker.ping", call_conv=CC_DATA, timeout=2, _data=d)) as r:
            async for msg in r:
                await cb(msg)
示例#33
0
File: app.py  Project: AvdN/asphalt
 async def run(self, ctx):
     """Deliver an HTML diff by email for every detected change event."""
     differ = HtmlDiff()
     async with aclosing(ctx.detector.changed.stream_events()) as change_stream:
         async for event in change_stream:
             body = differ.make_file(event.old_lines,
                                     event.new_lines,
                                     context=True)
             await ctx.mailer.create_and_deliver(
                 subject='Change detected in %s' % event.source.url,
                 html_body=body)
             logger.info('Sent notification email')
示例#34
0
    async def get_channel_from_remote(self,
                                      hash_name: str,
                                      channel: str,
                                      task_status=TASK_STATUS_IGNORED
                                      ) -> AsyncContextManager[AsyncGenerator]:
        """Subscribe to ``channel`` of the remote object ``hash_name`` and
        yield an iterator over its stream events.

        NOTE(review): the body is an async generator despite the
        AsyncContextManager annotation — presumably wrapped by a decorator
        elsewhere; confirm.
        """
        data = self._get_remote_object_channel_data(hash_name, channel)
        data = {'stream': channel, 'data': data}

        # aclosing guarantees the event generator is closed when the
        # caller's context exits.
        async with aclosing(self._generate_stream_events(
                data, task_status)) as aiter:
            yield aiter
示例#35
0
    async def refine(self, input, output):
        if input:
            source = input
        elif self.source:
            source = self.source
        else:
            raise RuntimeError('No input provided.')

        async with aclosing(source) as aiter:
            async for item in aiter:
                await output(self.func(item))
示例#36
0
async def send_file(ctx: CallContext, path: str):
    """Stream a file under ``ctx.base_path`` to the peer in 64 KiB chunks
    delivered as progress updates on the call.

    :raises Exception: if the resolved path is not an existing file
    """
    final_path = ctx.base_path / path
    if not final_path.is_file():
        raise Exception('{} is not a file'.format(path))

    async with open_async(final_path, 'rb') as f:
        logger.info('Sending %s', path)
        async with aclosing(f.async_readchunks(65536)) as stream:
            async for chunk in stream:
                # Each chunk rides on a progress update rather than the
                # final call result.
                ctx.progress(chunk)

    logger.info('Finished sending %s', path)
示例#37
0
async def main():
    """Fan PRIMES out over the worker pool and print each primality result."""
    async with worker_pool() as actor_map:

        start = time.time()

        async with aclosing(actor_map(is_prime, PRIMES)) as outcomes:
            async for number, prime in outcomes:
                print(f'{number} is prime: {prime}')

        print(f'processing took {time.time() - start} seconds')
示例#38
0
async def test_stream_events_memleak():
    """Test that closing but never iterating the event stream will not cause a memory leak."""

    def _queue_count():
        # Count live Queue instances after a full collection cycle.
        return len([obj for obj in gc.get_objects() if type(obj) is Queue])

    source = DummySource()
    gc.collect()
    gc.collect()
    before = _queue_count()
    async with aclosing(stream_events([source.event_a])):
        pass

    gc.collect()
    gc.collect()
    after = _queue_count()
    assert after == before
示例#39
0
async def test_iterate_until_ready_after_deadline(io_loop):
    """Items that are already available must still be yielded even though
    the deadline future resolves immediately (delay=0)."""
    f = schedule_future(io_loop, delay=0)

    # Old-style async generator (async_generator package's decorator/yield_).
    @async_generator
    async def gen():
        for i in range(5):
            await yield_(i)

    yielded = []
    async with aclosing(iterate_until(f, gen())) as items:
        async for item in items:
            yielded.append(item)
    assert yielded == list(range(5))
示例#40
0
    async def test_stream_events(self, source, filter, expected_values):
        """Dispatched values matching the filter must come out of the stream."""
        received = []
        async with aclosing(source.event_a.stream_events(filter)) as stream:
            for value in range(1, 4):
                source.event_a.dispatch(value)

            # None acts as the end-of-stream sentinel.
            source.event_a.dispatch(None)

            async for event in stream:
                if event.args[0] is None:
                    break
                received.append(event.args[0])

        assert received == expected_values
示例#41
0
    async def _generate_progress(self):
        """Private wrapper of progress generator

        This method is always an async generator and will always yield at least one event.

        :raises RuntimeError: when no spawn is pending
        """
        if not self._spawn_pending:
            raise RuntimeError("Spawn not pending, can't generate progress")

        # Guaranteed first event so consumers always see progress start.
        await yield_({
            "progress": 0,
            "message": "Server requested",
        })
        from async_generator import aclosing

        # Relay the spawner's own progress events; aclosing shuts the
        # underlying generator down when this wrapper is closed.
        async with aclosing(self.progress()) as progress:
            async for event in progress:
                await yield_(event)
示例#42
0
async def test_stream_events(filter, expected_values):
    """Events from multiple signals should be merged into one filtered stream."""
    source1, source2 = DummySource(), DummySource()
    collected = []
    signals = [source1.event_a, source2.event_b]
    async with aclosing(stream_events(signals, filter)) as stream:
        for signal in signals:
            for value in range(1, 4):
                signal.dispatch(value)

        # None acts as a sentinel telling the consumer loop below to stop.
        source1.event_a.dispatch(None)

        async for event in stream:
            arg = event.args[0]
            if arg is None:
                break
            collected.append(arg)

    assert collected == expected_values
示例#43
0
async def iterate_until(deadline_future, generator):
    """An async generator that yields items from a generator
    until a deadline future resolves

    This could *almost* be implemented as a context manager
    like asyncio_timeout with a Future for the cutoff.

    However, we want one distinction: continue yielding items
    after the future is complete, as long as they are already finished.

    Usage::

        async for item in iterate_until(some_future, some_async_generator()):
            print(item)

    """
    # aclosing guarantees the wrapped generator is closed even if we break
    # out early or our consumer abandons this generator.
    async with aclosing(generator.__aiter__()) as aiter:
        while True:
            item_future = asyncio.ensure_future(aiter.__anext__())
            # Race the next item against the deadline; whichever finishes
            # first decides the branch below.
            await asyncio.wait(
                [item_future, deadline_future], return_when=asyncio.FIRST_COMPLETED
            )
            if item_future.done():
                try:
                    # StopAsyncIteration from .result() means the source
                    # generator is exhausted; CancelledError may also surface
                    # here if the consumer cancels mid-yield.
                    await yield_(item_future.result())
                except (StopAsyncIteration, asyncio.CancelledError):
                    break
            elif deadline_future.done():
                # deadline is done *and* next item is not ready
                # cancel item future to avoid warnings about
                # unawaited tasks
                if not item_future.cancelled():
                    item_future.cancel()
                # resolve cancellation to avoid garbage collection issues
                try:
                    await item_future
                except asyncio.CancelledError:
                    pass
                break
            else:
                # neither is done, this shouldn't happen
                continue
示例#44
0
    async def get(self, username, server_name=''):
        """Stream spawn-progress events for one of a user's servers.

        Sends progress events (via ``self.send_event``) while the named
        server is spawning, then a final ready or failed event once the
        spawn resolves. Raises 404 for an unknown user or server, and 400
        if the server is not currently starting.
        """
        # Disable caching: this response is a live event stream.
        self.set_header('Cache-Control', 'no-cache')
        if server_name is None:
            server_name = ''
        user = self.find_user(username)
        if user is None:
            # no such user
            raise web.HTTPError(404)
        if server_name not in user.spawners:
            # user has no such server
            raise web.HTTPError(404)
        spawner = user.spawners[server_name]

        # start sending keepalive to avoid proxies closing the connection
        asyncio.ensure_future(self.keepalive())
        # cases:
        # - spawner already started and ready
        # - spawner not running at all
        # - spawner failed
        # - spawner pending start (what we expect)
        url = url_path_join(user.url, server_name, '/')
        # Terminal events sent once the outcome is known.
        ready_event = {
            'progress': 100,
            'ready': True,
            'message': "Server ready at {}".format(url),
            'html_message': 'Server ready at <a href="{0}">{0}</a>'.format(url),
            'url': url,
        }
        failed_event = {'progress': 100, 'failed': True, 'message': "Spawn failed"}

        if spawner.ready:
            # spawner already ready. Trigger progress-completion immediately
            self.log.info("Server %s is already started", spawner._log_name)
            await self.send_event(ready_event)
            return

        spawn_future = spawner._spawn_future

        if not spawner._spawn_pending:
            # not pending, no progress to fetch
            # check if spawner has just failed
            f = spawn_future
            if f and f.done() and f.exception():
                failed_event['message'] = "Spawn failed: %s" % f.exception()
                await self.send_event(failed_event)
                return
            else:
                raise web.HTTPError(400, "%s is not starting...", spawner._log_name)

        # retrieve progress events from the Spawner
        # iterate_until stops the stream once spawn_future resolves, even if
        # the Spawner's progress generator never finishes on its own.
        async with aclosing(
            iterate_until(spawn_future, spawner._generate_progress())
        ) as events:
            async for event in events:
                # don't allow events to sneakily set the 'ready' flag
                if 'ready' in event:
                    event.pop('ready', None)
                await self.send_event(event)

        # progress finished, wait for spawn to actually resolve,
        # in case progress finished early
        # (ignore errors, which will be logged elsewhere)
        await asyncio.wait([spawn_future])

        # progress and spawn finished, check if spawn succeeded
        if spawner.ready:
            # spawner is ready, signal completion and redirect
            self.log.info("Server %s is ready", spawner._log_name)
            await self.send_event(ready_event)
        else:
            # what happened? Maybe spawn failed?
            f = spawn_future
            if f and f.done() and f.exception():
                failed_event['message'] = "Spawn failed: %s" % f.exception()
            else:
                self.log.warning(
                    "Server %s didn't start for unknown reason", spawner._log_name
                )
            await self.send_event(failed_event)
示例#45
0
File: conn.py  Project: M-o-a-T/qbroker
 async def stream(self, *args, **kwargs):
     """Yield every streamed reply from a poll call, closing the poll generator on exit."""
     poller = self.poll(*args, _MsgClass=StreamMsg, **kwargs)
     async with aclosing(poller) as replies:
         async for reply in replies:
             yield reply
示例#46
0
File: broker.py  Project: M-o-a-T/qbroker
 async def poll_first(self, *args, **kwargs):
     """An alert call returns the first reply.

     Returns ``None`` if the poll yields no replies at all.
     """
     poller = self.conn.poll(*args, **kwargs)
     async with aclosing(poller) as replies:
         async for reply in replies:
             return reply