Code example #1
import time

import pytest
import tractor


async def test_cancel_remote_arbiter(daemon, arb_addr):
    assert not tractor.current_actor().is_arbiter
    async with tractor.get_arbiter(*arb_addr) as portal:
        await portal.cancel_actor()

    time.sleep(0.1)
    # the arbiter channel server is cancelled but not its main task
    assert daemon.returncode is None

    # no arbiter socket should exist
    with pytest.raises(OSError):
        async with tractor.get_arbiter(*arb_addr) as portal:
            pass
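The `daemon` and `arb_addr` arguments above are pytest fixtures provided elsewhere in the test suite and are not shown on this page. A hypothetical minimal `arb_addr` fixture, with host/port values borrowed from code example #7 purely for illustration:

import pytest


@pytest.fixture
def arb_addr():
    # (host, port) tuple unpacked into `tractor.get_arbiter(*arb_addr)`
    return ('127.0.0.1', 1616)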
Code example #2
import trio
import tractor


async def test_reg_then_unreg(arb_addr):
    actor = tractor.current_actor()
    assert actor.is_arbiter
    assert len(actor._registry) == 1  # only self is registered

    async with tractor.open_nursery() as n:
        portal = await n.start_actor('actor', rpc_module_paths=[__name__])
        uid = portal.channel.uid

        async with tractor.get_arbiter(*arb_addr) as aportal:
            # this local actor should be the arbiter
            assert actor is aportal.actor

            async with tractor.wait_for_actor('actor'):
                # sub-actor uid should be in the registry
                assert uid in aportal.actor._registry
                sockaddrs = actor._registry[uid]
                # XXX: can we figure out what the listen addr will be?
                assert sockaddrs

        await n.cancel()  # tear down nursery

        await trio.sleep(0.1)
        assert uid not in aportal.actor._registry
        sockaddrs = actor._registry[uid]
        assert not sockaddrs
Code example #3
import tractor


async def test_self_is_registered_localportal(arb_addr):
    "Verify waiting on the arbiter to register itself using a local portal."
    actor = tractor.current_actor()
    assert actor.is_arbiter
    async with tractor.get_arbiter(*arb_addr) as portal:
        assert isinstance(portal, tractor._portal.LocalPortal)
        sockaddr = await portal.run('self', 'wait_for_actor', name='arbiter')
        assert sockaddr[0] == arb_addr
Code example #4
File: test_discovery.py  Project: goodboy/tractor
from functools import partial

import trio
import tractor


async def close_chans_before_nursery(
    arb_addr: tuple,
    use_signal: bool,
    remote_arbiter: bool = False,
) -> None:

    # logic for how many actors should still be
    # in the registry at teardown.
    if remote_arbiter:
        entries_at_end = 2
    else:
        entries_at_end = 1

    async with tractor.open_root_actor(arbiter_addr=arb_addr):
        async with tractor.get_arbiter(*arb_addr) as aportal:
            try:
                get_reg = partial(unpack_reg, aportal)

                async with tractor.open_nursery() as tn:
                    portal1 = await tn.start_actor(name='consumer1',
                                                   enable_modules=[__name__])
                    portal2 = await tn.start_actor('consumer2',
                                                   enable_modules=[__name__])

                    # TODO: compact this back as was in last commit once
                    # 3.9+, see https://github.com/goodboy/tractor/issues/207
                    async with portal1.open_stream_from(
                        stream_forever) as agen1:
                        async with portal2.open_stream_from(
                                stream_forever) as agen2:
                            async with trio.open_nursery() as n:
                                n.start_soon(streamer, agen1)
                                n.start_soon(cancel, use_signal, .5)
                                try:
                                    await streamer(agen2)
                                finally:
                                    # Kill the root nursery thus resulting in
                                    # normal arbiter channel ops to fail during
                                    # teardown. It doesn't seem like this is
                                    # reliably triggered by an external SIGINT.
                                    # tractor.current_actor()._root_nursery.cancel_scope.cancel()

                                    # XXX: THIS IS THE KEY THING that
                                    # happens **before** exiting the
                                    # actor nursery block

                                    # also kill off channels cuz why not
                                    await agen1.aclose()
                                    await agen2.aclose()
            finally:
                with trio.CancelScope(shield=True):
                    await trio.sleep(1)

                    # all subactors should have de-registered
                    registry = await get_reg()
                    assert portal1.channel.uid not in registry
                    assert portal2.channel.uid not in registry
                    assert len(registry) == entries_at_end
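The helpers referenced above (`unpack_reg`, `stream_forever`, `streamer`, `cancel`) are defined elsewhere in test_discovery.py and are not shown on this page; the sketches below are hypothetical minimal versions inferred only from how they are called here:

import os
import signal

import trio


async def stream_forever():
    # remote streaming target: an endless async generator
    while True:
        yield 'yo'
        await trio.sleep(0.01)


async def streamer(agen):
    # drain a portal stream until it is closed
    async for _ in agen:
        pass


async def cancel(use_signal, delay=0):
    # trigger teardown after `delay`, via SIGINT or a plain KeyboardInterrupt
    await trio.sleep(delay)
    if use_signal:
        os.kill(os.getpid(), signal.SIGINT)
    else:
        raise KeyboardInterrupt


async def unpack_reg(portal):
    # fetch the arbiter's registry mapping through the portal
    return await portal.run_from_ns('self', 'get_registry')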
Code example #5
import click
import tractor

# (`colorize_json` is a pretty-printing helper from the surrounding
# piker project, assumed to be in scope)


async def list_services():
    async with tractor.get_arbiter(
            *tractor.current_actor()._arb_addr) as portal:
        registry = await portal.run('self', 'get_registry')
        json_d = {}
        for uid, socket in registry.items():
            name, uuid = uid
            host, port = socket
            json_d[f'{name}.{uuid}'] = f'{host}:{port}'
        click.echo(f"Available `piker` services:\n{colorize_json(json_d)}")
Code example #6
import click
import tractor

# (`_tractor_kwargs` and `colorize_json` come from the surrounding
# piker project and are assumed to be in scope)


async def list_services():

    async with tractor.get_arbiter(
        *_tractor_kwargs['arbiter_addr']
    ) as portal:
        registry = await portal.run_from_ns('self', 'get_registry')
        json_d = {}
        for key, socket in registry.items():
            host, port = socket
            json_d[key] = f'{host}:{port}'
        click.echo(f"{colorize_json(json_d)}")
Code example #7
import tractor


async def main(service_name):

    async with tractor.open_nursery() as an:
        await an.start_actor(service_name)

        async with tractor.get_arbiter('127.0.0.1', 1616) as portal:
            print(f"Arbiter is listening on {portal.channel}")

        async with tractor.wait_for_actor(service_name) as sockaddr:
            print(f"my_service is found at {sockaddr}")

        await an.cancel()
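Running `main` requires the trio/tractor runtime; a minimal sketch, assuming a tractor version where `open_nursery` implicitly boots the root actor on the default arbiter address, and an arbitrary service name:

import trio

if __name__ == '__main__':
    # `main` is the coroutine from the example above
    trio.run(main, 'my_service')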
Code example #8
from functools import partial

import trio
import tractor

# (`stream_from` and `cancel` are helpers defined elsewhere in the
# same test module)


async def spawn_and_check_registry(
    arb_addr: tuple,
    use_signal: bool,
    remote_arbiter: bool = False,
    with_streaming: bool = False,
) -> None:

    async with tractor.open_root_actor(arbiter_addr=arb_addr):
        async with tractor.get_arbiter(*arb_addr) as portal:
            # runtime needs to be up to call this
            actor = tractor.current_actor()

            if remote_arbiter:
                assert not actor.is_arbiter

            if actor.is_arbiter:

                async def get_reg():
                    return actor._registry

                extra = 1  # arbiter is local root actor
            else:
                get_reg = partial(portal.run_from_ns, 'self', 'get_registry')
                extra = 2  # local root actor + remote arbiter

            # ensure current actor is registered
            registry = await get_reg()
            assert actor.uid in registry

            try:
                async with tractor.open_nursery() as n:
                    async with trio.open_nursery() as trion:

                        portals = {}
                        for i in range(3):
                            name = f'a{i}'
                            if with_streaming:
                                portals[name] = await n.start_actor(
                                    name=name, enable_modules=[__name__])

                            else:  # no streaming
                                portals[name] = await n.run_in_actor(
                                    trio.sleep_forever, name=name)

                        # wait on last actor to come up
                        async with tractor.wait_for_actor(name):
                            registry = await get_reg()
                            for uid in n._children:
                                assert uid in registry

                        assert len(portals) + extra == len(registry)

                        if with_streaming:
                            await trio.sleep(0.1)

                            pts = list(portals.values())
                            for p in pts[:-1]:
                                trion.start_soon(stream_from, p)

                            # stream for 1 sec
                            trion.start_soon(cancel, use_signal, 1)

                            last_p = pts[-1]
                            await stream_from(last_p)

                        else:
                            await cancel(use_signal)

            finally:
                with trio.CancelScope(shield=True):
                    await trio.sleep(0.5)

                    # all subactors should have de-registered
                    registry = await get_reg()
                    assert len(registry) == extra
                    assert actor.uid in registry
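Helpers like `spawn_and_check_registry` (and `close_chans_before_nursery` from code example #4) are usually driven from a synchronous, parametrized pytest test; a sketch under that assumption (test name and parameter values are illustrative):

from functools import partial

import pytest
import trio


@pytest.mark.parametrize('use_signal', [True, False])
def test_subactors_deregister_on_cancel(arb_addr, use_signal):
    # run the async helper to completion under trio
    trio.run(partial(
        spawn_and_check_registry,
        arb_addr,
        use_signal,
        with_streaming=True,
    ))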