Example #1
async def test_proxy_peer_requests(request, event_bus, other_event_bus,
                                   event_loop, chaindb_20, client_and_server):
    server_event_bus = event_bus
    client_event_bus = other_event_bus
    client_peer, server_peer = client_and_server

    client_peer_pool = MockPeerPoolWithConnectedPeers(
        [client_peer], event_bus=client_event_bus)
    server_peer_pool = MockPeerPoolWithConnectedPeers(
        [server_peer], event_bus=server_event_bus)

    async with AsyncExitStack() as stack:
        await stack.enter_async_context(
            run_peer_pool_event_server(client_event_bus,
                                       client_peer_pool,
                                       handler_type=ETHPeerPoolEventServer))

        await stack.enter_async_context(
            run_peer_pool_event_server(server_event_bus,
                                       server_peer_pool,
                                       handler_type=ETHPeerPoolEventServer))

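        # Run an ETHRequestServer on the server side so the client's requests
        # below are answered from chaindb_20.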
        await stack.enter_async_context(
            background_asyncio_service(
                ETHRequestServer(server_event_bus,
                                 TO_NETWORKING_BROADCAST_CONFIG,
                                 AsyncChainDB(chaindb_20.db))))

        client_proxy_peer_pool = ETHProxyPeerPool(
            client_event_bus, TO_NETWORKING_BROADCAST_CONFIG)
        await stack.enter_async_context(
            background_asyncio_service(client_proxy_peer_pool))

        proxy_peer_pool = ETHProxyPeerPool(server_event_bus,
                                           TO_NETWORKING_BROADCAST_CONFIG)
        await stack.enter_async_context(
            background_asyncio_service(proxy_peer_pool))

        proxy_peer = await client_proxy_peer_pool.ensure_proxy_peer(
            client_peer.session)

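        # Issue requests through the proxy peer and verify the responses
        # against the genesis header and its roots.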
        headers = await proxy_peer.eth_api.get_block_headers(0, 1, 0, False)

        assert len(headers) == 1
        block_header = headers[0]
        assert block_header.block_number == 0

        receipts = await proxy_peer.eth_api.get_receipts(headers)
        assert len(receipts) == 1
        receipt = receipts[0]
        assert receipt[1][0] == block_header.receipt_root

        block_bundles = await proxy_peer.eth_api.get_block_bodies(headers)
        assert len(block_bundles) == 1
        first_bundle = block_bundles[0]
        assert first_bundle[1][0] == block_header.transaction_root

        node_data = await proxy_peer.eth_api.get_node_data(
            (block_header.state_root, ))
        assert node_data[0][0] == block_header.state_root
Example #2
async def test_proxy_peer_requests_with_timeouts(request, event_bus,
                                                 other_event_bus, event_loop,
                                                 client_and_server):

    server_event_bus = event_bus
    client_event_bus = other_event_bus
    client_peer, server_peer = client_and_server

    client_peer_pool = MockPeerPoolWithConnectedPeers(
        [client_peer], event_bus=client_event_bus)
    server_peer_pool = MockPeerPoolWithConnectedPeers(
        [server_peer], event_bus=server_event_bus)

    async with contextlib.AsyncExitStack() as stack:
        await stack.enter_async_context(
            run_peer_pool_event_server(client_event_bus,
                                       client_peer_pool,
                                       handler_type=ETHPeerPoolEventServer))
        await stack.enter_async_context(
            run_peer_pool_event_server(server_event_bus,
                                       server_peer_pool,
                                       handler_type=ETHPeerPoolEventServer))

        # We just want an ETHRequestServer that doesn't answer us but we still have to run
        # *something* to at least subscribe to the events. Otherwise Lahja's safety check will yell
        # at us for sending requests into the void.
        for event_type in ETHRequestServer(None, None,
                                           None)._subscribed_events:
            server_event_bus.subscribe(event_type, lambda _: None)

        client_proxy_peer_pool = ETHProxyPeerPool(
            client_event_bus, TO_NETWORKING_BROADCAST_CONFIG)
        await stack.enter_async_context(
            background_asyncio_service(client_proxy_peer_pool))

        server_proxy_peer_pool = ETHProxyPeerPool(
            server_event_bus, TO_NETWORKING_BROADCAST_CONFIG)
        await stack.enter_async_context(
            background_asyncio_service(server_proxy_peer_pool))

        proxy_peer = await client_proxy_peer_pool.ensure_proxy_peer(
            client_peer.session)

        with pytest.raises(asyncio.TimeoutError):
            await proxy_peer.eth_api.get_block_headers(0,
                                                       1,
                                                       0,
                                                       False,
                                                       timeout=0.01)

        with pytest.raises(asyncio.TimeoutError):
            await proxy_peer.eth_api.get_receipts((), timeout=0.01)

        with pytest.raises(asyncio.TimeoutError):
            await proxy_peer.eth_api.get_block_bodies((), timeout=0.01)

        with pytest.raises(asyncio.TimeoutError):
            await proxy_peer.eth_api.get_node_data((), timeout=0.01)
Example #3
async def test_requests_when_peer_in_client_vanishs(request, event_bus,
                                                    other_event_bus,
                                                    event_loop, chaindb_20,
                                                    client_and_server):

    server_event_bus = event_bus
    client_event_bus = other_event_bus
    client_peer, server_peer = client_and_server

    client_peer_pool = MockPeerPoolWithConnectedPeers(
        [client_peer], event_bus=client_event_bus)
    server_peer_pool = MockPeerPoolWithConnectedPeers(
        [server_peer], event_bus=server_event_bus)

    async with contextlib.AsyncExitStack() as stack:
        await stack.enter_async_context(
            run_peer_pool_event_server(client_event_bus,
                                       client_peer_pool,
                                       handler_type=ETHPeerPoolEventServer))
        await stack.enter_async_context(
            run_peer_pool_event_server(server_event_bus,
                                       server_peer_pool,
                                       handler_type=ETHPeerPoolEventServer))

        await stack.enter_async_context(
            background_asyncio_service(
                ETHRequestServer(server_event_bus,
                                 TO_NETWORKING_BROADCAST_CONFIG,
                                 MainnetChain.vm_configuration,
                                 AsyncChainDB(chaindb_20.db))))
        client_proxy_peer_pool = ETHProxyPeerPool(
            client_event_bus, TO_NETWORKING_BROADCAST_CONFIG)
        await stack.enter_async_context(
            background_asyncio_service(client_proxy_peer_pool))

        server_proxy_peer_pool = ETHProxyPeerPool(
            server_event_bus, TO_NETWORKING_BROADCAST_CONFIG)
        await stack.enter_async_context(
            background_asyncio_service(server_proxy_peer_pool))

        proxy_peer = await client_proxy_peer_pool.ensure_proxy_peer(
            client_peer.session)

        # We remove the peer from the client and expect PeerConnectionLost exceptions to be raised
        client_peer_pool.connected_nodes.pop(client_peer.session)

        with pytest.raises(PeerConnectionLost):
            await proxy_peer.eth_api.get_block_headers(0, 1, 0, False)

        with pytest.raises(PeerConnectionLost):
            await proxy_peer.eth_api.get_receipts(())

        with pytest.raises(PeerConnectionLost):
            await proxy_peer.eth_api.get_block_bodies(())

        with pytest.raises(PeerConnectionLost):
            await proxy_peer.eth_api.get_node_data(())
Example #4
async def two_connected_tx_pools(event_bus, other_event_bus, event_loop,
                                 funded_address_private_key,
                                 chain_with_block_validation, tx_validator,
                                 client_and_server):

    alice_event_bus = event_bus
    bob_event_bus = other_event_bus
    bob, alice = client_and_server

    bob_peer_pool = MockPeerPoolWithConnectedPeers([bob],
                                                   event_bus=bob_event_bus)
    alice_peer_pool = MockPeerPoolWithConnectedPeers([alice],
                                                     event_bus=alice_event_bus)

    async with contextlib.AsyncExitStack() as stack:
        await stack.enter_async_context(
            run_peer_pool_event_server(bob_event_bus,
                                       bob_peer_pool,
                                       handler_type=ETHPeerPoolEventServer))

        await stack.enter_async_context(
            run_peer_pool_event_server(alice_event_bus,
                                       alice_peer_pool,
                                       handler_type=ETHPeerPoolEventServer))

        bob_proxy_peer_pool = ETHProxyPeerPool(bob_event_bus,
                                               TO_NETWORKING_BROADCAST_CONFIG)
        await stack.enter_async_context(
            background_asyncio_service(bob_proxy_peer_pool))

        alice_proxy_peer_pool = ETHProxyPeerPool(
            alice_event_bus, TO_NETWORKING_BROADCAST_CONFIG)
        await stack.enter_async_context(
            background_asyncio_service(alice_proxy_peer_pool))

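        # Give each side a TxPool backed by its proxy peer pool so transactions
        # can propagate between Alice and Bob.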
        alice_tx_pool = TxPool(
            alice_event_bus,
            alice_proxy_peer_pool,
            tx_validator,
        )
        await stack.enter_async_context(
            background_asyncio_service(alice_tx_pool))

        bob_tx_pool = TxPool(
            bob_event_bus,
            bob_proxy_peer_pool,
            tx_validator,
        )
        await stack.enter_async_context(
            background_asyncio_service(bob_tx_pool))

        yield (
            alice,
            alice_event_bus,
            alice_tx_pool,
        ), (bob, bob_event_bus, bob_tx_pool)
Example #5
async def test_proxy_peer_requests(request, event_bus, other_event_bus,
                                   event_loop, chaindb_fresh, chaindb_20):
    server_event_bus = event_bus
    client_event_bus = other_event_bus
    client_peer, server_peer = await get_directly_linked_peers(
        request,
        event_loop,
        alice_headerdb=FakeAsyncChainDB(chaindb_fresh.db),
        bob_headerdb=FakeAsyncChainDB(chaindb_20.db),
    )

    client_peer_pool = MockPeerPoolWithConnectedPeers(
        [client_peer], event_bus=client_event_bus)
    server_peer_pool = MockPeerPoolWithConnectedPeers(
        [server_peer], event_bus=server_event_bus)

    async with run_peer_pool_event_server(
            client_event_bus,
            client_peer_pool,
            handler_type=ETHPeerPoolEventServer), run_peer_pool_event_server(
                server_event_bus,
                server_peer_pool,
                handler_type=ETHPeerPoolEventServer), run_request_server(
                    server_event_bus,
                    FakeAsyncChainDB(chaindb_20.db)), run_proxy_peer_pool(
                        client_event_bus
                    ) as client_proxy_peer_pool, run_proxy_peer_pool(
                        server_event_bus):

        proxy_peer = await client_proxy_peer_pool.ensure_proxy_peer(
            client_peer.remote)

        headers = await proxy_peer.requests.get_block_headers(0, 1, 0, False)

        assert len(headers) == 1
        block_header = headers[0]
        assert block_header.block_number == 0

        receipts = await proxy_peer.requests.get_receipts(headers)
        assert len(receipts) == 1
        receipt = receipts[0]
        assert receipt[1][0] == block_header.receipt_root

        block_bundles = await proxy_peer.requests.get_block_bodies(headers)
        assert len(block_bundles) == 1
        first_bundle = block_bundles[0]
        assert first_bundle[1][0] == block_header.transaction_root

        node_data = await proxy_peer.requests.get_node_data(
            (block_header.state_root, ))
        assert node_data[0][0] == block_header.state_root
Example #6
async def test_get_pooled_transactions_request(request, event_bus,
                                               other_event_bus, event_loop,
                                               chaindb_20, client_and_server):
    server_event_bus = event_bus
    client_event_bus = other_event_bus
    client_peer, server_peer = client_and_server

    if get_highest_eth_protocol_version(client_peer) < ETHProtocolV65.version:
        pytest.skip("Test not applicable below eth/65")

    client_peer_pool = MockPeerPoolWithConnectedPeers(
        [client_peer], event_bus=client_event_bus)
    server_peer_pool = MockPeerPoolWithConnectedPeers(
        [server_peer], event_bus=server_event_bus)

    async with contextlib.AsyncExitStack() as stack:
        await stack.enter_async_context(
            run_peer_pool_event_server(client_event_bus,
                                       client_peer_pool,
                                       handler_type=ETHPeerPoolEventServer))

        await stack.enter_async_context(
            run_peer_pool_event_server(server_event_bus,
                                       server_peer_pool,
                                       handler_type=ETHPeerPoolEventServer))

        client_proxy_peer_pool = ETHProxyPeerPool(
            client_event_bus, TO_NETWORKING_BROADCAST_CONFIG)
        await stack.enter_async_context(
            background_asyncio_service(client_proxy_peer_pool))

        proxy_peer_pool = ETHProxyPeerPool(server_event_bus,
                                           TO_NETWORKING_BROADCAST_CONFIG)
        await stack.enter_async_context(
            background_asyncio_service(proxy_peer_pool))

        proxy_peer = await client_proxy_peer_pool.ensure_proxy_peer(
            client_peer.session)

        # We run this test separately from the other request tests because GetPooledTransactions
        # requests should be answered from the tx pool, which the previous test does not depend on.
        await stack.enter_async_context(
            background_asyncio_service(
                TxPool(server_event_bus, proxy_peer_pool, lambda _: True)))

        # The tx pool always answers these with an empty response
        txs = await proxy_peer.eth_api.get_pooled_transactions((decode_hex(
            '0x9ea39df6210064648ecbc465cd628fe52f69af53792e1c2f27840133435159d4'
        ), ))
        assert len(txs) == 0
Example #7
async def test_requests_when_peer_in_client_vanishs(request, event_bus,
                                                    other_event_bus,
                                                    event_loop, chaindb_fresh,
                                                    chaindb_20):

    server_event_bus = event_bus
    client_event_bus = other_event_bus
    client_peer, server_peer = await get_directly_linked_peers(
        request,
        event_loop,
        alice_headerdb=FakeAsyncChainDB(chaindb_fresh.db),
        bob_headerdb=FakeAsyncChainDB(chaindb_20.db),
    )

    client_peer_pool = MockPeerPoolWithConnectedPeers(
        [client_peer], event_bus=client_event_bus)
    server_peer_pool = MockPeerPoolWithConnectedPeers(
        [server_peer], event_bus=server_event_bus)

    async with run_peer_pool_event_server(
            client_event_bus,
            client_peer_pool,
            handler_type=ETHPeerPoolEventServer), run_peer_pool_event_server(
                server_event_bus,
                server_peer_pool,
                handler_type=ETHPeerPoolEventServer), run_request_server(
                    server_event_bus,
                    FakeAsyncChainDB(chaindb_20.db)), run_proxy_peer_pool(
                        client_event_bus
                    ) as client_proxy_peer_pool, run_proxy_peer_pool(
                        server_event_bus):

        proxy_peer = await client_proxy_peer_pool.ensure_proxy_peer(
            client_peer.remote)

        # We remove the peer from the client and expect PeerConnectionLost exceptions to be raised
        client_peer_pool.connected_nodes.pop(client_peer.remote)

        with pytest.raises(PeerConnectionLost):
            await proxy_peer.requests.get_block_headers(0, 1, 0, False)

        with pytest.raises(PeerConnectionLost):
            await proxy_peer.requests.get_receipts(())

        with pytest.raises(PeerConnectionLost):
            await proxy_peer.requests.get_block_bodies(())

        with pytest.raises(PeerConnectionLost):
            await proxy_peer.requests.get_node_data(())
Example #8
async def test_proxy_peer_requests_with_timeouts(request, event_bus,
                                                 other_event_bus, event_loop,
                                                 client_and_server):

    server_event_bus = event_bus
    client_event_bus = other_event_bus
    client_peer, server_peer = client_and_server

    client_peer_pool = MockPeerPoolWithConnectedPeers(
        [client_peer], event_bus=client_event_bus)
    server_peer_pool = MockPeerPoolWithConnectedPeers(
        [server_peer], event_bus=server_event_bus)

    async with AsyncExitStack() as stack:
        await stack.enter_async_context(
            run_peer_pool_event_server(client_event_bus,
                                       client_peer_pool,
                                       handler_type=ETHPeerPoolEventServer))
        await stack.enter_async_context(
            run_peer_pool_event_server(server_event_bus,
                                       server_peer_pool,
                                       handler_type=ETHPeerPoolEventServer))

        client_proxy_peer_pool = ETHProxyPeerPool(
            client_event_bus, TO_NETWORKING_BROADCAST_CONFIG)
        await stack.enter_async_context(run_service(client_proxy_peer_pool))

        server_proxy_peer_pool = ETHProxyPeerPool(
            server_event_bus, TO_NETWORKING_BROADCAST_CONFIG)
        await stack.enter_async_context(run_service(server_proxy_peer_pool))

        proxy_peer = await client_proxy_peer_pool.ensure_proxy_peer(
            client_peer.session)

        with pytest.raises(asyncio.TimeoutError):
            await proxy_peer.eth_api.get_block_headers(0,
                                                       1,
                                                       0,
                                                       False,
                                                       timeout=0.01)

        with pytest.raises(asyncio.TimeoutError):
            await proxy_peer.eth_api.get_receipts((), timeout=0.01)

        with pytest.raises(asyncio.TimeoutError):
            await proxy_peer.eth_api.get_block_bodies((), timeout=0.01)

        with pytest.raises(asyncio.TimeoutError):
            await proxy_peer.eth_api.get_node_data((), timeout=0.01)
Example #9
async def test_regular_syncer_fallback(request, event_loop, event_bus, chaindb_fresh, chaindb_20):
    """
    Test the scenario where a header comes in that's not in memory (but is in the DB)
    """
    client_peer, server_peer = await get_directly_linked_peers(
        request, event_loop,
        alice_headerdb=FakeAsyncHeaderDB(chaindb_fresh.db),
        bob_headerdb=FakeAsyncHeaderDB(chaindb_20.db))
    client = FallbackTesting_RegularChainSyncer(
        ByzantiumTestChain(chaindb_fresh.db),
        chaindb_fresh,
        MockPeerPoolWithConnectedPeers([client_peer]))
    server_peer_pool = MockPeerPoolWithConnectedPeers([server_peer], event_bus=event_bus)

    async with run_peer_pool_event_server(
        event_bus, server_peer_pool, handler_type=ETHPeerPoolEventServer
    ), run_request_server(
        event_bus, FakeAsyncChainDB(chaindb_20.db)
    ):

        server_peer.logger.info("%s is serving 20 blocks", server_peer)
        client_peer.logger.info("%s is syncing up 20", client_peer)

        def finalizer():
            event_loop.run_until_complete(client.cancel())
            # Yield control so that client/server.run() returns, otherwise asyncio will complain.
            event_loop.run_until_complete(asyncio.sleep(0.1))
        request.addfinalizer(finalizer)

        asyncio.ensure_future(client.run())

        await wait_for_head(chaindb_fresh, chaindb_20.get_canonical_head())
        head = chaindb_fresh.get_canonical_head()
        assert head.state_root in chaindb_fresh.db
Example #10
async def get_request_server_setup(request, event_loop, event_bus, chain_db):
    genesis = await chain_db.coro_get_canonical_block_by_slot(
        SERENITY_CONFIG.GENESIS_SLOT,
        BeaconBlock,
    )
    alice_chain_db = await get_chain_db((genesis, ))
    alice, alice_peer_pool, bob, bob_peer_pool = await get_directly_linked_peers_in_peer_pools(
        request,
        event_loop,
        alice_chain_db=alice_chain_db,
        bob_chain_db=chain_db,
        bob_peer_pool_event_bus=event_bus,
    )

    response_buffer = MsgBuffer()
    alice.add_subscriber(response_buffer)

    async with run_peer_pool_event_server(event_bus,
                                          bob_peer_pool,
                                          handler_type=BCCPeerPoolEventServer):

        bob_request_server = BCCRequestServer(event_bus,
                                              TO_NETWORKING_BROADCAST_CONFIG,
                                              bob.context.chain_db)
        asyncio.ensure_future(bob_request_server.run())

        await event_bus.wait_until_all_endpoints_subscribed_to(
            GetBeaconBlocksEvent)

        def finalizer():
            event_loop.run_until_complete(bob_request_server.cancel())

        request.addfinalizer(finalizer)

        yield alice, response_buffer
Example #11
async def test_regular_syncer_fallback(request, event_loop, event_bus, chaindb_fresh, chaindb_20):
    """
    Test the scenario where a header comes in that's not in memory (but is in the DB)
    """
    client_context = ChainContextFactory(headerdb__db=chaindb_fresh.db)
    server_context = ChainContextFactory(headerdb__db=chaindb_20.db)
    peer_pair = LatestETHPeerPairFactory(
        alice_peer_context=client_context,
        bob_peer_context=server_context,
        event_bus=event_bus,
    )

    async with peer_pair as (client_peer, server_peer):

        client = FallbackTesting_RegularChainSyncer(
            ByzantiumTestChain(chaindb_fresh.db),
            chaindb_fresh,
            MockPeerPoolWithConnectedPeers([client_peer], event_bus=event_bus)
        )
        server_peer_pool = MockPeerPoolWithConnectedPeers([server_peer], event_bus=event_bus)

        async with run_peer_pool_event_server(
            event_bus, server_peer_pool, handler_type=ETHPeerPoolEventServer
        ), background_asyncio_service(ETHRequestServer(
            event_bus, TO_NETWORKING_BROADCAST_CONFIG, AsyncChainDB(chaindb_20.db)
        )):

            server_peer.logger.info("%s is serving 20 blocks", server_peer)
            client_peer.logger.info("%s is syncing up 20", client_peer)

            async with background_asyncio_service(client):
                await wait_for_head(chaindb_fresh, chaindb_20.get_canonical_head())
                head = chaindb_fresh.get_canonical_head()
                assert head.state_root in chaindb_fresh.db
Example #12
async def test_skeleton_syncer(request, event_loop, event_bus, chaindb_fresh,
                               chaindb_1000):
    client_peer, server_peer = await get_directly_linked_peers(
        request,
        event_loop,
        alice_headerdb=FakeAsyncHeaderDB(chaindb_fresh.db),
        bob_headerdb=FakeAsyncHeaderDB(chaindb_1000.db))
    client_peer_pool = MockPeerPoolWithConnectedPeers([client_peer])
    client = FastChainSyncer(LatestTestChain(chaindb_fresh.db), chaindb_fresh,
                             client_peer_pool)
    server_peer_pool = MockPeerPoolWithConnectedPeers([server_peer],
                                                      event_bus=event_bus)

    async with run_peer_pool_event_server(
            event_bus, server_peer_pool,
            handler_type=ETHPeerPoolEventServer), run_request_server(
                event_bus, FakeAsyncChainDB(chaindb_1000.db)):

        client_peer.logger.info("%s is serving 1000 blocks", client_peer)
        server_peer.logger.info("%s is syncing up 1000 blocks", server_peer)

        await asyncio.wait_for(client.run(), timeout=20)

        head = chaindb_fresh.get_canonical_head()
        assert head == chaindb_1000.get_canonical_head()

        # Now download the state for the chain's head.
        state_downloader = StateDownloader(chaindb_fresh, chaindb_fresh.db,
                                           head.state_root, client_peer_pool)
        await asyncio.wait_for(state_downloader.run(), timeout=20)

        assert head.state_root in chaindb_fresh.db
Example #13
async def test_skeleton_syncer(request, event_loop, event_bus, chaindb_fresh,
                               chaindb_1000):

    client_context = ChainContextFactory(headerdb__db=chaindb_fresh.db)
    server_context = ChainContextFactory(headerdb__db=chaindb_1000.db)
    peer_pair = ETHPeerPairFactory(
        alice_peer_context=client_context,
        bob_peer_context=server_context,
        event_bus=event_bus,
    )
    async with peer_pair as (client_peer, server_peer):

        client_peer_pool = MockPeerPoolWithConnectedPeers([client_peer],
                                                          event_bus=event_bus)
        client = FastChainSyncer(LatestTestChain(chaindb_fresh.db),
                                 chaindb_fresh, client_peer_pool)
        server_peer_pool = MockPeerPoolWithConnectedPeers([server_peer],
                                                          event_bus=event_bus)

        async with run_peer_pool_event_server(
                event_bus, server_peer_pool,
                handler_type=ETHPeerPoolEventServer), run_request_server(
                    event_bus, AsyncChainDB(chaindb_1000.db)):

            client_peer.logger.info("%s is serving 1000 blocks", client_peer)
            server_peer.logger.info("%s is syncing up 1000 blocks",
                                    server_peer)

            await asyncio.wait_for(client.run(), timeout=20)

            head = chaindb_fresh.get_canonical_head()
            assert head == chaindb_1000.get_canonical_head()
Example #14
async def test_peer_pool_answers_connect_commands(event_bus, server, receiver_remote):
    # This is the PeerPool which will accept our message and try to connect to {server}
    initiator_peer_pool = ParagonPeerPool(
        privkey=INITIATOR_PRIVKEY,
        context=ParagonContext(),
        event_bus=event_bus,
    )
    async with background_asyncio_service(initiator_peer_pool) as manager:
        await manager.wait_started()
        async with run_peer_pool_event_server(event_bus, initiator_peer_pool):

            assert len(server.peer_pool.connected_nodes) == 0

            await event_bus.wait_until_any_endpoint_subscribed_to(ConnectToNodeCommand)
            await event_bus.broadcast(
                ConnectToNodeCommand(receiver_remote),
                TO_NETWORKING_BROADCAST_CONFIG
            )

            # This test was maybe 30% flaky at 0.1 sleep, so wait in a loop.
            for _ in range(5):
                await asyncio.sleep(0.1)
                if len(server.peer_pool.connected_nodes) == 1:
                    break
            else:
                assert len(server.peer_pool.connected_nodes) == 1
Example #15
async def test_light_syncer(request, event_loop, event_bus, chaindb_fresh,
                            chaindb_20):
    client_peer, server_peer = await get_directly_linked_peers(
        request,
        event_loop,
        alice_peer_class=LESPeer,
        alice_headerdb=FakeAsyncHeaderDB(chaindb_fresh.db),
        bob_headerdb=FakeAsyncHeaderDB(chaindb_20.db))
    client = LightChainSyncer(LatestTestChain(chaindb_fresh.db), chaindb_fresh,
                              MockPeerPoolWithConnectedPeers([client_peer]))
    server_peer_pool = MockPeerPoolWithConnectedPeers([server_peer],
                                                      event_bus=event_bus)

    async with run_peer_pool_event_server(
            event_bus, server_peer_pool,
            handler_type=LESPeerPoolEventServer), run_request_server(
                event_bus,
                FakeAsyncChainDB(chaindb_20.db),
                server_type=LightRequestServer):

        server_peer.logger.info("%s is serving 20 blocks", server_peer)
        client_peer.logger.info("%s is syncing up 20", client_peer)

        def finalizer():
            event_loop.run_until_complete(client.cancel())
            # Yield control so that client/server.run() returns, otherwise asyncio will complain.
            event_loop.run_until_complete(asyncio.sleep(0.1))

        request.addfinalizer(finalizer)

        asyncio.ensure_future(client.run())

        await wait_for_head(chaindb_fresh, chaindb_20.get_canonical_head())
Example #16
async def test_fast_syncer(request, event_loop, event_bus, chaindb_fresh,
                           chaindb_1000):

    client_context = ChainContextFactory(headerdb__db=chaindb_fresh.db)
    server_context = ChainContextFactory(headerdb__db=chaindb_1000.db)
    peer_pair = LatestETHPeerPairFactory(
        alice_peer_context=client_context,
        bob_peer_context=server_context,
        event_bus=event_bus,
    )
    async with peer_pair as (client_peer, server_peer):

        client_peer_pool = MockPeerPoolWithConnectedPeers([client_peer],
                                                          event_bus=event_bus)
        client = FastChainSyncer(LatestTestChain(chaindb_fresh.db),
                                 chaindb_fresh, client_peer_pool)
        server_peer_pool = MockPeerPoolWithConnectedPeers([server_peer],
                                                          event_bus=event_bus)

        async with run_peer_pool_event_server(
                event_bus, server_peer_pool,
                handler_type=ETHPeerPoolEventServer
        ), background_asyncio_service(
                ETHRequestServer(event_bus, TO_NETWORKING_BROADCAST_CONFIG,
                                 AsyncChainDB(chaindb_1000.db))):

            client_peer.logger.info("%s is serving 1000 blocks", client_peer)
            server_peer.logger.info("%s is syncing up 1000 blocks",
                                    server_peer)

            async with background_asyncio_service(client) as manager:
                await asyncio.wait_for(manager.wait_finished(), timeout=20)

            head = chaindb_fresh.get_canonical_head()
            assert head == chaindb_1000.get_canonical_head()
Example #17
async def test_light_syncer(request, event_loop, event_bus, chaindb_fresh,
                            chaindb_20):
    client_context = ChainContextFactory(headerdb__db=chaindb_fresh.db)
    server_context = ChainContextFactory(headerdb__db=chaindb_20.db)
    peer_pair = LESV2PeerPairFactory(
        alice_peer_context=client_context,
        bob_peer_context=server_context,
        event_bus=event_bus,
    )
    async with peer_pair as (client_peer, server_peer):

        client = LightChainSyncer(
            LatestTestChain(chaindb_fresh.db), chaindb_fresh,
            MockPeerPoolWithConnectedPeers([client_peer], event_bus=event_bus))
        server_peer_pool = MockPeerPoolWithConnectedPeers([server_peer],
                                                          event_bus=event_bus)

        async with run_peer_pool_event_server(
                event_bus, server_peer_pool,
                handler_type=LESPeerPoolEventServer
        ), background_asyncio_service(
                LightRequestServer(
                    event_bus,
                    TO_NETWORKING_BROADCAST_CONFIG,
                    AsyncChainDB(chaindb_20.db),
                )):

            server_peer.logger.info("%s is serving 20 blocks", server_peer)
            client_peer.logger.info("%s is syncing up 20", client_peer)

            async with background_asyncio_service(client):
                await wait_for_head(chaindb_fresh,
                                    chaindb_20.get_canonical_head())
Example #18
async def test_header_gapfill_syncer(request,
                                     event_loop,
                                     event_bus,
                                     chaindb_with_gaps,
                                     chaindb_1000):

    client_context = ChainContextFactory(headerdb__db=chaindb_with_gaps.db)
    server_context = ChainContextFactory(headerdb__db=chaindb_1000.db)
    peer_pair = LatestETHPeerPairFactory(
        alice_peer_context=client_context,
        bob_peer_context=server_context,
        event_bus=event_bus,
    )
    async with peer_pair as (client_peer, server_peer):

        client = HeaderChainGapSyncer(
            LatestTestChain(chaindb_with_gaps.db),
            chaindb_with_gaps,
            MockPeerPoolWithConnectedPeers([client_peer], event_bus=event_bus)
        )
        server_peer_pool = MockPeerPoolWithConnectedPeers([server_peer], event_bus=event_bus)

        async with run_peer_pool_event_server(
            event_bus, server_peer_pool, handler_type=ETHPeerPoolEventServer
        ), background_asyncio_service(ETHRequestServer(
            event_bus, TO_NETWORKING_BROADCAST_CONFIG, AsyncChainDB(chaindb_1000.db),
        )):

            server_peer.logger.info("%s is serving 1000 blocks", server_peer)
            client_peer.logger.info("%s is syncing up 1000", client_peer)

            async with background_asyncio_service(client):
                await wait_for_head(
                    # We check for 249 because 250 exists from the very beginning (the checkpoint)
                    chaindb_with_gaps, chaindb_1000.get_canonical_block_header_by_number(249))
Example #19
async def test_peer_pool_answers_connect_commands(event_loop, event_bus, server, receiver_remote):
    # This is the PeerPool which will accept our message and try to connect to {server}
    initiator_peer_pool = ParagonPeerPool(
        privkey=INITIATOR_PRIVKEY,
        context=ParagonContext(),
        event_bus=event_bus,
    )
    asyncio.ensure_future(initiator_peer_pool.run(), loop=event_loop)
    await initiator_peer_pool.events.started.wait()
    async with run_peer_pool_event_server(
        event_bus,
        initiator_peer_pool,
    ):

        assert len(server.peer_pool.connected_nodes) == 0

        await event_bus.wait_until_any_endpoint_subscribed_to(ConnectToNodeCommand)
        await event_bus.broadcast(
            ConnectToNodeCommand(receiver_remote),
            TO_NETWORKING_BROADCAST_CONFIG
        )

        # This test was maybe 30% flaky at 0.1 sleep
        await asyncio.sleep(0.2)

        assert len(server.peer_pool.connected_nodes) == 1

        await initiator_peer_pool.cancel()
Example #20
async def test_proxy_peer_requests_with_timeouts(request, event_bus,
                                                 other_event_bus, event_loop,
                                                 chaindb_fresh, chaindb_20):

    server_event_bus = event_bus
    client_event_bus = other_event_bus
    client_peer, server_peer = await get_directly_linked_peers(
        request,
        event_loop,
        alice_headerdb=FakeAsyncChainDB(chaindb_fresh.db),
        bob_headerdb=FakeAsyncChainDB(chaindb_20.db),
    )

    client_peer_pool = MockPeerPoolWithConnectedPeers(
        [client_peer], event_bus=client_event_bus)
    server_peer_pool = MockPeerPoolWithConnectedPeers(
        [server_peer], event_bus=server_event_bus)

    async with run_peer_pool_event_server(
            client_event_bus,
            client_peer_pool,
            handler_type=ETHPeerPoolEventServer), run_peer_pool_event_server(
                server_event_bus,
                server_peer_pool,
                handler_type=ETHPeerPoolEventServer), run_proxy_peer_pool(
                    client_event_bus
                ) as client_proxy_peer_pool, run_proxy_peer_pool(
                    server_event_bus):

        proxy_peer = await client_proxy_peer_pool.ensure_proxy_peer(
            client_peer.remote)

        with pytest.raises(TimeoutError):
            await proxy_peer.requests.get_block_headers(0,
                                                        1,
                                                        0,
                                                        False,
                                                        timeout=0.01)

        with pytest.raises(TimeoutError):
            await proxy_peer.requests.get_receipts((), timeout=0.01)

        with pytest.raises(TimeoutError):
            await proxy_peer.requests.get_block_bodies((), timeout=0.01)

        with pytest.raises(TimeoutError):
            await proxy_peer.requests.get_node_data((), timeout=0.01)
Example #21
async def test_queening_queue_recovers_from_penalty_with_one_peer(
        event_bus, chaindb_fresh, chaindb_20, has_parallel_peasant_call):

    local_context = ChainContextFactory(headerdb__db=chaindb_fresh.db)
    remote_context = ChainContextFactory(headerdb__db=chaindb_20.db)
    peer_pair = LatestETHPeerPairFactory(
        alice_peer_context=local_context,
        bob_peer_context=remote_context,
        event_bus=event_bus,
    )
    async with peer_pair as (connection_to_local, connection_to_remote):

        local_peer_pool = MockPeerPoolWithConnectedPeers(
            [connection_to_remote],
            event_bus=event_bus,
        )

        async with run_peer_pool_event_server(
                event_bus, local_peer_pool, handler_type=ETHPeerPoolEventServer
        ), background_asyncio_service(
                ETHRequestServer(
                    event_bus,
                    TO_NETWORKING_BROADCAST_CONFIG,
                    AsyncChainDB(chaindb_20.db),
                )):
            queue = QueeningQueue(local_peer_pool)

            async with background_asyncio_service(queue):
                queen = await asyncio.wait_for(queue.get_queen_peer(),
                                               timeout=0.01)
                assert queen == connection_to_remote

                queue.penalize_queen(connection_to_remote, delay=0.1)
                assert queue.queen is None
                with pytest.raises(asyncio.TimeoutError):
                    # The queen should be penalized for this entire period, and
                    #   there are no alternative peers, so this call should hang:
                    await asyncio.wait_for(queue.get_queen_peer(),
                                           timeout=0.05)

                if has_parallel_peasant_call:
                    waiting_on_peasant = asyncio.ensure_future(
                        queue.pop_fastest_peasant())

                # But after waiting long enough, even with just one peer, the blocking
                #   call should return, whether or not there is also a waiting call
                #   looking for a peasant.
                final_queen = await asyncio.wait_for(queue.get_queen_peer(),
                                                     timeout=0.075)
                assert final_queen == connection_to_remote

                if has_parallel_peasant_call:
                    waiting_on_peasant.cancel()
                    with pytest.raises(asyncio.CancelledError):
                        await waiting_on_peasant
Example #22
async def test_proxy_peer_requests_with_timeouts(request, event_bus,
                                                 other_event_bus, event_loop,
                                                 client_and_server):

    server_event_bus = event_bus
    client_event_bus = other_event_bus
    client_peer, server_peer = client_and_server

    client_peer_pool = MockPeerPoolWithConnectedPeers(
        [client_peer], event_bus=client_event_bus)
    server_peer_pool = MockPeerPoolWithConnectedPeers(
        [server_peer], event_bus=server_event_bus)

    async with run_peer_pool_event_server(
            client_event_bus,
            client_peer_pool,
            handler_type=ETHPeerPoolEventServer), run_peer_pool_event_server(
                server_event_bus,
                server_peer_pool,
                handler_type=ETHPeerPoolEventServer), run_proxy_peer_pool(
                    client_event_bus
                ) as client_proxy_peer_pool, run_proxy_peer_pool(
                    server_event_bus):

        proxy_peer = await client_proxy_peer_pool.ensure_proxy_peer(
            client_peer.session)

        with pytest.raises(asyncio.TimeoutError):
            await proxy_peer.requests.get_block_headers(0,
                                                        1,
                                                        0,
                                                        False,
                                                        timeout=0.01)

        with pytest.raises(asyncio.TimeoutError):
            await proxy_peer.requests.get_receipts((), timeout=0.01)

        with pytest.raises(asyncio.TimeoutError):
            await proxy_peer.requests.get_block_bodies((), timeout=0.01)

        with pytest.raises(asyncio.TimeoutError):
            await proxy_peer.requests.get_node_data((), timeout=0.01)
Example #23
async def test_event_bus_requests_against_peer_pool(request, event_loop,
                                                    event_bus):
    async with ParagonPeerPairFactory() as (alice, bob):
        peer_pool = ParagonMockPeerPoolWithConnectedPeers([alice, bob])
        async with run_peer_pool_event_server(event_bus, peer_pool):

            await event_bus.wait_until_any_endpoint_subscribed_to(
                PeerCountRequest)

            res = await event_bus.request(PeerCountRequest())

            assert res.peer_count == 2
Example #24
async def test_header_gap_fill_detects_invalid_attempt(caplog,
                                                       event_loop,
                                                       event_bus,
                                                       chaindb_with_gaps,
                                                       chaindb_1000,
                                                       chaindb_uncle):

    client_context = ChainContextFactory(headerdb__db=chaindb_with_gaps.db)
    server_context = ChainContextFactory(headerdb__db=chaindb_uncle.db)
    peer_pair = LatestETHPeerPairFactory(
        alice_peer_context=client_context,
        bob_peer_context=server_context,
        event_bus=event_bus,
    )
    async with peer_pair as (client_peer, server_peer):

        client = SequentialHeaderChainGapSyncer(
            LatestTestChain(chaindb_with_gaps.db),
            chaindb_with_gaps,
            MockPeerPoolWithConnectedPeers([client_peer], event_bus=event_bus)
        )
        server_peer_pool = MockPeerPoolWithConnectedPeers([server_peer], event_bus=event_bus)
        uncle_chaindb = AsyncChainDB(chaindb_uncle.db)
        async with run_peer_pool_event_server(
            event_bus, server_peer_pool, handler_type=ETHPeerPoolEventServer
        ), background_asyncio_service(ETHRequestServer(
            event_bus, TO_NETWORKING_BROADCAST_CONFIG, uncle_chaindb,
        )):

            server_peer.logger.info("%s is serving 1000 blocks", server_peer)
            client_peer.logger.info("%s is syncing up 1000", client_peer)

            # We check for 499 because 500 exists from the very beginning (the checkpoint)
            expected_block_number = 499
            async with background_asyncio_service(client):
                try:
                    await wait_for_head(
                        chaindb_with_gaps,
                        chaindb_1000.get_canonical_block_header_by_number(expected_block_number),
                        sync_timeout=5,
                    )
                except asyncio.TimeoutError:
                    assert "Attempted to fill gap with invalid header" in caplog.text
                    # Monkey patch the uncle chaindb to effectively make the attacker peer
                    # switch to the correct chain.
                    uncle_chaindb.db = chaindb_1000.db
                    await wait_for_head(
                        chaindb_with_gaps,
                        chaindb_1000.get_canonical_block_header_by_number(expected_block_number)
                    )
                else:
                    raise AssertionError("Succeeded when it was expected to fail")
Example #25
async def test_event_bus_requests_against_peer_pool(request, event_loop,
                                                    event_bus):

    alice, bob = await get_directly_linked_peers(request, event_loop)
    peer_pool = ParagonMockPeerPoolWithConnectedPeers([alice, bob])
    async with run_peer_pool_event_server(event_bus, peer_pool):

        await event_bus.wait_until_any_connection_subscribed_to(
            PeerCountRequest)

        res = await event_bus.request(PeerCountRequest())

        assert res.peer_count == 2
Example #26
async def get_peer_and_receive_server(
    request, event_loop, event_bus
) -> Tuple[BCCPeer, BCCRequestServer, BCCReceiveServer, asyncio.Queue]:
    alice_chain = await get_fake_chain()
    bob_chain = await get_fake_chain()

    (alice, alice_peer_pool, bob, bob_peer_pool
     ) = await bcc_helpers.get_directly_linked_peers_in_peer_pools(
         request,
         event_loop,
         alice_chain_db=alice_chain.chaindb,
         bob_chain_db=bob_chain.chaindb,
     )

    msg_queue = asyncio.Queue()
    orig_handle_msg = BCCReceiveServer._handle_msg

    # Inject a queue into each `BCCReceiveServer` that receives every message passed to
    # `_handle_msg`, right after each `_handle_msg` call finishes.
    # This is crucial so the test can wait until `_handle_msg` has finished.
    async def _handle_msg(self, base_peer, cmd, msg):
        task = asyncio.ensure_future(orig_handle_msg(self, base_peer, cmd,
                                                     msg))

        def enqueue_msg(future, msg):
            msg_queue.put_nowait(msg)

        task.add_done_callback(functools.partial(enqueue_msg, msg=msg))
        await task

    BCCReceiveServer._handle_msg = _handle_msg

    async with run_peer_pool_event_server(
            event_bus, alice_peer_pool,
            BCCPeerPoolEventServer), run_request_server(
                event_bus, alice_chain.chaindb,
                server_type=BCCRequestServer) as alice_req_server:

        bob_recv_server = BCCReceiveServer(chain=bob_chain,
                                           peer_pool=bob_peer_pool)

        asyncio.ensure_future(bob_recv_server.run())
        await bob_recv_server.events.started.wait()

        def finalizer():
            event_loop.run_until_complete(bob_recv_server.cancel())

        request.addfinalizer(finalizer)

        yield alice, alice_req_server, bob_recv_server, msg_queue
Example #27
async def test_admin_removePeer_for_not_connected_peers(
        jsonrpc_ipc_pipe_path, event_loop, event_bus, ipc_server):

    async with LatestETHPeerPairFactory() as (alice, bob):
        peer_pool = MockPeerPoolWithConnectedPeers([alice, bob],
                                                   event_bus=event_bus)

        async with run_peer_pool_event_server(event_bus, peer_pool):
            enode = f'enode://{GOOD_KEY}@10.0.0.1:30303'
            request = build_request('admin_removePeer', [enode])

            result = await get_ipc_response(jsonrpc_ipc_pipe_path, request,
                                            event_loop, event_bus)
            assert result == {'id': 3, 'jsonrpc': '2.0', 'result': False}
Example #28
async def test_regular_syncer_fallback(request, event_loop, event_bus,
                                       chaindb_fresh, chaindb_20):
    """
    Test the scenario where a header comes in that's not in memory (but is in the DB)
    """
    client_context = ChainContextFactory(headerdb__db=chaindb_fresh.db)
    server_context = ChainContextFactory(headerdb__db=chaindb_20.db)
    peer_pair = ETHPeerPairFactory(
        alice_peer_context=client_context,
        bob_peer_context=server_context,
        event_bus=event_bus,
    )

    async with peer_pair as (client_peer, server_peer):

        client = FallbackTesting_RegularChainSyncer(
            ByzantiumTestChain(chaindb_fresh.db), chaindb_fresh,
            MockPeerPoolWithConnectedPeers([client_peer], event_bus=event_bus))
        server_peer_pool = MockPeerPoolWithConnectedPeers([server_peer],
                                                          event_bus=event_bus)

        async with run_peer_pool_event_server(
                event_bus, server_peer_pool,
                handler_type=ETHPeerPoolEventServer
        ), background_asyncio_service(
                ETHRequestServer(event_bus, TO_NETWORKING_BROADCAST_CONFIG,
                                 AsyncChainDB(chaindb_20.db))):

            server_peer.logger.info("%s is serving 20 blocks", server_peer)
            client_peer.logger.info("%s is syncing up 20", client_peer)

            def finalizer():
                event_loop.run_until_complete(client.cancel())
                # Yield control so that client/server.run() returns, otherwise
                # asyncio will complain.
                event_loop.run_until_complete(asyncio.sleep(0.1))

            request.addfinalizer(finalizer)

            asyncio.ensure_future(client.run())

            await wait_for_head(chaindb_fresh, chaindb_20.get_canonical_head())
            head = chaindb_fresh.get_canonical_head()
            assert head.state_root in chaindb_fresh.db
Example #29
async def test_fast_syncer(request, event_bus, event_loop, chaindb_fresh,
                           chaindb_20):
    client_context = ChainContextFactory(headerdb__db=chaindb_fresh.db)
    server_context = ChainContextFactory(headerdb__db=chaindb_20.db)
    peer_pair = ETHPeerPairFactory(
        alice_peer_context=client_context,
        bob_peer_context=server_context,
        event_bus=event_bus,
    )
    async with peer_pair as (client_peer, server_peer):

        client_peer_pool = MockPeerPoolWithConnectedPeers([client_peer])
        client = FastChainSyncer(LatestTestChain(chaindb_fresh.db),
                                 chaindb_fresh, client_peer_pool)
        server_peer_pool = MockPeerPoolWithConnectedPeers([server_peer],
                                                          event_bus=event_bus)

        async with run_peer_pool_event_server(
                event_bus,
                server_peer_pool,
                handler_type=ETHPeerPoolEventServer,
        ), run_request_server(
                event_bus,
                AsyncChainDB(chaindb_20.db),
        ):

            server_peer.logger.info("%s is serving 20 blocks", server_peer)
            client_peer.logger.info("%s is syncing up 20", client_peer)

            # FastChainSyncer.run() will return as soon as it's caught up with the peer.
            await asyncio.wait_for(client.run(), timeout=5)

            head = chaindb_fresh.get_canonical_head()
            assert head == chaindb_20.get_canonical_head()

            # Now download the state for the chain's head.
            state_downloader = StateDownloader(chaindb_fresh, chaindb_fresh.db,
                                               head.state_root,
                                               client_peer_pool)
            await asyncio.wait_for(state_downloader.run(), timeout=5)

            assert head.state_root in chaindb_fresh.db
Example #30
async def get_sync_setup(request,
                         event_loop,
                         event_bus,
                         alice_chain_db,
                         bob_chain_db,
                         genesis_config=SERENITY_GENESIS_CONFIG):
    alice_context = BeaconContextFactory(chain_db=alice_chain_db)
    bob_context = BeaconContextFactory(chain_db=bob_chain_db)
    peer_pair = BCCPeerPairFactory(
        alice_peer_context=alice_context,
        bob_peer_context=bob_context,
        event_bus=event_bus,
    )
    async with peer_pair as (alice, bob):
        async with BCCPeerPoolFactory.run_for_peer(
                alice) as alice_peer_pool, BCCPeerPoolFactory.run_for_peer(
                    bob) as bob_peer_pool:  # noqa: E501

            bob_request_server = BCCRequestServer(
                event_bus, TO_NETWORKING_BROADCAST_CONFIG,
                bob.context.chain_db)

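            # Alice syncs from Bob; Bob's request server answers block requests
            # from his chain database.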
            alice_syncer = BeaconChainSyncer(
                alice_chain_db,
                alice_peer_pool,
                SimpleWriterBlockImporter(alice_chain_db),
                genesis_config,
            )
            async with run_peer_pool_event_server(
                    event_bus, bob_peer_pool,
                    handler_type=BCCPeerPoolEventServer):

                asyncio.ensure_future(bob_request_server.run())
                asyncio.ensure_future(alice_syncer.run())

                def finalizer():
                    event_loop.run_until_complete(alice_syncer.cancel())
                    event_loop.run_until_complete(bob_request_server.cancel())

                request.addfinalizer(finalizer)
                await alice_syncer.events.finished.wait()
                yield alice_syncer