Example #1
File: test_peer.py  Project: onyb/trinity
async def test_peer_pool_iter(event_loop):
    factory_a = ETHPeerPairFactory()
    factory_b = ETHPeerPairFactory()
    factory_c = ETHPeerPairFactory()
    async with factory_a as (peer1, _), \
               factory_b as (peer2, _), \
               factory_c as (peer3, _):
        pool = MockPeerPoolWithConnectedPeers([peer1, peer2, peer3])
        peers = list([peer async for peer in pool])

        assert len(peers) == 3
        assert peer1 in peers
        assert peer2 in peers
        assert peer3 in peers

        peers = []
        asyncio.ensure_future(
            peer2.disconnect(DisconnectReason.DISCONNECT_REQUESTED))
        async for peer in pool:
            peers.append(peer)

        assert len(peers) == 2
        assert peer1 in peers
        assert peer2 not in peers
        assert peer3 in peers
Example #2
async def test_ETH_peers():
    async with ETHPeerPairFactory() as (alice, bob):
        assert isinstance(alice, ETHPeer)
        assert isinstance(bob, ETHPeer)

        assert isinstance(alice.sub_proto, ETHProtocol)
        assert isinstance(bob.sub_proto, ETHProtocol)
Example #3
async def test_skeleton_syncer(request, event_loop, event_bus, chaindb_fresh,
                               chaindb_1000):

    client_context = ChainContextFactory(headerdb__db=chaindb_fresh.db)
    server_context = ChainContextFactory(headerdb__db=chaindb_1000.db)
    peer_pair = ETHPeerPairFactory(
        alice_peer_context=client_context,
        bob_peer_context=server_context,
        event_bus=event_bus,
    )
    async with peer_pair as (client_peer, server_peer):

        client_peer_pool = MockPeerPoolWithConnectedPeers([client_peer],
                                                          event_bus=event_bus)
        client = FastChainSyncer(LatestTestChain(chaindb_fresh.db),
                                 chaindb_fresh, client_peer_pool)
        server_peer_pool = MockPeerPoolWithConnectedPeers([server_peer],
                                                          event_bus=event_bus)

        async with run_peer_pool_event_server(
                event_bus, server_peer_pool,
                handler_type=ETHPeerPoolEventServer), run_request_server(
                    event_bus, AsyncChainDB(chaindb_1000.db)):

            client_peer.logger.info("%s is serving 1000 blocks", client_peer)
            server_peer.logger.info("%s is syncing up 1000 blocks",
                                    server_peer)

            await asyncio.wait_for(client.run(), timeout=20)

            head = chaindb_fresh.get_canonical_head()
            assert head == chaindb_1000.get_canonical_head()
Example #4
async def client_and_server():
    peer_pair = ETHPeerPairFactory(
        alice_peer_context=ChainContextFactory(),
        bob_peer_context=ChainContextFactory(),
    )
    async with peer_pair as (client_peer, server_peer):
        yield client_peer, server_peer
Example #5
async def client_and_server(chaindb_fresh, chaindb_20):
    peer_pair = ETHPeerPairFactory(
        alice_peer_context=ChainContextFactory(headerdb__db=chaindb_fresh.db),
        bob_peer_context=ChainContextFactory(headerdb__db=chaindb_20.db),
    )
    async with peer_pair as (client_peer, server_peer):
        yield client_peer, server_peer
Example #6
async def alice_and_bob(alice_chain, bob_chain):
    pair_factory = ETHPeerPairFactory(
        alice_client_version='alice',
        alice_peer_context=ChainContextFactory(headerdb=AsyncHeaderDB(alice_chain.headerdb.db)),
        bob_client_version='bob',
        bob_peer_context=ChainContextFactory(headerdb=AsyncHeaderDB(bob_chain.headerdb.db)),
    )
    async with pair_factory as (alice, bob):
        yield alice, bob
Example #7
async def test_handshake_with_incompatible_fork_id(alice_chain, bob_chain):

    alice_chain = build(alice_chain, mine_block())

    pair_factory = ETHPeerPairFactory(alice_peer_context=ChainContextFactory(
        headerdb=AsyncHeaderDB(alice_chain.headerdb.db),
        vm_configuration=((1, PetersburgVM), (2, MuirGlacierVM))), )
    with pytest.raises(WrongForkIDFailure):
        async with pair_factory as (alice, bob):
            pass
Example #8
File: test_peer.py  Project: onyb/trinity
async def test_remote_dao_fork_validation_skipped_on_eth64(monkeypatch):
    dao_fork_validator_called = False

    async def validate_remote_dao_fork_block():
        nonlocal dao_fork_validator_called
        dao_fork_validator_called = True

    async with ETHPeerPairFactory() as (alice, _):
        boot_manager = alice.get_boot_manager()
        monkeypatch.setattr(boot_manager, 'validate_remote_dao_fork_block',
                            validate_remote_dao_fork_block)
        await boot_manager.run()
        assert not dao_fork_validator_called
Example #9
async def test_regular_syncer_fallback(request, event_loop, event_bus,
                                       chaindb_fresh, chaindb_20):
    """
    Test the scenario where a header comes in that's not in memory (but is in the DB)
    """
    client_context = ChainContextFactory(headerdb__db=chaindb_fresh.db)
    server_context = ChainContextFactory(headerdb__db=chaindb_20.db)
    peer_pair = ETHPeerPairFactory(
        alice_peer_context=client_context,
        bob_peer_context=server_context,
        event_bus=event_bus,
    )

    async with peer_pair as (client_peer, server_peer):

        client = FallbackTesting_RegularChainSyncer(
            ByzantiumTestChain(chaindb_fresh.db), chaindb_fresh,
            MockPeerPoolWithConnectedPeers([client_peer], event_bus=event_bus))
        server_peer_pool = MockPeerPoolWithConnectedPeers([server_peer],
                                                          event_bus=event_bus)

        async with run_peer_pool_event_server(
                event_bus, server_peer_pool,
                handler_type=ETHPeerPoolEventServer
        ), background_asyncio_service(
                ETHRequestServer(event_bus, TO_NETWORKING_BROADCAST_CONFIG,
                                 AsyncChainDB(chaindb_20.db))):

            server_peer.logger.info("%s is serving 20 blocks", server_peer)
            client_peer.logger.info("%s is syncing up 20", client_peer)

            def finalizer():
                event_loop.run_until_complete(client.cancel())
                # Yield control so that client/server.run() returns, otherwise
                # asyncio will complain.
                event_loop.run_until_complete(asyncio.sleep(0.1))

            request.addfinalizer(finalizer)

            asyncio.ensure_future(client.run())

            await wait_for_head(chaindb_fresh, chaindb_20.get_canonical_head())
            head = chaindb_fresh.get_canonical_head()
            assert head.state_root in chaindb_fresh.db
Example #10
async def test_fast_syncer(request, event_bus, event_loop, chaindb_fresh,
                           chaindb_20):
    client_context = ChainContextFactory(headerdb__db=chaindb_fresh.db)
    server_context = ChainContextFactory(headerdb__db=chaindb_20.db)
    peer_pair = ETHPeerPairFactory(
        alice_peer_context=client_context,
        bob_peer_context=server_context,
        event_bus=event_bus,
    )
    async with peer_pair as (client_peer, server_peer):

        client_peer_pool = MockPeerPoolWithConnectedPeers([client_peer])
        client = FastChainSyncer(LatestTestChain(chaindb_fresh.db),
                                 chaindb_fresh, client_peer_pool)
        server_peer_pool = MockPeerPoolWithConnectedPeers([server_peer],
                                                          event_bus=event_bus)

        async with run_peer_pool_event_server(
                event_bus,
                server_peer_pool,
                handler_type=ETHPeerPoolEventServer,
        ), run_request_server(
                event_bus,
                AsyncChainDB(chaindb_20.db),
        ):

            server_peer.logger.info("%s is serving 20 blocks", server_peer)
            client_peer.logger.info("%s is syncing up 20", client_peer)

            # FastChainSyncer.run() will return as soon as it's caught up with the peer.
            await asyncio.wait_for(client.run(), timeout=5)

            head = chaindb_fresh.get_canonical_head()
            assert head == chaindb_20.get_canonical_head()

            # Now download the state for the chain's head.
            state_downloader = StateDownloader(chaindb_fresh, chaindb_fresh.db,
                                               head.state_root,
                                               client_peer_pool)
            await asyncio.wait_for(state_downloader.run(), timeout=5)

            assert head.state_root in chaindb_fresh.db
Example #11
async def test_eth_get_headers_stats():
    async with ETHPeerPairFactory() as (peer, remote):

        async def send_headers():
            remote.sub_proto.send_block_headers(mk_header_chain(1))

        for idx in range(1, 5):
            get_headers_task = asyncio.ensure_future(
                peer.requests.get_block_headers(0, 1, 0, False))
            asyncio.ensure_future(send_headers())

            await get_headers_task

            stats = peer.requests.get_stats()

            assert stats['BlockHeaders'].startswith(
                'msgs={0}  items={0}  rtt='.format(idx))
            assert 'timeouts=0' in stats['BlockHeaders']
            assert 'quality=' in stats['BlockHeaders']
            assert 'ips=' in stats['BlockHeaders']
Example #12
async def test_eth_get_headers_stats():
    async with ETHPeerPairFactory() as (peer, remote):

        async def send_headers():
            remote.eth_api.send_block_headers(mk_header_chain(1))

        for idx in range(1, 5):
            get_headers_task = asyncio.ensure_future(
                peer.eth_api.get_block_headers(0, 1, 0, False))
            asyncio.ensure_future(send_headers())

            await get_headers_task

            stats = peer.eth_api.get_extra_stats()

            for line in stats:
                if 'BlockHeaders' in line:
                    assert 'msgs={0}  items={0}  rtt='.format(idx) in line
                    assert 'timeouts=0' in line
                    assert 'quality=' in line
                    assert 'ips=' in line
Example #13
async def test_admin_peers(jsonrpc_ipc_pipe_path, event_loop, event_bus,
                           ipc_server):

    async with ETHPeerPairFactory() as (alice, bob):
        peer_pool = MockPeerPoolWithConnectedPeers([alice, bob],
                                                   event_bus=event_bus)

        async with run_peer_pool_event_server(event_bus, peer_pool):

            request = build_request('admin_peers')

            result = await get_ipc_response(jsonrpc_ipc_pipe_path, request,
                                            event_loop, event_bus)

            peers = result['result']
            json_bob = peers[0]
            json_alice = peers[1]

            def to_remote_address(session):
                return f"{session.remote.address.ip}:{session.remote.address.tcp_port}"

            assert json_bob['caps'] == ['eth/63', 'eth/64']
            assert json_bob['enode'] == alice.connection.session.remote.uri()
            assert json_bob['id'] == str(alice.connection.session.id)
            assert json_bob['name'] == 'bob'
            bob_network = json_bob['network']
            assert not bob_network['inbound']
            assert bob_network['localAddress'] == '0.0.0.0:30303'
            assert bob_network['remoteAddress'] == to_remote_address(
                alice.connection.session)

            assert json_alice['caps'] == ['eth/63', 'eth/64']
            assert json_alice['enode'] == bob.connection.session.remote.uri()
            assert json_alice['id'] == str(bob.connection.session.id)
            assert json_alice['name'] == 'alice'
            alice_network = json_alice['network']
            assert alice_network['inbound']
            assert alice_network['localAddress'] == '0.0.0.0:30303'
            assert alice_network['remoteAddress'] == to_remote_address(
                bob.connection.session)
Example #14
async def test_regular_syncer(request, event_loop, event_bus, chaindb_fresh,
                              chaindb_20):
    client_context = ChainContextFactory(headerdb__db=chaindb_fresh.db)
    server_context = ChainContextFactory(headerdb__db=chaindb_20.db)
    peer_pair = ETHPeerPairFactory(
        alice_peer_context=client_context,
        bob_peer_context=server_context,
        event_bus=event_bus,
    )

    async with peer_pair as (client_peer, server_peer):

        client = RegularChainSyncer(
            ByzantiumTestChain(chaindb_fresh.db), chaindb_fresh,
            MockPeerPoolWithConnectedPeers([client_peer]))
        server_peer_pool = MockPeerPoolWithConnectedPeers([server_peer],
                                                          event_bus=event_bus)

        async with run_peer_pool_event_server(
                event_bus, server_peer_pool,
                handler_type=ETHPeerPoolEventServer), run_request_server(
                    event_bus, AsyncChainDB(chaindb_20.db)):

            server_peer.logger.info("%s is serving 20 blocks", server_peer)
            client_peer.logger.info("%s is syncing up 20", client_peer)

            def finalizer():
                event_loop.run_until_complete(client.cancel())
                # Yield control so that client/server.run() returns, otherwise
                # asyncio will complain.
                event_loop.run_until_complete(asyncio.sleep(0.1))

            request.addfinalizer(finalizer)

            asyncio.ensure_future(client.run())

            await wait_for_head(chaindb_fresh, chaindb_20.get_canonical_head())
            head = chaindb_fresh.get_canonical_head()
            assert head.state_root in chaindb_fresh.db
Example #15
async def test_eth_get_headers_empty_stats():
    async with ETHPeerPairFactory() as (peer, remote):
        stats = peer.eth_api.get_extra_stats()
        assert all('None' in line for line in stats)
        assert any('BlockHeader' in line for line in stats)
Example #16
async def test_eth_get_headers_empty_stats():
    async with ETHPeerPairFactory() as (peer, remote):
        stats = peer.requests.get_stats()
        assert all(status == 'None' for status in stats.values())
        assert 'BlockHeaders' in stats.keys()
Example #17
async def test_beam_syncer(request,
                           event_loop,
                           event_bus,
                           chaindb_fresh,
                           chaindb_churner,
                           beam_to_block,
                           checkpoint=None):

    client_context = ChainContextFactory(headerdb__db=chaindb_fresh.db)
    server_context = ChainContextFactory(headerdb__db=chaindb_churner.db)
    peer_pair = ETHPeerPairFactory(
        alice_peer_context=client_context,
        bob_peer_context=server_context,
        event_bus=event_bus,
    )
    async with peer_pair as (client_peer, server_peer):

        # Need a name that will be unique per xdist-process, otherwise
        #   lahja IPC endpoints in each process will clobber each other
        unique_process_name = uuid.uuid4()

        # manually add endpoint for beam vm to make requests
        pausing_config = ConnectionConfig.from_name(
            f"PausingEndpoint-{unique_process_name}")

        # manually add endpoint for trie data gatherer to serve requests
        gatherer_config = ConnectionConfig.from_name(
            f"GathererEndpoint-{unique_process_name}")

        client_peer_pool = MockPeerPoolWithConnectedPeers([client_peer])
        server_peer_pool = MockPeerPoolWithConnectedPeers([server_peer],
                                                          event_bus=event_bus)

        async with run_peer_pool_event_server(
                event_bus, server_peer_pool,
                handler_type=ETHPeerPoolEventServer), \
                run_request_server(event_bus, AsyncChainDB(chaindb_churner.db)), \
                AsyncioEndpoint.serve(pausing_config) as pausing_endpoint, \
                AsyncioEndpoint.serve(gatherer_config) as gatherer_endpoint:

            client_chain = make_pausing_beam_chain(
                ((0, PetersburgVM), ),
                chain_id=999,
                db=chaindb_fresh.db,
                event_bus=pausing_endpoint,
                loop=event_loop,
            )

            client = BeamSyncer(
                client_chain,
                chaindb_fresh.db,
                AsyncChainDB(chaindb_fresh.db),
                client_peer_pool,
                gatherer_endpoint,
                force_beam_block_number=beam_to_block,
                checkpoint=checkpoint,
            )

            client_peer.logger.info("%s is serving churner blocks",
                                    client_peer)
            server_peer.logger.info("%s is syncing up churner blocks",
                                    server_peer)

            import_server = BlockImportServer(
                pausing_endpoint,
                client_chain,
                token=client.cancel_token,
            )
            asyncio.ensure_future(import_server.run())

            await pausing_endpoint.connect_to_endpoints(gatherer_config)
            asyncio.ensure_future(client.run())

            # We can sync at least 10 blocks in 1s at current speeds (or reach the current head).
            # Trying to keep the tests short-ish. A fuller test could always set the target header
            #   to the chaindb_churner canonical head, and increase the timeout significantly
            target_block_number = min(beam_to_block + 10, 129)
            target_head = chaindb_churner.get_canonical_block_header_by_number(
                target_block_number)
            await wait_for_head(chaindb_fresh, target_head, sync_timeout=10)
            assert target_head.state_root in chaindb_fresh.db

            # first stop the import server, so it doesn't hang waiting for state data
            await import_server.cancel()
            await client.cancel()
Example #18
async def eth_peer_and_remote():
    async with ETHPeerPairFactory() as (peer, remote):
        yield peer, remote
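
Note that Example #18 (like Examples #4, #5, and #6) is an async-generator fixture rather than a test. As a rough sketch of how such a fixture might be consumed, assuming the suite runs async tests via pytest-asyncio and that the ETHPeer import path below matches the trinity layout:

import pytest

from trinity.protocol.eth.peer import ETHPeer  # assumed import path


@pytest.mark.asyncio
async def test_eth_peer_and_remote_fixture(eth_peer_and_remote):
    # Hypothetical consumer: the fixture yields both ends of a connected pair,
    # so the test body can exercise either peer directly (compare Example #2).
    peer, remote = eth_peer_and_remote
    assert isinstance(peer, ETHPeer)
    assert isinstance(remote, ETHPeer)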