def __init__(self, event_bus: EndpointAPI, trinity_config: TrinityConfig) -> None:
    """Initialize the node from ``trinity_config`` and wire up the light peer chain."""
    super().__init__(event_bus, trinity_config)
    # Networking parameters needed later when constructing the p2p server.
    self._nodekey = trinity_config.nodekey
    self._port = trinity_config.port
    self._max_peers = trinity_config.max_peers
    # The base class owns the peer pool; narrow it to the LES-specific type.
    les_pool = cast(LESPeerPool, self.get_peer_pool())
    self._peer_chain = LightPeerChain(
        self.headerdb,
        les_pool,
        token=self.cancel_token,
    )
def __init__(self, plugin_manager: PluginManager, chain_config: ChainConfig) -> None:
    """Copy networking settings off ``chain_config``, build the peer chain, and announce readiness."""
    super().__init__(plugin_manager, chain_config)
    # Networking/configuration state consumed by the rest of the node.
    self._network_id = chain_config.network_id
    self._nodekey = chain_config.nodekey
    self._port = chain_config.port
    self._max_peers = chain_config.max_peers
    self._bootstrap_nodes = chain_config.bootstrap_nodes
    self._preferred_nodes = chain_config.preferred_nodes
    self._use_discv5 = chain_config.use_discv5
    pool = self.get_peer_pool()
    self._peer_chain = LightPeerChain(self.headerdb, pool, self.cancel_token)
    # Let the plugin machinery know our resources are now available.
    self.notify_resource_available()
def __init__(self, event_bus: EndpointAPI, metrics_service: MetricsServiceAPI, trinity_config: TrinityConfig) -> None:
    """Initialize from ``trinity_config`` and create the light peer chain."""
    super().__init__(event_bus, metrics_service, trinity_config)
    # Networking parameters needed later when constructing the p2p server.
    self._nodekey = trinity_config.nodekey
    self._port = trinity_config.port
    self._max_peers = trinity_config.max_peers
    # Narrow the generic peer pool to the LES-specific type.
    les_pool = cast(LESPeerPool, self.get_peer_pool())
    self._peer_chain = LightPeerChain(
        self.headerdb,
        les_pool,
    )
def __init__(self, event_bus: Endpoint, trinity_config: TrinityConfig) -> None:
    """Capture networking settings from ``trinity_config`` and build the peer chain."""
    super().__init__(event_bus, trinity_config)
    # Networking/discovery configuration for this node.
    self._nodekey = trinity_config.nodekey
    self._port = trinity_config.port
    self._max_peers = trinity_config.max_peers
    self._bootstrap_nodes = trinity_config.bootstrap_nodes
    self._preferred_nodes = trinity_config.preferred_nodes
    self._use_discv5 = trinity_config.use_discv5
    # Narrow the generic peer pool to the LES-specific type.
    pool = cast(LESPeerPool, self.get_peer_pool())
    self._peer_chain = LightPeerChain(self.headerdb, pool, token=self.cancel_token)
def __init__(self, plugin_manager: PluginManager, trinity_config: TrinityConfig) -> None:
    """Copy settings off ``trinity_config``, build the peer chain, and announce readiness."""
    super().__init__(plugin_manager, trinity_config)
    # Networking/discovery configuration for this node.
    self._nodekey = trinity_config.nodekey
    self._port = trinity_config.port
    self._max_peers = trinity_config.max_peers
    self._bootstrap_nodes = trinity_config.bootstrap_nodes
    self._preferred_nodes = trinity_config.preferred_nodes
    self._use_discv5 = trinity_config.use_discv5
    # Narrow the generic peer pool to the LES-specific type.
    pool = cast(LESPeerPool, self.get_peer_pool())
    self._peer_chain = LightPeerChain(
        self.headerdb,
        pool,
        token=self.cancel_token,
    )
    # Let the plugin machinery know our resources are now available.
    self.notify_resource_available()
def __init__(self, plugin_manager: PluginManager, chain_config: ChainConfig) -> None:
    """Construct discovery, peer pool and light peer chain from ``chain_config``."""
    super().__init__(plugin_manager, chain_config)
    self.network_id = chain_config.network_id
    self.nodekey = chain_config.nodekey
    self._port = chain_config.port
    # Listen on all interfaces; the same port number is used for UDP and TCP.
    listen_address = Address('0.0.0.0', chain_config.port, chain_config.port)
    self._discovery_proto = PreferredNodeDiscoveryProtocol(
        chain_config.nodekey,
        listen_address,
        bootstrap_nodes=chain_config.bootstrap_nodes,
        preferred_nodes=chain_config.preferred_nodes,
    )
    # The pool must exist before the discovery service, which feeds it peers.
    self._peer_pool = self._create_peer_pool(chain_config)
    self._discovery = DiscoveryService(
        self._discovery_proto, self._peer_pool, self.cancel_token)
    self._peer_chain = LightPeerChain(self.headerdb, self._peer_pool, self.cancel_token)
    # Let the plugin machinery know our resources are now available.
    self.notify_resource_available()
async def test_lightchain_integration(request, event_loop, caplog, geth_ipc_path, enode, geth_process):
    """Test LightChainSyncer/LightPeerChain against a running geth instance.

    In order to run this manually, you can use `tox -e py36-lightchain_integration` or:

        pytest --integration --capture=no tests/integration/test_lightchain_integration.py

    The fixture for this test was generated with:

        geth --testnet --syncmode full

    It only needs the first 11 blocks for this test to succeed.
    """
    if not pytest.config.getoption("--integration"):
        pytest.skip("Not asked to run integration tests")

    # will almost certainly want verbose logging in a failure
    caplog.set_level(logging.DEBUG)

    # make sure geth has been launched
    wait_for_socket(geth_ipc_path)

    remote = Node.from_uri(enode)
    base_db = AtomicDB()
    chaindb = FakeAsyncChainDB(base_db)
    # The genesis header must be persisted before the syncer starts walking the chain.
    chaindb.persist_header(ROPSTEN_GENESIS_HEADER)
    headerdb = FakeAsyncHeaderDB(base_db)
    context = ChainContext(
        headerdb=headerdb,
        network_id=ROPSTEN_NETWORK_ID,
        vm_configuration=ROPSTEN_VM_CONFIGURATION,
    )
    peer_pool = LESPeerPool(
        privkey=ecies.generate_privkey(),
        context=context,
    )
    chain = FakeAsyncRopstenChain(base_db)
    syncer = LightChainSyncer(chain, chaindb, peer_pool)
    # A single connected peer (the local geth) is enough to start syncing.
    syncer.min_peers_to_sync = 1
    peer_chain = LightPeerChain(headerdb, peer_pool)
    server_request_handler = LightRequestServer(headerdb, peer_pool)

    # Fire-and-forget background services; they are cancelled in the finalizer below.
    asyncio.ensure_future(peer_pool.run())
    asyncio.ensure_future(connect_to_peers_loop(peer_pool, tuple([remote])))
    asyncio.ensure_future(peer_chain.run())
    asyncio.ensure_future(server_request_handler.run())
    asyncio.ensure_future(syncer.run())
    await asyncio.sleep(0)  # Yield control to give the LightChainSyncer a chance to start

    def finalizer():
        # Tear down all background services so the event loop can close cleanly.
        event_loop.run_until_complete(peer_pool.cancel())
        event_loop.run_until_complete(peer_chain.cancel())
        event_loop.run_until_complete(syncer.cancel())
        event_loop.run_until_complete(server_request_handler.cancel())
    request.addfinalizer(finalizer)

    n = 11

    # Wait for the chain to sync a few headers.
    async def wait_for_header_sync(block_number):
        while headerdb.get_canonical_head().block_number < block_number:
            await asyncio.sleep(0.1)
    await asyncio.wait_for(wait_for_header_sync(n), 5)

    # https://ropsten.etherscan.io/block/11
    header = headerdb.get_canonical_block_header_by_number(n)
    body = await peer_chain.coro_get_block_body_by_hash(header.hash)
    assert len(body['transactions']) == 15
    receipts = await peer_chain.coro_get_receipts(header.hash)
    assert len(receipts) == 15
    # Known receipt hash for block 11 on Ropsten.
    assert encode_hex(keccak(rlp.encode(receipts[0]))) == (
        '0xf709ed2c57efc18a1675e8c740f3294c9e2cb36ba7bb3b89d3ab4c8fef9d8860')
    assert len(peer_pool) == 1
    peer = peer_pool.highest_td_peer
    head = await peer_chain.coro_get_block_header_by_hash(peer.head_hash)

    # In order to answer queries for contract code, geth needs the state trie entry for the block
    # we specify in the query, but because of fast sync we can only assume it has that for recent
    # blocks, so we use the current head to lookup the code for the contract below.
    # https://ropsten.etherscan.io/address/0x95a48dca999c89e4e284930d9b9af973a7481287
    contract_addr = decode_hex('0x8B09D9ac6A4F7778fCb22852e879C7F3B2bEeF81')
    contract_code = await peer_chain.coro_get_contract_code(head.hash, contract_addr)
    assert encode_hex(contract_code) == '0x600060006000600060006000356000f1'
    account = await peer_chain.coro_get_account(head.hash, contract_addr)
    assert account.code_hash == keccak(contract_code)
    assert account.balance == 0
class LightNode(Node[LESPeer]):
    """A node running as a light client, serving requests through a ``LightPeerChain``."""

    # Lazily-created singletons; built on first access by the getters below.
    _chain: LightDispatchChain = None
    _peer_chain: LightPeerChain = None
    _p2p_server: LightServer = None

    network_id: int = None
    nodekey: PrivateKey = None

    def __init__(self, event_bus: EndpointAPI, trinity_config: TrinityConfig) -> None:
        super().__init__(event_bus, trinity_config)
        # Networking parameters used when constructing the p2p server.
        self._nodekey = trinity_config.nodekey
        self._port = trinity_config.port
        self._max_peers = trinity_config.max_peers
        self._peer_chain = LightPeerChain(
            self.headerdb,
            cast(LESPeerPool, self.get_peer_pool()),
            token=self.master_cancel_token,
        )

    @property
    def chain_class(self) -> Type[LightDispatchChain]:
        # Subclasses are expected to configure this on their chain config.
        return self.chain_config.light_chain_class

    async def run(self) -> None:
        # The peer chain runs as a daemon child of this node's service manager.
        self.manager.run_daemon_child_service(self._peer_chain.as_new_service())
        await super().run()

    def get_chain(self) -> LightDispatchChain:
        # Lazy singleton: build the dispatch chain on first use.
        if self._chain is not None:
            return self._chain
        if self.chain_class is None:
            raise AttributeError("LightNode subclass must set chain_class")
        if self._peer_chain is None:
            raise ValidationError("peer chain is not initialized!")
        self._chain = self.chain_class(self.headerdb, peer_chain=self._peer_chain)
        return self._chain

    def get_event_server(self) -> PeerPoolEventServer[LESPeer]:
        # Lazy singleton: bridge peer-pool events onto the event bus.
        if self._event_server is None:
            self._event_server = LESPeerPoolEventServer(
                self.event_bus,
                self.get_peer_pool(),
                self._peer_chain,
            )
        return self._event_server

    def get_p2p_server(self) -> LightServer:
        # Lazy singleton: the server owns the peer pool used everywhere else.
        if self._p2p_server is not None:
            return self._p2p_server
        # NOTE(review): ``self._network_id`` is not assigned in this class's
        # __init__ — presumably the ``Node`` base class sets it; confirm.
        self._p2p_server = LightServer(
            privkey=self._nodekey,
            port=self._port,
            chain=self.get_full_chain(),
            chaindb=AsyncChainDB(self._base_db),
            headerdb=self.headerdb,
            base_db=self._base_db,
            network_id=self._network_id,
            max_peers=self._max_peers,
            token=self.master_cancel_token,
            event_bus=self.event_bus,
        )
        return self._p2p_server

    def get_peer_pool(self) -> BasePeerPool:
        return self.get_p2p_server().peer_pool
async def test_lightchain_integration(request, event_loop, caplog):
    """Test LightChainSyncer/LightPeerChain against a running geth instance.

    In order to run this you need to pass the following to pytest:

        pytest --integration --capture=no --enode=...

    If you don't have any geth testnet data ready, it is very quick to generate some with:

        geth --testnet --syncmode full

    You only need the first 11 blocks for this test to succeed. Then you can restart geth with:

        geth --testnet --lightserv 90 --nodiscover
    """
    # TODO: Implement a pytest fixture that runs geth as above, so that we don't need to run it
    # manually.
    if not pytest.config.getoption("--integration"):
        pytest.skip("Not asked to run integration tests")

    # will almost certainly want verbose logging in a failure
    caplog.set_level(logging.DEBUG)

    remote = Node.from_uri(pytest.config.getoption("--enode"))
    base_db = MemoryDB()
    chaindb = FakeAsyncChainDB(base_db)
    # The genesis header must be persisted before the syncer starts walking the chain.
    chaindb.persist_header(ROPSTEN_GENESIS_HEADER)
    headerdb = FakeAsyncHeaderDB(base_db)
    peer_pool = PeerPool(
        LESPeer,
        FakeAsyncHeaderDB(base_db),
        ROPSTEN_NETWORK_ID,
        ecies.generate_privkey(),
        ROPSTEN_VM_CONFIGURATION,
    )
    chain = FakeAsyncRopstenChain(base_db)
    syncer = LightChainSyncer(chain, chaindb, peer_pool)
    # A single connected peer (the local geth) is enough to start syncing.
    syncer.min_peers_to_sync = 1
    peer_chain = LightPeerChain(headerdb, peer_pool)

    # Fire-and-forget background services; they are cancelled in the finalizer below.
    asyncio.ensure_future(peer_pool.run())
    asyncio.ensure_future(connect_to_peers_loop(peer_pool, tuple([remote])))
    asyncio.ensure_future(peer_chain.run())
    asyncio.ensure_future(syncer.run())
    await asyncio.sleep(0)  # Yield control to give the LightChainSyncer a chance to start

    def finalizer():
        # Tear down all background services so the event loop can close cleanly.
        event_loop.run_until_complete(peer_pool.cancel())
        event_loop.run_until_complete(peer_chain.cancel())
        event_loop.run_until_complete(syncer.cancel())
    request.addfinalizer(finalizer)

    n = 11

    # Wait for the chain to sync a few headers.
    async def wait_for_header_sync(block_number):
        while headerdb.get_canonical_head().block_number < block_number:
            await asyncio.sleep(0.1)
    await asyncio.wait_for(wait_for_header_sync(n), 5)

    # https://ropsten.etherscan.io/block/11
    header = headerdb.get_canonical_block_header_by_number(n)
    body = await peer_chain.get_block_body_by_hash(header.hash)
    assert len(body['transactions']) == 15
    receipts = await peer_chain.get_receipts(header.hash)
    assert len(receipts) == 15
    # Known receipt hash for block 11 on Ropsten.
    assert encode_hex(keccak(rlp.encode(receipts[0]))) == (
        '0xf709ed2c57efc18a1675e8c740f3294c9e2cb36ba7bb3b89d3ab4c8fef9d8860')
    assert len(peer_pool) == 1
    head_info = peer_pool.highest_td_peer.head_info
    head = await peer_chain.get_block_header_by_hash(head_info.block_hash)
    assert head.block_number == head_info.block_number

    # In order to answer queries for contract code, geth needs the state trie entry for the block
    # we specify in the query, but because of fast sync we can only assume it has that for recent
    # blocks, so we use the current head to lookup the code for the contract below.
    # https://ropsten.etherscan.io/address/0x95a48dca999c89e4e284930d9b9af973a7481287
    contract_addr = decode_hex('0x8B09D9ac6A4F7778fCb22852e879C7F3B2bEeF81')
    contract_code = await peer_chain.get_contract_code(head.hash, contract_addr)
    assert encode_hex(contract_code) == '0x600060006000600060006000356000f1'
    account = await peer_chain.get_account(head.hash, contract_addr)
    assert account.code_hash == keccak(contract_code)
    assert account.balance == 0
def test_can_instantiate_light_peer_chain():
    """A LightPeerChain can be constructed even with null collaborators."""
    instance = LightPeerChain(None, None)
    assert instance is not None