def test_skip_candidate_if_on_list_or_fork_mismatch():
    """Exercise skip_candidate_if_on_list_or_fork_mismatch against mainnet chain state."""
    fork_blocks = forkid.extract_fork_blocks(MAINNET_VM_CONFIGURATION)
    should_skip = functools.partial(
        skip_candidate_if_on_list_or_fork_mismatch,
        MAINNET_GENESIS_HEADER.hash,
        MAINNET_GENESIS_HEADER.block_number,
        fork_blocks,
    )

    nodes_without_forkid = NodeFactory.create_batch(10)
    compatible_node = _make_node_with_enr_and_forkid(
        MAINNET_GENESIS_HEADER.hash,
        MAINNET_GENESIS_HEADER.block_number,
        MAINNET_VM_CONFIGURATION,
    )
    # Sanity check: compatible_node's forkid really is compatible with our current chain state.
    forkid.validate_forkid(
        forkid.extract_forkid(compatible_node.enr),
        MAINNET_GENESIS_HEADER.hash,
        MAINNET_GENESIS_HEADER.block_number,
        fork_blocks,
    )

    # It returns True for candidates on the skip list, even if they are fork-id compatible.
    skip_list = [nodes_without_forkid[1].id, compatible_node.id]
    assert should_skip(skip_list, compatible_node) is True
    assert should_skip(skip_list, nodes_without_forkid[1]) is True

    # It returns False for candidates with no fork-id that are not on the skip list.
    with pytest.raises(ENRMissingForkID):
        forkid.extract_forkid(nodes_without_forkid[0].enr)
    assert should_skip(skip_list, nodes_without_forkid[0]) is False

    # It returns False for candidates with compatible fork-ids that are not on the skip list.
    assert should_skip([], compatible_node) is False

    # It returns True for candidates with incompatible fork-ids.
    incompatible_node = _make_node_with_enr_and_forkid(
        ROPSTEN_GENESIS_HEADER.hash,
        ROPSTEN_GENESIS_HEADER.block_number,
        ROPSTEN_VM_CONFIGURATION,
    )
    with pytest.raises(BaseForkIDValidationError):
        forkid.validate_forkid(
            forkid.extract_forkid(incompatible_node.enr),
            MAINNET_GENESIS_HEADER.hash,
            MAINNET_GENESIS_HEADER.block_number,
            fork_blocks,
        )
    assert should_skip([], incompatible_node) is True
def test_extract_forkid():
    """A known mainnet ENR must yield the expected ForkID (hash + next-fork block)."""
    enr_repr = (
        "enr:-Jq4QO5zEyIBU5lSa9iaen0A2xUB5_IVrCi1DbyASTTnLV5RJan6aGPr8kU0p0MYKU5YezZgdSUE"
        "-GOBEio6Ultyf1Aog2V0aMrJhGN2AZCDGfCggmlkgnY0gmlwhF4_wLuJc2VjcDI1NmsxoQOt7cA_B_Kg"
        "nQ5RmwyA6ji8M1Y0jfINItRGbOOwy7XgbIN0Y3CCdl-DdWRwgnZf"
    )
    expected = ForkID(hash=to_bytes(hexstr='0x63760190'), next=1700000)
    assert extract_forkid(ENR.from_repr(enr_repr)) == expected
def skip_candidate_if_on_list_or_fork_mismatch(genesis_hash: Hash32,
                                               head: BlockNumber,
                                               fork_blocks: Tuple[BlockNumber, ...],
                                               skip_list: Container[NodeID],
                                               candidate: NodeAPI) -> bool:
    """Return True when *candidate* should not be used as a connection candidate.

    A candidate is skipped when it is on *skip_list* or when the ForkID in its ENR fails
    validation against our current chain state (genesis_hash/head/fork_blocks). Candidates
    whose ENR carries no ForkID are accepted.
    """
    if skip_candidate_if_on_list(skip_list, candidate):
        return True

    # For now we accept candidates which don't specify a ForkID in their ENR, but we may want to
    # change that if we realize we're getting too many chain-mismatch errors when connecting.
    try:
        candidate_forkid = extract_forkid(candidate.enr)
    except ENRMissingForkID:
        # BUG FIX: extract_forkid() raises ENRMissingForkID rather than returning None, so the
        # previous `if candidate_forkid is None` check was dead code and the exception escaped
        # to callers instead of accepting the candidate.
        p2p_logger.debug("Accepting connection candidate (%s) with no ForkID", candidate)
        return False

    try:
        validate_forkid(candidate_forkid, genesis_hash, head, fork_blocks)
    except BaseForkIDValidationError as e:
        p2p_logger.debug(
            "Skipping forkid-incompatible connection candidate (%s): %s", candidate, e)
        return True

    p2p_logger.debug("Accepting forkid-compatible connection candidate (%s)", candidate)
    return False
async def test_generate_eth_cap_enr_field():
    """The generated 'eth' ENR field must round-trip through an ENR into the expected ForkID."""
    base_db = AtomicDB()
    ChainDB(base_db).persist_header(ROPSTEN_GENESIS_HEADER)
    field = await generate_eth_cap_enr_field(ROPSTEN_VM_CONFIGURATION, AsyncHeaderDB(base_db))
    enr_with_field = ENRFactory(custom_kv_pairs={field[0]: field[1]})
    expected = ForkID(hash=to_bytes(hexstr='0x30c7ddbc'), next=10)
    assert extract_forkid(enr_with_field) == expected
def skip_candidate_if_on_list_or_fork_mismatch(genesis_hash: Hash32,
                                               head: BlockNumber,
                                               fork_blocks: Tuple[BlockNumber, ...],
                                               skip_list: Container[NodeID],
                                               candidate: NodeAPI) -> bool:
    """Return True when *candidate* should not be used as a connection candidate.

    A candidate is skipped when it is on *skip_list* or when the ForkID in its ENR fails
    validation against our current chain state. Candidates whose ENR carries no ForkID, or
    one we cannot deserialize, are accepted.
    """
    if skip_candidate_if_on_list(skip_list, candidate):
        return True

    # For now we accept candidates which don't specify a ForkID in their ENR, but we may want to
    # change that if we realize we're getting too many chain-mismatch errors when connecting.
    try:
        candidate_forkid = extract_forkid(candidate.enr)
    except ENRMissingForkID:
        p2p_logger.debug("Accepting connection candidate (%s) with no ForkID", candidate)
        return False
    except MalformedMessage as deserialization_err:
        # Logging as a warning just in case there's a bug in our code that fails to deserialize
        # valid ForkIDs. If this becomes too noisy, we should consider reducing the severity.
        p2p_logger.warning(
            "Unable to extract ForkID from ENR of %s (%s), accepting as connection candidate "
            "anyway",
            candidate,
            deserialization_err,
        )
        return False

    try:
        validate_forkid(candidate_forkid, genesis_hash, head, fork_blocks)
    except BaseForkIDValidationError as validation_err:
        p2p_logger.debug(
            "Skipping forkid-incompatible connection candidate (%s): %s",
            candidate,
            validation_err,
        )
        return True
    else:
        p2p_logger.debug("Accepting forkid-compatible connection candidate (%s)", candidate)
        return False
async def main() -> None:
    """Poll a running DiscoveryService over its IPC endpoint for peer candidates.

    Detects the target network from the IPC path, then repeatedly requests candidates,
    accumulating ForkID-less nodes in a skip list so discovery keeps looking for new peers.
    Runs forever; intended to be driven as a script.
    """
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s %(levelname)s: %(message)s',
        datefmt='%H:%M:%S',
    )
    logger = logging.getLogger()

    parser = argparse.ArgumentParser()
    parser.add_argument('--ipc', type=str, help="The path to DiscoveryService's IPC file")
    args = parser.parse_args()

    # XXX: This is an ugly hack, but it's the easiest way to ensure we use the same network as the
    # DiscoveryService instance we connect to.
    for network_cfg in PRECONFIGURED_NETWORKS.values():
        if network_cfg.data_dir_name in args.ipc:
            # The loop variable already holds the matching config; the original code's
            # re-lookup of PRECONFIGURED_NETWORKS[network_id] after the loop was redundant.
            break
    else:
        raise AssertionError("Failed to detect network_id")

    # Lazy %-style args: the message is only formatted if the record is actually emitted.
    logger.info("Asking DiscoveryService for peers on %s", network_cfg.chain_name)
    connection_config = ConnectionConfig(DISCOVERY_EVENTBUS_ENDPOINT, args.ipc)
    vm_config = network_cfg.vm_configuration
    fork_blocks = extract_fork_blocks(vm_config)
    MAX_PEERS = 260
    skip_list: Set[NodeID] = set()

    async with TrioEndpoint(f"discv4-driver-{uuid.uuid4()}").run() as client:
        with trio.fail_after(2):
            await client.connect_to_endpoints(connection_config)
            await client.wait_until_any_endpoint_subscribed_to(PeerCandidatesRequest)

        while True:
            logger.info("Skip list has %d peers", len(skip_list))
            should_skip = functools.partial(
                skip_candidate_if_on_list_or_fork_mismatch,
                network_cfg.genesis_header.hash,
                network_cfg.genesis_header.block_number,
                fork_blocks,
                skip_list,
            )
            with trio.fail_after(1):
                response = await client.request(PeerCandidatesRequest(MAX_PEERS, should_skip))
            candidates = response.candidates

            # Nodes whose ENR carries no ForkID; extract_forkid raises for those.
            missing_forkid = []
            for candidate in candidates:
                try:
                    extract_forkid(candidate.enr)
                except ENRMissingForkID:
                    missing_forkid.append(candidate.id)

            logger.info(
                "Got %d connection candidates, %d of those with a matching ForkID",
                len(candidates),
                len(candidates) - len(missing_forkid),
            )

            # Add candidates with no forkid to the skip list, just so that we keep triggering
            # random discovery lookups and hopefully come across more candidates with
            # compatible forkids
            logger.info("Adding %d candidates with no ForkID to skip list", len(missing_forkid))
            skip_list.update(missing_forkid)
            await trio.sleep(10)
def test_extract_forkid_malformed():
    """An 'eth' ENR entry that cannot be deserialized must raise MalformedMessage."""
    enr_with_bad_eth_field = ENRFactory(custom_kv_pairs={b'eth': []})
    with pytest.raises(MalformedMessage):
        extract_forkid(enr_with_bad_eth_field)
def test_extract_forkid_missing():
    """An ENR without an 'eth' entry must raise ENRMissingForkID."""
    enr_without_forkid = ENRFactory()
    with pytest.raises(ENRMissingForkID):
        extract_forkid(enr_without_forkid)