def test_handshake_eip8():
    cancel_token = CancelToken("test_handshake_eip8")
    initiator_remote = kademlia.Node(
        keys.PrivateKey(eip8_values['receiver_private_key']).public_key,
        kademlia.Address('0.0.0.0', 0, 0))
    initiator = HandshakeInitiator(
        initiator_remote,
        keys.PrivateKey(eip8_values['initiator_private_key']),
        cancel_token)
    initiator.ephemeral_privkey = keys.PrivateKey(
        eip8_values['initiator_ephemeral_private_key'])

    responder_remote = kademlia.Node(
        keys.PrivateKey(eip8_values['initiator_private_key']).public_key,
        kademlia.Address('0.0.0.0', 0, 0))
    responder = HandshakeResponder(
        responder_remote,
        keys.PrivateKey(eip8_values['receiver_private_key']),
        cancel_token)
    responder.ephemeral_privkey = keys.PrivateKey(
        eip8_values['receiver_ephemeral_private_key'])

    auth_init_ciphertext = eip8_values['auth_init_ciphertext']

    # Check that we can decrypt/decode the EIP-8 auth init message.
    initiator_ephemeral_pubkey, initiator_nonce, _ = decode_authentication(
        auth_init_ciphertext, responder.privkey)
    assert initiator_nonce == eip8_values['initiator_nonce']
    assert initiator_ephemeral_pubkey == (
        keys.PrivateKey(eip8_values['initiator_ephemeral_private_key']).public_key)

    responder_nonce = eip8_values['receiver_nonce']
    auth_ack_ciphertext = eip8_values['auth_ack_ciphertext']
    aes_secret, mac_secret, _, _ = responder.derive_secrets(
        initiator_nonce, responder_nonce, initiator_ephemeral_pubkey,
        auth_init_ciphertext, auth_ack_ciphertext)

    # Check that the secrets derived by the responder match the expected values.
    assert aes_secret == eip8_values['expected_aes_secret']
    assert mac_secret == eip8_values['expected_mac_secret']

    responder_ephemeral_pubkey, responder_nonce = initiator.decode_auth_ack_message(
        auth_ack_ciphertext)
    initiator_aes_secret, initiator_mac_secret, _, _ = initiator.derive_secrets(
        initiator_nonce, responder_nonce, responder_ephemeral_pubkey,
        auth_init_ciphertext, auth_ack_ciphertext)

    # Check that the secrets derived by the initiator match the expected values.
    assert initiator_aes_secret == eip8_values['expected_aes_secret']
    assert initiator_mac_secret == eip8_values['expected_mac_secret']
def get_nodes_to_connect(self):
    nodekey = keys.PrivateKey(
        decode_hex("45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8"))
    remoteid = nodekey.public_key.to_hex()
    yield kademlia.Node(keys.PublicKey(decode_hex(remoteid)),
                        kademlia.Address('127.0.0.1', 30303, 30303))
async def get_listening_discovery_protocol(event_loop):
    addr = kademlia.Address('127.0.0.1', random.randint(1024, 9999))
    proto = get_discovery_protocol(os.urandom(4), addr)
    await event_loop.create_datagram_endpoint(
        lambda: proto, local_addr=(addr.ip, addr.udp_port), family=socket.AF_INET)
    return proto
def test_check_relayed_addr():
    public_host = kademlia.Address('8.8.8.8', 80)
    local_host = kademlia.Address('127.0.0.1', 80)
    assert kademlia.check_relayed_addr(local_host, local_host)
    assert not kademlia.check_relayed_addr(public_host, local_host)

    private = kademlia.Address('192.168.1.1', 80)
    assert kademlia.check_relayed_addr(private, private)
    assert not kademlia.check_relayed_addr(public_host, private)

    reserved = kademlia.Address('240.0.0.1', 80)
    assert not kademlia.check_relayed_addr(local_host, reserved)
    assert not kademlia.check_relayed_addr(public_host, reserved)

    unspecified = kademlia.Address('0.0.0.0', 80)
    assert not kademlia.check_relayed_addr(local_host, unspecified)
    assert not kademlia.check_relayed_addr(public_host, unspecified)
def _test() -> None:
    import argparse
    import signal
    from p2p import constants
    from p2p import ecies

    loop = asyncio.get_event_loop()
    loop.set_debug(True)

    parser = argparse.ArgumentParser()
    parser.add_argument('-bootnode', type=str, help="The enode to use as bootnode")
    parser.add_argument('-debug', action="store_true")
    args = parser.parse_args()

    log_level = logging.INFO
    if args.debug:
        log_level = logging.DEBUG
    logging.basicConfig(level=log_level, format='%(asctime)s %(levelname)s: %(message)s')

    listen_host = '127.0.0.1'
    # Listen on a port other than 30303 so that we can test against a local geth instance
    # running on that port.
    listen_port = 30304
    privkey = ecies.generate_privkey()
    addr = kademlia.Address(listen_host, listen_port, listen_port)
    if args.bootnode:
        bootstrap_nodes = tuple([kademlia.Node.from_uri(args.bootnode)])
    else:
        bootstrap_nodes = tuple(
            kademlia.Node.from_uri(enode) for enode in constants.ROPSTEN_BOOTNODES)

    discovery = DiscoveryProtocol(privkey, addr, bootstrap_nodes)
    loop.run_until_complete(
        loop.create_datagram_endpoint(lambda: discovery, local_addr=('0.0.0.0', listen_port)))

    async def run() -> None:
        try:
            await discovery.bootstrap()
            while True:
                await discovery.lookup_random(CancelToken("Unused"))
                print("====================================================")
                print("Random nodes: ", list(discovery.get_nodes_to_connect(10)))
                print("====================================================")
        except OperationCancelled:
            await discovery.stop()

    for sig in [signal.SIGINT, signal.SIGTERM]:
        loop.add_signal_handler(sig, discovery.cancel_token.trigger)

    loop.run_until_complete(run())
    loop.close()
async def main() -> None:
    parser = argparse.ArgumentParser()
    parser.add_argument('-bootnode', type=str, help="The enode to use as bootnode")
    parser.add_argument('-networkid', type=int,
                        choices=[ROPSTEN_NETWORK_ID, MAINNET_NETWORK_ID],
                        default=ROPSTEN_NETWORK_ID,
                        help="1 for mainnet, 3 for testnet")
    parser.add_argument('-l', type=str, help="Log level", default="info")
    args = parser.parse_args()

    logging.basicConfig(level=logging.INFO,
                        format='%(asctime)s %(levelname)s: %(message)s',
                        datefmt='%H:%M:%S')
    if args.l == "debug2":  # noqa: E741
        log_level = DEBUG2_LEVEL_NUM
    else:
        log_level = getattr(logging, args.l.upper())
    logging.getLogger('p2p').setLevel(log_level)

    network_cfg = PRECONFIGURED_NETWORKS[args.networkid]
    # Listen on a port other than 30303 so that we can test against a local geth instance
    # running on that port.
    listen_port = 30304
    # Use a hard-coded privkey so that our enode is always the same.
    privkey = keys.PrivateKey(
        b'~\x054{4\r\xd64\x0f\x98\x1e\x85;\xcc\x08\x1eQ\x10t\x16\xc0\xb0\x7f)=\xc4\x1b\xb7/\x8b&\x83')  # noqa: E501
    addr = kademlia.Address('127.0.0.1', listen_port, listen_port)
    if args.bootnode:
        bootstrap_nodes = tuple([kademlia.Node.from_uri(args.bootnode)])
    else:
        bootstrap_nodes = tuple(
            kademlia.Node.from_uri(enode) for enode in network_cfg.bootnodes)

    ipc_path = Path(f"networking-{uuid.uuid4()}.ipc")
    networking_connection_config = ConnectionConfig(
        name=NETWORKING_EVENTBUS_ENDPOINT, path=ipc_path)

    headerdb = TrioHeaderDB(AtomicDB(MemoryDB()))
    headerdb.persist_header(network_cfg.genesis_header)
    vm_config = network_cfg.vm_configuration
    enr_field_providers = (
        functools.partial(generate_eth_cap_enr_field, vm_config, headerdb),
    )

    socket = trio.socket.socket(family=trio.socket.AF_INET, type=trio.socket.SOCK_DGRAM)
    await socket.bind(('0.0.0.0', listen_port))
    async with TrioEndpoint.serve(networking_connection_config) as endpoint:
        service = DiscoveryService(privkey, addr, bootstrap_nodes, endpoint, socket,
                                   enr_field_providers)
        service.logger.info("Enode: %s", service.this_node.uri())
        async with background_trio_service(service):
            await service.manager.wait_finished()
def _test() -> None: import argparse import signal from p2p import ecies from p2p import kademlia from p2p.constants import ROPSTEN_BOOTNODES from p2p.discovery import DiscoveryProtocol from evm.chains.ropsten import RopstenChain, ROPSTEN_GENESIS_HEADER from evm.db.backends.level import LevelDB from tests.p2p.integration_test_helpers import FakeAsyncChainDB, LocalGethPeerPool logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s') logging.getLogger('p2p.chain.ChainSyncer').setLevel(logging.DEBUG) parser = argparse.ArgumentParser() parser.add_argument('-db', type=str, required=True) parser.add_argument('-local-geth', action="store_true") args = parser.parse_args() loop = asyncio.get_event_loop() chaindb = FakeAsyncChainDB(LevelDB(args.db)) chaindb.persist_header(ROPSTEN_GENESIS_HEADER) privkey = ecies.generate_privkey() if args.local_geth: peer_pool = LocalGethPeerPool(ETHPeer, chaindb, RopstenChain.network_id, privkey) discovery = None else: listen_host = '0.0.0.0' listen_port = 30303 addr = kademlia.Address(listen_host, listen_port, listen_port) discovery = DiscoveryProtocol(privkey, addr, ROPSTEN_BOOTNODES) loop.run_until_complete(discovery.create_endpoint()) print("Bootstrapping discovery service...") loop.run_until_complete(discovery.bootstrap()) peer_pool = PeerPool(ETHPeer, chaindb, RopstenChain.network_id, privkey, discovery) asyncio.ensure_future(peer_pool.run()) downloader = ChainSyncer(chaindb, peer_pool) # On ROPSTEN the discovery table is usually full of bad peers so we can't require too many # peers in order to sync. downloader.min_peers_to_sync = 1 async def run(): # downloader.run() will run in a loop until the SIGINT/SIGTERM handler triggers its cancel # token, at which point it returns and we stop the pool and downloader. await downloader.run() await peer_pool.stop() await downloader.stop() if discovery is not None: discovery.stop() # Give any pending discovery tasks some time to finish. await asyncio.sleep(2) for sig in [signal.SIGINT, signal.SIGTERM]: loop.add_signal_handler(sig, downloader.cancel_token.trigger) loop.run_until_complete(run()) loop.close()
def _test():
    # async def show_tasks():
    #     while True:
    #         tasks = []
    #         for task in asyncio.Task.all_tasks():
    #             if task._coro.__name__ != "show_tasks":
    #                 tasks.append(task._coro.__name__)
    #         if tasks:
    #             logger.debug("Active tasks: %s", tasks)
    #         await asyncio.sleep(3)

    privkey_hex = '65462b0520ef7d3df61b9992ed3bea0c56ead753be7c8b3614e0ce01e4cac41b'
    listen_host = '0.0.0.0'
    listen_port = 30303
    bootstrap_uris = [
        # Local geth bootnodes
        # b'enode://3a514176466fa815ed481ffad09110a2d344f6c9b78c1d14afc351c3a51be33d8072e77939dc03ba44790779b7a1025baf3003f6732430e20cd9b76d953391b3@127.0.0.1:30301',  # noqa: E501
        # Testnet bootnodes
        # b'enode://6ce05930c72abc632c58e2e4324f7c7ea478cec0ed4fa2528982cf34483094e9cbc9216e7aa349691242576d552a2a56aaeae426c5303ded677ce455ba1acd9d@13.84.180.240:30303',  # noqa: E501
        # b'enode://20c9ad97c081d63397d7b685a412227a40e23c8bdc6688c6f37e97cfbc22d2b4d1db1510d8f61e6a8866ad7f0e17c02b14182d37ea7c3c8b9c2683aeb6b733a1@52.169.14.227:30303',  # noqa: E501
        # Mainnet bootnodes
        # b'enode://a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@52.16.188.185:30303',  # noqa: E501
        # b'enode://3f1d12044546b76342d59d4a05532c14b85aa669704bfe1f864fe079415aa2c02d743e03218e57a33fb94523adb54032871a6c51b2cc5514cb7c7e35b3ed0a99@13.93.211.84:30303',  # noqa: E501
        b'enode://78de8a0916848093c73790ead81d1928bec737d565119932b98c6b100d944b7a95e94f847f689fc723399d2e31129d182f7ef3863f2b4c820abbf3ab2722344d@191.235.84.50:30303',  # noqa: E501
        b'enode://158f8aab45f6d19c6cbf4a089c2670541a8da11978a2f90dbf6a502a4a3bab80d288afdbeb7ec0ef6d92de563767f3b1ea9e8e334ca711e9f8e2df5a0385e8e6@13.75.154.138:30303',  # noqa: E501
        b'enode://1118980bf48b0a3640bdba04e0fe78b1add18e1cd99bf22d53daac1fd9972ad650df52176e7c7d89d1114cfef2bc23a2959aa54998a46afcf7d91809f0855082@52.74.57.123:30303',  # noqa: E501
    ]

    # logger = logging.getLogger("p2p.discovery")
    logging.basicConfig(level=logging.DEBUG, format='%(levelname)s: %(message)s')
    loop = asyncio.get_event_loop()
    loop.set_debug(True)

    privkey = keys.PrivateKey(decode_hex(privkey_hex))
    addr = kademlia.Address(listen_host, listen_port, listen_port)
    bootstrap_nodes = [kademlia.Node.from_uri(x) for x in bootstrap_uris]
    discovery = DiscoveryProtocol(privkey, addr, bootstrap_nodes)
    loop.run_until_complete(discovery.listen(loop))
    # There's no need to wait for bootstrap because we run_forever().
    asyncio.ensure_future(discovery.bootstrap())
    # This helps when debugging asyncio issues.
    # task_monitor = asyncio.ensure_future(show_tasks())

    try:
        loop.run_forever()
    except KeyboardInterrupt:
        pass

    # task_monitor.set_result(None)
    discovery.stop()
    # logger.info("Pending tasks at exit: %s", asyncio.Task.all_tasks(loop))
    loop.close()
def datagram_received(self, data: Union[bytes, Text], addr: Tuple[str, int]) -> None:
    ip_address, udp_port = addr
    address = kademlia.Address(ip_address, udp_port)
    # The prefix below is what geth uses to identify discv5 msgs.
    # https://github.com/ethereum/go-ethereum/blob/c4712bf96bc1bae4a5ad4600e9719e4a74bde7d5/p2p/discv5/udp.go#L149  # noqa: E501
    if text_if_str(to_bytes, data).startswith(V5_ID_STRING):
        self.receive_v5(address, cast(bytes, data))
    else:
        self.receive(address, cast(bytes, data))
def datagram_received(self, data: bytes, addr: Tuple[str, int]) -> None:  # type: ignore
    ip_address, udp_port = addr
    # XXX: For now we simply discard all v5 messages. The prefix below is what geth uses to
    # identify them:
    # https://github.com/ethereum/go-ethereum/blob/c4712bf96bc1bae4a5ad4600e9719e4a74bde7d5/p2p/discv5/udp.go#L149  # noqa: E501
    if text_if_str(to_bytes, data).startswith(b"temporary discovery v5"):
        self.logger.debug("Got discovery v5 msg, discarding")
        return
    self.receive(kademlia.Address(ip_address, udp_port), data)  # type: ignore
async def test_topic_query(event_loop):
    bob_addr = kademlia.Address("127.0.0.1", 12345)
    bob = get_discovery_protocol(b"bob", bob_addr)
    await event_loop.create_datagram_endpoint(
        lambda: bob, local_addr=(bob_addr.ip, bob_addr.udp_port), family=socket.AF_INET)
    les_nodes = [random_node() for _ in range(10)]
    topic = b'les'
    for n in les_nodes:
        bob.topic_table.add_node(n, topic)

    alice_addr = kademlia.Address("127.0.0.1", 12346)
    alice = get_discovery_protocol(b"alice", alice_addr)
    await event_loop.create_datagram_endpoint(
        lambda: alice, local_addr=(alice_addr.ip, alice_addr.udp_port), family=socket.AF_INET)

    echo = alice.send_topic_query(bob.this_node, topic)
    received_nodes = await alice.wait_topic_nodes(bob.this_node, echo)

    assert len(received_nodes) == 10
    assert sorted(received_nodes) == sorted(les_nodes)
def _get_max_neighbours_per_packet():
    # As defined in https://github.com/ethereum/devp2p/blob/master/rlpx.md, the max size of a
    # datagram must be 1280 bytes, so when sending neighbours packets we must include up to
    # _max_neighbours_per_packet and if there's more than that split them across multiple
    # packets.
    # Use an IPv6 address here as we're interested in the size of the biggest possible node
    # representation.
    addr = kademlia.Address('::1', 30303, 30303)
    node_data = addr.to_endpoint() + [b'\x00' * (kademlia.k_pubkey_size // 8)]
    neighbours = [node_data]
    expiration = rlp.sedes.big_endian_int.serialize(int(time.time() + EXPIRATION))
    payload = rlp.encode([neighbours] + [expiration])
    while HEAD_SIZE + len(payload) <= 1280:
        neighbours.append(node_data)
        payload = rlp.encode([neighbours] + [expiration])
    return len(neighbours) - 1
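# A minimal sanity-check sketch for the helper above (hypothetical test, not part of the
# original source): it rebuilds the same worst-case node encoding and asserts that a
# neighbours payload holding the returned count still fits in a 1280-byte datagram, while
# one more node would not. It assumes the same HEAD_SIZE and EXPIRATION constants referenced
# by _get_max_neighbours_per_packet.
def test_max_neighbours_fits_in_datagram():
    max_neighbours = _get_max_neighbours_per_packet()
    addr = kademlia.Address('::1', 30303, 30303)
    node_data = addr.to_endpoint() + [b'\x00' * (kademlia.k_pubkey_size // 8)]
    expiration = rlp.sedes.big_endian_int.serialize(int(time.time() + EXPIRATION))
    # The returned count must fit within the datagram limit...
    payload = rlp.encode([[node_data] * max_neighbours] + [expiration])
    assert HEAD_SIZE + len(payload) <= 1280
    # ...and adding one more node must push the packet over that limit.
    payload = rlp.encode([[node_data] * (max_neighbours + 1)] + [expiration])
    assert HEAD_SIZE + len(payload) > 1280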
def _test():
    import signal
    from p2p import constants
    from p2p import ecies
    from p2p.exceptions import OperationCancelled

    logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')
    loop = asyncio.get_event_loop()
    loop.set_debug(True)

    listen_host = '0.0.0.0'
    # Listen on a port other than 30303 in case we want to test against a local geth instance
    # running on that port.
    listen_port = 30303
    privkey = ecies.generate_privkey()
    addr = kademlia.Address(listen_host, listen_port, listen_port)
    bootstrap_nodes = tuple(
        kademlia.Node.from_uri(enode) for enode in constants.ROPSTEN_BOOTNODES)
    discovery = DiscoveryProtocol(privkey, addr, bootstrap_nodes)
    # local_bootnodes = [
    #     kademlia.Node.from_uri('enode://0x3a514176466fa815ed481ffad09110a2d344f6c9b78c1d14afc351c3a51be33d8072e77939dc03ba44790779b7a1025baf3003f6732430e20cd9b76d953391b3@127.0.0.1:30303')]  # noqa: E501
    # discovery = DiscoveryProtocol(privkey, addr, local_bootnodes)
    loop.run_until_complete(
        loop.create_datagram_endpoint(lambda: discovery, local_addr=('0.0.0.0', listen_port)))

    async def run():
        try:
            await discovery.bootstrap()
            while True:
                await discovery.lookup_random(CancelToken("Unused"))
                print("====================================================")
                print("Random nodes: ", list(discovery.get_random_nodes(10)))
                print("====================================================")
        except OperationCancelled:
            await discovery.stop()

    for sig in [signal.SIGINT, signal.SIGTERM]:
        loop.add_signal_handler(sig, discovery.cancel_token.trigger)

    loop.run_until_complete(run())
    loop.close()
def _test() -> None:
    parser = argparse.ArgumentParser()
    parser.add_argument('-bootnode', type=str, help="The enode to use as bootnode")
    parser.add_argument('-debug', action="store_true")
    args = parser.parse_args()

    log_level = logging.DEBUG
    if args.debug:
        log_level = DEBUG2_LEVEL_NUM
    logging.basicConfig(level=log_level, format='%(asctime)s %(levelname)s: %(message)s')

    # Listen on a port other than 30303 so that we can test against a local geth instance
    # running on that port.
    listen_port = 30304
    privkey = ecies.generate_privkey()
    addr = kademlia.Address('127.0.0.1', listen_port, listen_port)
    if args.bootnode:
        bootstrap_nodes = tuple([kademlia.Node.from_uri(args.bootnode)])
    else:
        bootstrap_nodes = tuple(
            kademlia.Node.from_uri(enode) for enode in constants.ROPSTEN_BOOTNODES)

    ipc_path = Path(f"networking-{uuid.uuid4()}.ipc")
    networking_connection_config = ConnectionConfig(
        name=NETWORKING_EVENTBUS_ENDPOINT, path=ipc_path)

    async def run() -> None:
        socket = trio.socket.socket(family=trio.socket.AF_INET, type=trio.socket.SOCK_DGRAM)
        await socket.bind(('0.0.0.0', listen_port))
        async with TrioEndpoint.serve(networking_connection_config) as endpoint:
            service = DiscoveryService(privkey, addr, bootstrap_nodes, endpoint, socket)
            await TrioManager.run_service(service)

    trio.run(run)
def _test():
    import signal
    from p2p import constants
    from p2p import ecies
    from p2p.exceptions import OperationCancelled

    logging.basicConfig(level=logging.DEBUG, format='%(levelname)s: %(message)s')
    loop = asyncio.get_event_loop()
    loop.set_debug(True)

    listen_host = '0.0.0.0'
    # Listen on a port other than 30303 in case we want to test against a local geth instance
    # running on that port.
    listen_port = 30301
    privkey = ecies.generate_privkey()
    addr = kademlia.Address(listen_host, listen_port, listen_port)
    discovery = DiscoveryProtocol(privkey, addr, constants.MAINNET_BOOTNODES)
    # local_bootnodes = [
    #     'enode://0x3a514176466fa815ed481ffad09110a2d344f6c9b78c1d14afc351c3a51be33d8072e77939dc03ba44790779b7a1025baf3003f6732430e20cd9b76d953391b3@127.0.0.1:30303']  # noqa: E501
    # discovery = DiscoveryProtocol(privkey, addr, local_bootnodes)
    loop.run_until_complete(discovery.listen(loop))

    async def run():
        try:
            await discovery.bootstrap()
            while True:
                await discovery.lookup_random(CancelToken("Unused"))
        except OperationCancelled:
            # Give all tasks started by DiscoveryProtocol a chance to stop.
            await asyncio.sleep(2)

    for sig in [signal.SIGINT, signal.SIGTERM]:
        loop.add_signal_handler(sig, discovery.stop)

    loop.run_until_complete(run())
    loop.close()
async def test_handshake(): # TODO: this test should be re-written to not depend on functionality in the `ETHPeer` class. cancel_token = CancelToken("test_handshake") use_eip8 = False initiator_remote = kademlia.Node( keys.PrivateKey(test_values['receiver_private_key']).public_key, kademlia.Address('0.0.0.0', 0, 0)) initiator = HandshakeInitiator( initiator_remote, keys.PrivateKey(test_values['initiator_private_key']), use_eip8, cancel_token) initiator.ephemeral_privkey = keys.PrivateKey( test_values['initiator_ephemeral_private_key']) responder_remote = kademlia.Node( keys.PrivateKey(test_values['initiator_private_key']).public_key, kademlia.Address('0.0.0.0', 0, 0)) responder = HandshakeResponder( responder_remote, keys.PrivateKey(test_values['receiver_private_key']), use_eip8, cancel_token) responder.ephemeral_privkey = keys.PrivateKey( test_values['receiver_ephemeral_private_key']) # Check that the auth message generated by the initiator is what we expect. Notice that we # can't use the auth_init generated here because the non-deterministic prefix would cause the # derived secrets to not match the expected values. _auth_init = initiator.create_auth_message(test_values['initiator_nonce']) assert len(_auth_init) == len(test_values['auth_plaintext']) assert _auth_init[65:] == test_values['auth_plaintext'][ 65:] # starts with non deterministic k # Check that encrypting and decrypting the auth_init gets us the orig msg. _auth_init_ciphertext = initiator.encrypt_auth_message(_auth_init) assert _auth_init == ecies.decrypt(_auth_init_ciphertext, responder.privkey) # Check that the responder correctly decodes the auth msg. auth_msg_ciphertext = test_values['auth_ciphertext'] initiator_ephemeral_pubkey, initiator_nonce, _ = decode_authentication( auth_msg_ciphertext, responder.privkey) assert initiator_nonce == test_values['initiator_nonce'] assert initiator_ephemeral_pubkey == (keys.PrivateKey( test_values['initiator_ephemeral_private_key']).public_key) # Check that the auth_ack msg generated by the responder is what we expect. auth_ack_msg = responder.create_auth_ack_message( test_values['receiver_nonce']) assert auth_ack_msg == test_values['authresp_plaintext'] # Check that the secrets derived from ephemeral key agreements match the expected values. auth_ack_ciphertext = test_values['authresp_ciphertext'] aes_secret, mac_secret, egress_mac, ingress_mac = responder.derive_secrets( initiator_nonce, test_values['receiver_nonce'], initiator_ephemeral_pubkey, auth_msg_ciphertext, auth_ack_ciphertext) assert aes_secret == test_values['aes_secret'] assert mac_secret == test_values['mac_secret'] # Test values are from initiator perspective, so they're reversed here. assert ingress_mac.digest() == test_values['initial_egress_MAC'] assert egress_mac.digest() == test_values['initial_ingress_MAC'] # Check that the initiator secrets match as well. 
responder_ephemeral_pubkey, responder_nonce = initiator.decode_auth_ack_message( test_values['authresp_ciphertext']) (initiator_aes_secret, initiator_mac_secret, initiator_egress_mac, initiator_ingress_mac) = initiator.derive_secrets( initiator_nonce, responder_nonce, responder_ephemeral_pubkey, auth_msg_ciphertext, auth_ack_ciphertext) assert initiator_aes_secret == aes_secret assert initiator_mac_secret == mac_secret assert initiator_ingress_mac.digest() == test_values['initial_ingress_MAC'] assert initiator_egress_mac.digest() == test_values['initial_egress_MAC'] # Finally, check that two Peers configured with the secrets generated above understand each # other. ( (responder_reader, responder_writer), (initiator_reader, initiator_writer), ) = get_directly_connected_streams() capabilities = (('paragon', 1), ) initiator_transport = Transport(remote=initiator_remote, private_key=initiator.privkey, reader=initiator_reader, writer=initiator_writer, aes_secret=initiator_aes_secret, mac_secret=initiator_mac_secret, egress_mac=initiator_egress_mac, ingress_mac=initiator_ingress_mac) initiator_p2p_protocol = P2PProtocolV5(initiator_transport, 0, False) initiator_multiplexer = Multiplexer( transport=initiator_transport, base_protocol=initiator_p2p_protocol, protocols=(), ) initiator_multiplexer.get_base_protocol().send( Hello( HelloPayload( client_version_string='initiator', capabilities=capabilities, listen_port=30303, version=DEVP2P_V5, remote_public_key=initiator.privkey.public_key.to_bytes(), ))) responder_transport = Transport( remote=responder_remote, private_key=responder.privkey, reader=responder_reader, writer=responder_writer, aes_secret=aes_secret, mac_secret=mac_secret, egress_mac=egress_mac, ingress_mac=ingress_mac, ) responder_p2p_protocol = P2PProtocolV5(responder_transport, 0, False) responder_multiplexer = Multiplexer( transport=responder_transport, base_protocol=responder_p2p_protocol, protocols=(), ) responder_multiplexer.get_base_protocol().send( Hello( HelloPayload( client_version_string='responder', capabilities=capabilities, listen_port=30303, version=DEVP2P_V5, remote_public_key=responder.privkey.public_key.to_bytes(), ))) async with initiator_multiplexer.multiplex(): async with responder_multiplexer.multiplex(): initiator_stream = initiator_multiplexer.stream_protocol_messages( initiator_p2p_protocol, ) responder_stream = responder_multiplexer.stream_protocol_messages( responder_p2p_protocol, ) initiator_hello = await asyncio.wait_for( initiator_stream.asend(None), timeout=0.1) responder_hello = await asyncio.wait_for( responder_stream.asend(None), timeout=0.1) await initiator_stream.aclose() await responder_stream.aclose() assert isinstance(responder_hello, Hello) assert isinstance(initiator_hello, Hello)
async def test_handshake_eip8(): cancel_token = CancelToken("test_handshake_eip8") use_eip8 = True initiator_remote = kademlia.Node( keys.PrivateKey(eip8_values['receiver_private_key']).public_key, kademlia.Address('0.0.0.0', 0, 0)) initiator = HandshakeInitiator( initiator_remote, keys.PrivateKey(eip8_values['initiator_private_key']), use_eip8, cancel_token) initiator.ephemeral_privkey = keys.PrivateKey( eip8_values['initiator_ephemeral_private_key']) responder_remote = kademlia.Node( keys.PrivateKey(eip8_values['initiator_private_key']).public_key, kademlia.Address('0.0.0.0', 0, 0)) responder = HandshakeResponder( responder_remote, keys.PrivateKey(eip8_values['receiver_private_key']), use_eip8, cancel_token) responder.ephemeral_privkey = keys.PrivateKey( eip8_values['receiver_ephemeral_private_key']) auth_init_ciphertext = eip8_values['auth_init_ciphertext'] # Check that we can decrypt/decode the EIP-8 auth init message. initiator_ephemeral_pubkey, initiator_nonce, _ = decode_authentication( auth_init_ciphertext, responder.privkey) assert initiator_nonce == eip8_values['initiator_nonce'] assert initiator_ephemeral_pubkey == (keys.PrivateKey( eip8_values['initiator_ephemeral_private_key']).public_key) responder_nonce = eip8_values['receiver_nonce'] auth_ack_ciphertext = eip8_values['auth_ack_ciphertext'] aes_secret, mac_secret, egress_mac, ingress_mac = responder.derive_secrets( initiator_nonce, responder_nonce, initiator_ephemeral_pubkey, auth_init_ciphertext, auth_ack_ciphertext) # Check that the secrets derived by the responder match the expected values. assert aes_secret == eip8_values['expected_aes_secret'] assert mac_secret == eip8_values['expected_mac_secret'] # Also according to https://github.com/ethereum/EIPs/blob/master/EIPS/eip-8.md, running B's # ingress-mac keccak state on the string "foo" yields the following hash: ingress_mac_copy = ingress_mac.copy() ingress_mac_copy.update(b'foo') assert ingress_mac_copy.hexdigest() == ( '0c7ec6340062cc46f5e9f1e3cf86f8c8c403c5a0964f5df0ebd34a75ddc86db5') responder_ephemeral_pubkey, responder_nonce = initiator.decode_auth_ack_message( auth_ack_ciphertext) (initiator_aes_secret, initiator_mac_secret, initiator_egress_mac, initiator_ingress_mac) = initiator.derive_secrets( initiator_nonce, responder_nonce, responder_ephemeral_pubkey, auth_init_ciphertext, auth_ack_ciphertext) # Check that the secrets derived by the initiator match the expected values. assert initiator_aes_secret == eip8_values['expected_aes_secret'] assert initiator_mac_secret == eip8_values['expected_mac_secret'] # Finally, check that two Peers configured with the secrets generated above understand each # other. 
( (responder_reader, responder_writer), (initiator_reader, initiator_writer), ) = get_directly_connected_streams() capabilities = (('testing', 1), ) initiator_transport = Transport(remote=initiator_remote, private_key=initiator.privkey, reader=initiator_reader, writer=initiator_writer, aes_secret=initiator_aes_secret, mac_secret=initiator_mac_secret, egress_mac=initiator_egress_mac, ingress_mac=initiator_ingress_mac) initiator_p2p_protocol = P2PProtocolV5(initiator_transport, 0, False) initiator_multiplexer = Multiplexer( transport=initiator_transport, base_protocol=initiator_p2p_protocol, protocols=(), ) initiator_multiplexer.get_base_protocol().send( Hello( HelloPayload( client_version_string='initiator', capabilities=capabilities, listen_port=30303, version=DEVP2P_V5, remote_public_key=initiator.privkey.public_key.to_bytes(), ))) responder_transport = Transport( remote=responder_remote, private_key=responder.privkey, reader=responder_reader, writer=responder_writer, aes_secret=aes_secret, mac_secret=mac_secret, egress_mac=egress_mac, ingress_mac=ingress_mac, ) responder_p2p_protocol = P2PProtocolV4(responder_transport, 0, False) responder_multiplexer = Multiplexer( transport=responder_transport, base_protocol=responder_p2p_protocol, protocols=(), ) responder_multiplexer.get_base_protocol().send( Hello( HelloPayload( client_version_string='responder', capabilities=capabilities, listen_port=30303, version=DEVP2P_V4, remote_public_key=responder.privkey.public_key.to_bytes(), ))) async with initiator_multiplexer.multiplex(): async with responder_multiplexer.multiplex(): initiator_stream = initiator_multiplexer.stream_protocol_messages( initiator_p2p_protocol, ) responder_stream = responder_multiplexer.stream_protocol_messages( responder_p2p_protocol, ) initiator_hello = await initiator_stream.asend(None) responder_hello = await responder_stream.asend(None) await initiator_stream.aclose() await responder_stream.aclose() assert isinstance(responder_hello, Hello) assert isinstance(initiator_hello, Hello)
def datagram_received(self, data: AnyStr, addr: Tuple[str, int]) -> None:  # type: ignore
    ip_address, udp_port = addr
    self.receive(kademlia.Address(ip_address, udp_port), data)  # type: ignore
async def get_directly_linked_peers_without_handshake(peer1_class=LESPeer,
                                                      peer1_chaindb=None,
                                                      peer2_class=LESPeer,
                                                      peer2_chaindb=None):
    """See get_directly_linked_peers().

    Neither the P2P handshake nor the sub-protocol handshake will be performed here.
    """
    if peer1_chaindb is None:
        peer1_chaindb = get_fresh_mainnet_chaindb()
    if peer2_chaindb is None:
        peer2_chaindb = get_fresh_mainnet_chaindb()
    peer1_private_key = ecies.generate_privkey()
    peer2_private_key = ecies.generate_privkey()
    peer1_remote = kademlia.Node(peer2_private_key.public_key,
                                 kademlia.Address('0.0.0.0', 0, 0))
    peer2_remote = kademlia.Node(peer1_private_key.public_key,
                                 kademlia.Address('0.0.0.0', 0, 0))
    initiator = auth.HandshakeInitiator(peer1_remote, peer1_private_key)
    peer2_reader = asyncio.StreamReader()
    peer1_reader = asyncio.StreamReader()
    # Link the peer1's writer to the peer2's reader, and the peer2's writer to the
    # peer1's reader.
    peer2_writer = type("mock-streamwriter", (object, ), {
        "write": peer1_reader.feed_data,
        "close": lambda: None
    })
    peer1_writer = type("mock-streamwriter", (object, ), {
        "write": peer2_reader.feed_data,
        "close": lambda: None
    })

    peer1, peer2 = None, None
    handshake_finished = asyncio.Event()

    async def do_handshake():
        nonlocal peer1, peer2
        aes_secret, mac_secret, egress_mac, ingress_mac = await auth._handshake(
            initiator, peer1_reader, peer1_writer)

        # Need to copy those before we pass them on to the Peer constructor because they're
        # mutable. Also, the 2nd peer's ingress/egress MACs are reversed from the first peer's.
        peer2_ingress = egress_mac.copy()
        peer2_egress = ingress_mac.copy()

        peer1 = peer1_class(remote=peer1_remote,
                            privkey=peer1_private_key,
                            reader=peer1_reader,
                            writer=peer1_writer,
                            aes_secret=aes_secret,
                            mac_secret=mac_secret,
                            egress_mac=egress_mac,
                            ingress_mac=ingress_mac,
                            chaindb=peer1_chaindb,
                            network_id=1)

        peer2 = peer2_class(remote=peer2_remote,
                            privkey=peer2_private_key,
                            reader=peer2_reader,
                            writer=peer2_writer,
                            aes_secret=aes_secret,
                            mac_secret=mac_secret,
                            egress_mac=peer2_egress,
                            ingress_mac=peer2_ingress,
                            chaindb=peer2_chaindb,
                            network_id=1)

        handshake_finished.set()

    asyncio.ensure_future(do_handshake())

    responder = auth.HandshakeResponder(peer2_remote, peer2_private_key)
    auth_msg = await peer2_reader.read(constants.ENCRYPTED_AUTH_MSG_LEN)

    # Can't assert return values, but checking that the decoder doesn't raise
    # any exceptions at least.
    _, _ = responder.decode_authentication(auth_msg)

    peer2_nonce = keccak(os.urandom(constants.HASH_LEN))
    auth_ack_msg = responder.create_auth_ack_message(peer2_nonce)
    auth_ack_ciphertext = responder.encrypt_auth_ack_message(auth_ack_msg)
    peer2_writer.write(auth_ack_ciphertext)

    await handshake_finished.wait()

    return peer1, peer2
def test_handshake_eip8(): # Data taken from https://github.com/ethereum/EIPs/blob/master/EIPS/eip-8.md test_values = { "initiator_private_key": "49a7b37aa6f6645917e7b807e9d1c00d4fa71f18343b0d4122a4d2df64dd6fee", "receiver_private_key": "b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291", "initiator_ephemeral_private_key": "869d6ecf5211f1cc60418a13b9d870b22959d0c16f02bec714c960dd2298a32d", "receiver_ephemeral_private_key": "e238eb8e04fee6511ab04c6dd3c89ce097b11f25d584863ac2b6d5b35b1847e4", "initiator_nonce": "7e968bba13b6c50e2c4cd7f241cc0d64d1ac25c7f5952df231ac6a2bda8ee5d6", "receiver_nonce": "559aead08264d5795d3909718cdd05abd49572e84fe55590eef31a88a08fdffd", } for k, v in test_values.items(): test_values[k] = decode_hex(v) initiator_remote = kademlia.Node( keys.PrivateKey(test_values['receiver_private_key']).public_key, kademlia.Address('0.0.0.0', 0, 0)) initiator = HandshakeInitiator( initiator_remote, keys.PrivateKey(test_values['initiator_private_key'])) initiator.ephemeral_privkey = keys.PrivateKey( test_values['initiator_ephemeral_private_key']) responder_remote = kademlia.Node( keys.PrivateKey(test_values['initiator_private_key']).public_key, kademlia.Address('0.0.0.0', 0, 0)) responder = HandshakeResponder( responder_remote, keys.PrivateKey(test_values['receiver_private_key'])) responder.ephemeral_privkey = keys.PrivateKey( test_values['receiver_ephemeral_private_key']) auth_init_ciphertext = decode_hex( "01b304ab7578555167be8154d5cc456f567d5ba302662433674222360f08d5f1534499d3678b513b" "0fca474f3a514b18e75683032eb63fccb16c156dc6eb2c0b1593f0d84ac74f6e475f1b8d56116b84" "9634a8c458705bf83a626ea0384d4d7341aae591fae42ce6bd5c850bfe0b999a694a49bbbaf3ef6c" "da61110601d3b4c02ab6c30437257a6e0117792631a4b47c1d52fc0f8f89caadeb7d02770bf999cc" "147d2df3b62e1ffb2c9d8c125a3984865356266bca11ce7d3a688663a51d82defaa8aad69da39ab6" "d5470e81ec5f2a7a47fb865ff7cca21516f9299a07b1bc63ba56c7a1a892112841ca44b6e0034dee" "70c9adabc15d76a54f443593fafdc3b27af8059703f88928e199cb122362a4b35f62386da7caad09" "c001edaeb5f8a06d2b26fb6cb93c52a9fca51853b68193916982358fe1e5369e249875bb8d0d0ec3" "6f917bc5e1eafd5896d46bd61ff23f1a863a8a8dcd54c7b109b771c8e61ec9c8908c733c0263440e" "2aa067241aaa433f0bb053c7b31a838504b148f570c0ad62837129e547678c5190341e4f1693956c" "3bf7678318e2d5b5340c9e488eefea198576344afbdf66db5f51204a6961a63ce072c8926c" ) # Check that we can decrypt/decode the EIP-8 auth init message. 
initiator_ephemeral_pubkey, initiator_nonce = responder.decode_authentication( auth_init_ciphertext) assert initiator_nonce == test_values['initiator_nonce'] assert initiator_ephemeral_pubkey == (keys.PrivateKey( test_values['initiator_ephemeral_private_key']).public_key) responder_nonce = test_values['receiver_nonce'] auth_ack_ciphertext = decode_hex( "01ea0451958701280a56482929d3b0757da8f7fbe5286784beead59d95089c217c9b917788989470" "b0e330cc6e4fb383c0340ed85fab836ec9fb8a49672712aeabbdfd1e837c1ff4cace34311cd7f4de" "05d59279e3524ab26ef753a0095637ac88f2b499b9914b5f64e143eae548a1066e14cd2f4bd7f814" "c4652f11b254f8a2d0191e2f5546fae6055694aed14d906df79ad3b407d94692694e259191cde171" "ad542fc588fa2b7333313d82a9f887332f1dfc36cea03f831cb9a23fea05b33deb999e85489e645f" "6aab1872475d488d7bd6c7c120caf28dbfc5d6833888155ed69d34dbdc39c1f299be1057810f34fb" "e754d021bfca14dc989753d61c413d261934e1a9c67ee060a25eefb54e81a4d14baff922180c395d" "3f998d70f46f6b58306f969627ae364497e73fc27f6d17ae45a413d322cb8814276be6ddd13b885b" "201b943213656cde498fa0e9ddc8e0b8f8a53824fbd82254f3e2c17e8eaea009c38b4aa0a3f306e8" "797db43c25d68e86f262e564086f59a2fc60511c42abfb3057c247a8a8fe4fb3ccbadde17514b7ac" "8000cdb6a912778426260c47f38919a91f25f4b5ffb455d6aaaf150f7e5529c100ce62d6d92826a7" "1778d809bdf60232ae21ce8a437eca8223f45ac37f6487452ce626f549b3b5fdee26afd2072e4bc7" "5833c2464c805246155289f4") aes_secret, mac_secret, _, _ = responder.derive_secrets( initiator_nonce, responder_nonce, initiator_ephemeral_pubkey, auth_init_ciphertext, auth_ack_ciphertext) # Check that the secrets derived by the responder match the expected values. expected_aes_secret = decode_hex( "80e8632c05fed6fc2a13b0f8d31a3cf645366239170ea067065aba8e28bac487") expected_mac_secret = decode_hex( "2ea74ec5dae199227dff1af715362700e989d889d7a493cb0639691efb8e5f98") assert aes_secret == expected_aes_secret assert mac_secret == expected_mac_secret responder_ephemeral_pubkey, responder_nonce = initiator.decode_auth_ack_message( auth_ack_ciphertext) initiator_aes_secret, initiator_mac_secret, _, _ = initiator.derive_secrets( initiator_nonce, responder_nonce, responder_ephemeral_pubkey, auth_init_ciphertext, auth_ack_ciphertext) # Check that the secrets derived by the initiator match the expected values. assert initiator_aes_secret == expected_aes_secret assert initiator_mac_secret == expected_mac_secret
async def test_handshake(): # This data comes from https://gist.github.com/fjl/3a78780d17c755d22df2 test_values = { "initiator_private_key": "5e173f6ac3c669587538e7727cf19b782a4f2fda07c1eaa662c593e5e85e3051", "receiver_private_key": "c45f950382d542169ea207959ee0220ec1491755abe405cd7498d6b16adb6df8", "initiator_ephemeral_private_key": "19c2185f4f40634926ebed3af09070ca9e029f2edd5fae6253074896205f5f6c", # noqa: E501 "receiver_ephemeral_private_key": "d25688cf0ab10afa1a0e2dba7853ed5f1e5bf1c631757ed4e103b593ff3f5620", # noqa: E501 "auth_plaintext": "884c36f7ae6b406637c1f61b2f57e1d2cab813d24c6559aaf843c3f48962f32f46662c066d39669b7b2e3ba14781477417600e7728399278b1b5d801a519aa570034fdb5419558137e0d44cd13d319afe5629eeccb47fd9dfe55cc6089426e46cc762dd8a0636e07a54b31169eba0c7a20a1ac1ef68596f1f283b5c676bae4064abfcce24799d09f67e392632d3ffdc12e3d6430dcb0ea19c318343ffa7aae74d4cd26fecb93657d1cd9e9eaf4f8be720b56dd1d39f190c4e1c6b7ec66f077bb1100", # noqa: E501 "authresp_plaintext": "802b052f8b066640bba94a4fc39d63815c377fced6fcb84d27f791c9921ddf3e9bf0108e298f490812847109cbd778fae393e80323fd643209841a3b7f110397f37ec61d84cea03dcc5e8385db93248584e8af4b4d1c832d8c7453c0089687a700", # noqa: E501 "auth_ciphertext": "04a0274c5951e32132e7f088c9bdfdc76c9d91f0dc6078e848f8e3361193dbdc43b94351ea3d89e4ff33ddcefbc80070498824857f499656c4f79bbd97b6c51a514251d69fd1785ef8764bd1d262a883f780964cce6a14ff206daf1206aa073a2d35ce2697ebf3514225bef186631b2fd2316a4b7bcdefec8d75a1025ba2c5404a34e7795e1dd4bc01c6113ece07b0df13b69d3ba654a36e35e69ff9d482d88d2f0228e7d96fe11dccbb465a1831c7d4ad3a026924b182fc2bdfe016a6944312021da5cc459713b13b86a686cf34d6fe6615020e4acf26bf0d5b7579ba813e7723eb95b3cef9942f01a58bd61baee7c9bdd438956b426a4ffe238e61746a8c93d5e10680617c82e48d706ac4953f5e1c4c4f7d013c87d34a06626f498f34576dc017fdd3d581e83cfd26cf125b6d2bda1f1d56", # noqa: E501 "authresp_ciphertext": "049934a7b2d7f9af8fd9db941d9da281ac9381b5740e1f64f7092f3588d4f87f5ce55191a6653e5e80c1c5dd538169aa123e70dc6ffc5af1827e546c0e958e42dad355bcc1fcb9cdf2cf47ff524d2ad98cbf275e661bf4cf00960e74b5956b799771334f426df007350b46049adb21a6e78ab1408d5e6ccde6fb5e69f0f4c92bb9c725c02f99fa72b9cdc8dd53cff089e0e73317f61cc5abf6152513cb7d833f09d2851603919bf0fbe44d79a09245c6e8338eb502083dc84b846f2fee1cc310d2cc8b1b9334728f97220bb799376233e113", # noqa: E501 "ecdhe_shared_secret": "e3f407f83fc012470c26a93fdff534100f2c6f736439ce0ca90e9914f7d1c381", "initiator_nonce": "cd26fecb93657d1cd9e9eaf4f8be720b56dd1d39f190c4e1c6b7ec66f077bb11", "receiver_nonce": "f37ec61d84cea03dcc5e8385db93248584e8af4b4d1c832d8c7453c0089687a7", "aes_secret": "c0458fa97a5230830e05f4f20b7c755c1d4e54b1ce5cf43260bb191eef4e418d", "mac_secret": "48c938884d5067a1598272fcddaa4b833cd5e7d92e8228c0ecdfabbe68aef7f1", "token": "3f9ec2592d1554852b1f54d228f042ed0a9310ea86d038dc2b401ba8cd7fdac4", "initial_egress_MAC": "09771e93b1a6109e97074cbe2d2b0cf3d3878efafe68f53c41bb60c0ec49097e", "initial_ingress_MAC": "75823d96e23136c89666ee025fb21a432be906512b3dd4a3049e898adb433847", "initiator_hello_packet": "6ef23fcf1cec7312df623f9ae701e63b550cdb8517fefd8dd398fc2acd1d935e6e0434a2b96769078477637347b7b01924fff9ff1c06df2f804df3b0402bbb9f87365b3c6856b45e1e2b6470986813c3816a71bff9d69dd297a5dbd935ab578f6e5d7e93e4506a44f307c332d95e8a4b102585fd8ef9fc9e3e055537a5cec2e9", # noqa: E501 "receiver_hello_packet": 
"6ef23fcf1cec7312df623f9ae701e63be36a1cdd1b19179146019984f3625d4a6e0434a2b96769050577657247b7b02bc6c314470eca7e3ef650b98c83e9d7dd4830b3f718ff562349aead2530a8d28a8484604f92e5fced2c6183f304344ab0e7c301a0c05559f4c25db65e36820b4b909a226171a60ac6cb7beea09376d6d8" # noqa: E501 } for k, v in test_values.items(): test_values[k] = decode_hex(v) initiator_remote = kademlia.Node( keys.PrivateKey(test_values['receiver_private_key']).public_key, kademlia.Address('0.0.0.0', 0, 0)) initiator = HandshakeInitiator( initiator_remote, keys.PrivateKey(test_values['initiator_private_key'])) initiator.ephemeral_privkey = keys.PrivateKey( test_values['initiator_ephemeral_private_key']) responder_remote = kademlia.Node( keys.PrivateKey(test_values['initiator_private_key']).public_key, kademlia.Address('0.0.0.0', 0, 0)) responder = HandshakeResponder( responder_remote, keys.PrivateKey(test_values['receiver_private_key'])) responder.ephemeral_privkey = keys.PrivateKey( test_values['receiver_ephemeral_private_key']) # Check that the auth message generated by the initiator is what we expect. Notice that we # can't use the auth_init generated here because the non-deterministic prefix would cause the # derived secrets to not match the expected values. _auth_init = initiator.create_auth_message(test_values['initiator_nonce']) assert len(_auth_init) == len(test_values['auth_plaintext']) assert _auth_init[65:] == test_values['auth_plaintext'][ 65:] # starts with non deterministic k # Check that encrypting and decrypting the auth_init gets us the orig msg. _auth_init_ciphertext = initiator.encrypt_auth_message(_auth_init) assert _auth_init == ecies.decrypt(_auth_init_ciphertext, responder.privkey) # Check that the responder correctly decodes the auth msg. auth_msg_ciphertext = test_values['auth_ciphertext'] initiator_ephemeral_pubkey, initiator_nonce = responder.decode_authentication( auth_msg_ciphertext) assert initiator_nonce == test_values['initiator_nonce'] assert initiator_ephemeral_pubkey == (keys.PrivateKey( test_values['initiator_ephemeral_private_key']).public_key) # Check that the auth_ack msg generated by the responder is what we expect. auth_ack_msg = responder.create_auth_ack_message( test_values['receiver_nonce']) assert auth_ack_msg == test_values['authresp_plaintext'] # Check that the secrets derived from ephemeral key agreements match the expected values. auth_ack_ciphertext = test_values['authresp_ciphertext'] aes_secret, mac_secret, egress_mac, ingress_mac = responder.derive_secrets( initiator_nonce, test_values['receiver_nonce'], initiator_ephemeral_pubkey, auth_msg_ciphertext, auth_ack_ciphertext) assert aes_secret == test_values['aes_secret'] assert mac_secret == test_values['mac_secret'] # Test values are from initiator perspective, so they're reversed here. assert ingress_mac.digest() == test_values['initial_egress_MAC'] assert egress_mac.digest() == test_values['initial_ingress_MAC'] # Check that the initiator secrets match as well. 
responder_ephemeral_pubkey, responder_nonce = initiator.decode_auth_ack_message( test_values['authresp_ciphertext']) (initiator_aes_secret, initiator_mac_secret, initiator_egress_mac, initiator_ingress_mac) = initiator.derive_secrets( initiator_nonce, responder_nonce, responder_ephemeral_pubkey, auth_msg_ciphertext, auth_ack_ciphertext) assert initiator_aes_secret == aes_secret assert initiator_mac_secret == mac_secret assert initiator_ingress_mac.digest() == test_values['initial_ingress_MAC'] assert initiator_egress_mac.digest() == test_values['initial_egress_MAC'] # Finally, check that two Peers configured with the secrets generated above understand each # other. responder_reader = asyncio.StreamReader() initiator_reader = asyncio.StreamReader() # Link the initiator's writer to the responder's reader, and the responder's writer to the # initiator's reader. responder_writer = type("mock-streamwriter", (object, ), {"write": initiator_reader.feed_data}) initiator_writer = type("mock-streamwriter", (object, ), {"write": responder_reader.feed_data}) initiator_peer = DummyPeer(remote=initiator.remote, privkey=initiator.privkey, reader=initiator_reader, writer=initiator_writer, aes_secret=initiator_aes_secret, mac_secret=initiator_mac_secret, egress_mac=initiator_egress_mac, ingress_mac=initiator_ingress_mac, chaindb=None, network_id=1) initiator_peer.base_protocol.send_handshake() responder_peer = DummyPeer(remote=responder.remote, privkey=responder.privkey, reader=responder_reader, writer=responder_writer, aes_secret=aes_secret, mac_secret=mac_secret, egress_mac=egress_mac, ingress_mac=ingress_mac, chaindb=None, network_id=1) responder_peer.base_protocol.send_handshake() # The handshake msgs sent by each peer (above) are going to be fed directly into their remote's # reader, and thus the read_msg() calls will return immediately. responder_hello, _ = await responder_peer.read_msg() initiator_hello, _ = await initiator_peer.read_msg() assert isinstance(responder_hello, Hello) assert isinstance(initiator_hello, Hello)
async def test_handshake_eip8():
    cancel_token = CancelToken("test_handshake_eip8")
    use_eip8 = True
    initiator_remote = kademlia.Node(
        keys.PrivateKey(eip8_values['receiver_private_key']).public_key,
        kademlia.Address('0.0.0.0', 0, 0))
    initiator = HandshakeInitiator(
        initiator_remote,
        keys.PrivateKey(eip8_values['initiator_private_key']),
        use_eip8,
        cancel_token)
    initiator.ephemeral_privkey = keys.PrivateKey(
        eip8_values['initiator_ephemeral_private_key'])

    responder_remote = kademlia.Node(
        keys.PrivateKey(eip8_values['initiator_private_key']).public_key,
        kademlia.Address('0.0.0.0', 0, 0))
    responder = HandshakeResponder(
        responder_remote,
        keys.PrivateKey(eip8_values['receiver_private_key']),
        use_eip8,
        cancel_token)
    responder.ephemeral_privkey = keys.PrivateKey(
        eip8_values['receiver_ephemeral_private_key'])

    auth_init_ciphertext = eip8_values['auth_init_ciphertext']

    # Check that we can decrypt/decode the EIP-8 auth init message.
    initiator_ephemeral_pubkey, initiator_nonce, _ = decode_authentication(
        auth_init_ciphertext, responder.privkey)
    assert initiator_nonce == eip8_values['initiator_nonce']
    assert initiator_ephemeral_pubkey == (
        keys.PrivateKey(eip8_values['initiator_ephemeral_private_key']).public_key)

    responder_nonce = eip8_values['receiver_nonce']
    auth_ack_ciphertext = eip8_values['auth_ack_ciphertext']
    aes_secret, mac_secret, egress_mac, ingress_mac = responder.derive_secrets(
        initiator_nonce, responder_nonce, initiator_ephemeral_pubkey,
        auth_init_ciphertext, auth_ack_ciphertext)

    # Check that the secrets derived by the responder match the expected values.
    assert aes_secret == eip8_values['expected_aes_secret']
    assert mac_secret == eip8_values['expected_mac_secret']
    # Also according to https://github.com/ethereum/EIPs/blob/master/EIPS/eip-8.md, running B's
    # ingress-mac keccak state on the string "foo" yields the following hash:
    ingress_mac_copy = ingress_mac.copy()
    ingress_mac_copy.update(b'foo')
    assert ingress_mac_copy.hexdigest() == (
        '0c7ec6340062cc46f5e9f1e3cf86f8c8c403c5a0964f5df0ebd34a75ddc86db5')

    responder_ephemeral_pubkey, responder_nonce = initiator.decode_auth_ack_message(
        auth_ack_ciphertext)
    (initiator_aes_secret,
     initiator_mac_secret,
     initiator_egress_mac,
     initiator_ingress_mac) = initiator.derive_secrets(
        initiator_nonce, responder_nonce, responder_ephemeral_pubkey,
        auth_init_ciphertext, auth_ack_ciphertext)

    # Check that the secrets derived by the initiator match the expected values.
    assert initiator_aes_secret == eip8_values['expected_aes_secret']
    assert initiator_mac_secret == eip8_values['expected_mac_secret']

    # Finally, check that two Peers configured with the secrets generated above understand each
    # other.
    responder_reader = asyncio.StreamReader()
    initiator_reader = asyncio.StreamReader()
    # Link the initiator's writer to the responder's reader, and the responder's writer to the
    # initiator's reader.
    responder_writer = MockStreamWriter(initiator_reader.feed_data)
    initiator_writer = MockStreamWriter(responder_reader.feed_data)
    initiator_peer = DumbPeer(remote=initiator.remote,
                              privkey=initiator.privkey,
                              reader=initiator_reader,
                              writer=initiator_writer,
                              aes_secret=initiator_aes_secret,
                              mac_secret=initiator_mac_secret,
                              egress_mac=initiator_egress_mac,
                              ingress_mac=initiator_ingress_mac,
                              headerdb=None,
                              network_id=1)
    initiator_peer.base_protocol.send_handshake()
    responder_peer = DumbPeer(remote=responder.remote,
                              privkey=responder.privkey,
                              reader=responder_reader,
                              writer=responder_writer,
                              aes_secret=aes_secret,
                              mac_secret=mac_secret,
                              egress_mac=egress_mac,
                              ingress_mac=ingress_mac,
                              headerdb=None,
                              network_id=1)
    responder_peer.base_protocol.send_handshake()

    # The handshake msgs sent by each peer (above) are going to be fed directly into their
    # remote's reader, and thus the read_msg() calls will return immediately.
    responder_hello, _ = await responder_peer.read_msg()
    initiator_hello, _ = await initiator_peer.read_msg()

    assert isinstance(responder_hello, Hello)
    assert isinstance(initiator_hello, Hello)
async def get_directly_linked_peers_without_handshake( alice_factory: BasePeerFactory = None, bob_factory: BasePeerFactory = None) -> Tuple[BasePeer, BasePeer]: """ See get_directly_linked_peers(). Neither the P2P handshake nor the sub-protocol handshake will be performed here. """ cancel_token = CancelToken("get_directly_linked_peers_without_handshake") if alice_factory is None: alice_factory = ParagonPeerFactory( privkey=ecies.generate_privkey(), context=ParagonContext(), token=cancel_token, ) if bob_factory is None: bob_factory = ParagonPeerFactory( privkey=ecies.generate_privkey(), context=ParagonContext(), token=cancel_token, ) alice_private_key = alice_factory.privkey bob_private_key = bob_factory.privkey alice_remote = kademlia.Node(bob_private_key.public_key, kademlia.Address('0.0.0.0', 0, 0)) bob_remote = kademlia.Node(alice_private_key.public_key, kademlia.Address('0.0.0.0', 0, 0)) use_eip8 = False initiator = auth.HandshakeInitiator(alice_remote, alice_private_key, use_eip8, cancel_token) f_alice: 'asyncio.Future[BasePeer]' = asyncio.Future() handshake_finished = asyncio.Event() ( (alice_reader, alice_writer), (bob_reader, bob_writer), ) = get_directly_connected_streams() async def do_handshake() -> None: aes_secret, mac_secret, egress_mac, ingress_mac = await auth._handshake( initiator, alice_reader, alice_writer, cancel_token) transport = Transport( remote=alice_remote, private_key=alice_factory.privkey, reader=alice_reader, writer=alice_writer, aes_secret=aes_secret, mac_secret=mac_secret, egress_mac=egress_mac, ingress_mac=ingress_mac, ) alice = alice_factory.create_peer(transport) f_alice.set_result(alice) handshake_finished.set() asyncio.ensure_future(do_handshake()) use_eip8 = False responder = auth.HandshakeResponder(bob_remote, bob_private_key, use_eip8, cancel_token) auth_cipher = await bob_reader.read(constants.ENCRYPTED_AUTH_MSG_LEN) initiator_ephemeral_pubkey, initiator_nonce, _ = decode_authentication( auth_cipher, bob_private_key) responder_nonce = keccak(os.urandom(constants.HASH_LEN)) auth_ack_msg = responder.create_auth_ack_message(responder_nonce) auth_ack_ciphertext = responder.encrypt_auth_ack_message(auth_ack_msg) bob_writer.write(auth_ack_ciphertext) await handshake_finished.wait() alice = await f_alice aes_secret, mac_secret, egress_mac, ingress_mac = responder.derive_secrets( initiator_nonce, responder_nonce, initiator_ephemeral_pubkey, auth_cipher, auth_ack_ciphertext) assert egress_mac.digest() == alice.transport._ingress_mac.digest() assert ingress_mac.digest() == alice.transport._egress_mac.digest() transport = Transport( remote=bob_remote, private_key=bob_factory.privkey, reader=bob_reader, writer=bob_writer, aes_secret=aes_secret, mac_secret=mac_secret, egress_mac=egress_mac, ingress_mac=ingress_mac, ) bob = bob_factory.create_peer(transport) return alice, bob
def random_node(nodeid=None):
    address = kademlia.Address('127.0.0.1', 30303)
    node = kademlia.Node(random_pubkey(), address)
    if nodeid is not None:
        node.id = nodeid
    return node
def random_address():
    return kademlia.Address('10.0.0.{}'.format(random.randint(0, 255)),
                            random.randint(0, 9999))
async def get_directly_linked_peers_without_handshake(peer1_class=LESPeer,
                                                      peer1_headerdb=None,
                                                      peer2_class=LESPeer,
                                                      peer2_headerdb=None):
    """See get_directly_linked_peers().

    Neither the P2P handshake nor the sub-protocol handshake will be performed here.
    """
    cancel_token = CancelToken("get_directly_linked_peers_without_handshake")
    if peer1_headerdb is None:
        peer1_headerdb = get_fresh_mainnet_headerdb()
    if peer2_headerdb is None:
        peer2_headerdb = get_fresh_mainnet_headerdb()
    peer1_private_key = ecies.generate_privkey()
    peer2_private_key = ecies.generate_privkey()
    peer1_remote = kademlia.Node(peer2_private_key.public_key,
                                 kademlia.Address('0.0.0.0', 0, 0))
    peer2_remote = kademlia.Node(peer1_private_key.public_key,
                                 kademlia.Address('0.0.0.0', 0, 0))
    initiator = auth.HandshakeInitiator(peer1_remote, peer1_private_key, cancel_token)
    peer2_reader = asyncio.StreamReader()
    peer1_reader = asyncio.StreamReader()
    # Link the peer1's writer to the peer2's reader, and the peer2's writer to the
    # peer1's reader.
    peer2_writer = type("mock-streamwriter", (object, ), {
        "write": peer1_reader.feed_data,
        "close": lambda: None
    })
    peer1_writer = type("mock-streamwriter", (object, ), {
        "write": peer2_reader.feed_data,
        "close": lambda: None
    })

    peer1, peer2 = None, None
    handshake_finished = asyncio.Event()

    async def do_handshake():
        nonlocal peer1
        aes_secret, mac_secret, egress_mac, ingress_mac = await auth._handshake(
            initiator, peer1_reader, peer1_writer, cancel_token)

        peer1 = peer1_class(remote=peer1_remote,
                            privkey=peer1_private_key,
                            reader=peer1_reader,
                            writer=peer1_writer,
                            aes_secret=aes_secret,
                            mac_secret=mac_secret,
                            egress_mac=egress_mac,
                            ingress_mac=ingress_mac,
                            headerdb=peer1_headerdb,
                            network_id=1)

        handshake_finished.set()

    asyncio.ensure_future(do_handshake())

    responder = auth.HandshakeResponder(peer2_remote, peer2_private_key, cancel_token)
    auth_cipher = await peer2_reader.read(constants.ENCRYPTED_AUTH_MSG_LEN)

    initiator_ephemeral_pubkey, initiator_nonce, _ = decode_authentication(
        auth_cipher, peer2_private_key)
    responder_nonce = keccak(os.urandom(constants.HASH_LEN))
    auth_ack_msg = responder.create_auth_ack_message(responder_nonce)
    auth_ack_ciphertext = responder.encrypt_auth_ack_message(auth_ack_msg)
    peer2_writer.write(auth_ack_ciphertext)

    await handshake_finished.wait()

    aes_secret, mac_secret, egress_mac, ingress_mac = responder.derive_secrets(
        initiator_nonce, responder_nonce, initiator_ephemeral_pubkey,
        auth_cipher, auth_ack_ciphertext)
    assert egress_mac.digest() == peer1.ingress_mac.digest()
    assert ingress_mac.digest() == peer1.egress_mac.digest()

    peer2 = peer2_class(remote=peer2_remote,
                        privkey=peer2_private_key,
                        reader=peer2_reader,
                        writer=peer2_writer,
                        aes_secret=aes_secret,
                        mac_secret=mac_secret,
                        egress_mac=egress_mac,
                        ingress_mac=ingress_mac,
                        headerdb=peer2_headerdb,
                        network_id=1)

    return peer1, peer2
def random_node():
    address = kademlia.Address('127.0.0.1', 30303)
    # kademlia.Node expects a public key, so derive one from the freshly generated private key.
    return kademlia.Node(generate_privkey().public_key, address)
async def test_handshake(): # TODO: this test should be re-written to not depend on functionality in the `ETHPeer` class. cancel_token = CancelToken("test_handshake") use_eip8 = False initiator_remote = kademlia.Node( keys.PrivateKey(test_values['receiver_private_key']).public_key, kademlia.Address('0.0.0.0', 0, 0)) initiator = HandshakeInitiator( initiator_remote, keys.PrivateKey(test_values['initiator_private_key']), use_eip8, cancel_token) initiator.ephemeral_privkey = keys.PrivateKey( test_values['initiator_ephemeral_private_key']) responder_remote = kademlia.Node( keys.PrivateKey(test_values['initiator_private_key']).public_key, kademlia.Address('0.0.0.0', 0, 0)) responder = HandshakeResponder( responder_remote, keys.PrivateKey(test_values['receiver_private_key']), use_eip8, cancel_token) responder.ephemeral_privkey = keys.PrivateKey( test_values['receiver_ephemeral_private_key']) # Check that the auth message generated by the initiator is what we expect. Notice that we # can't use the auth_init generated here because the non-deterministic prefix would cause the # derived secrets to not match the expected values. _auth_init = initiator.create_auth_message(test_values['initiator_nonce']) assert len(_auth_init) == len(test_values['auth_plaintext']) assert _auth_init[65:] == test_values['auth_plaintext'][ 65:] # starts with non deterministic k # Check that encrypting and decrypting the auth_init gets us the orig msg. _auth_init_ciphertext = initiator.encrypt_auth_message(_auth_init) assert _auth_init == ecies.decrypt(_auth_init_ciphertext, responder.privkey) # Check that the responder correctly decodes the auth msg. auth_msg_ciphertext = test_values['auth_ciphertext'] initiator_ephemeral_pubkey, initiator_nonce, _ = decode_authentication( auth_msg_ciphertext, responder.privkey) assert initiator_nonce == test_values['initiator_nonce'] assert initiator_ephemeral_pubkey == (keys.PrivateKey( test_values['initiator_ephemeral_private_key']).public_key) # Check that the auth_ack msg generated by the responder is what we expect. auth_ack_msg = responder.create_auth_ack_message( test_values['receiver_nonce']) assert auth_ack_msg == test_values['authresp_plaintext'] # Check that the secrets derived from ephemeral key agreements match the expected values. auth_ack_ciphertext = test_values['authresp_ciphertext'] aes_secret, mac_secret, egress_mac, ingress_mac = responder.derive_secrets( initiator_nonce, test_values['receiver_nonce'], initiator_ephemeral_pubkey, auth_msg_ciphertext, auth_ack_ciphertext) assert aes_secret == test_values['aes_secret'] assert mac_secret == test_values['mac_secret'] # Test values are from initiator perspective, so they're reversed here. assert ingress_mac.digest() == test_values['initial_egress_MAC'] assert egress_mac.digest() == test_values['initial_ingress_MAC'] # Check that the initiator secrets match as well. 
responder_ephemeral_pubkey, responder_nonce = initiator.decode_auth_ack_message( test_values['authresp_ciphertext']) (initiator_aes_secret, initiator_mac_secret, initiator_egress_mac, initiator_ingress_mac) = initiator.derive_secrets( initiator_nonce, responder_nonce, responder_ephemeral_pubkey, auth_msg_ciphertext, auth_ack_ciphertext) assert initiator_aes_secret == aes_secret assert initiator_mac_secret == mac_secret assert initiator_ingress_mac.digest() == test_values['initial_ingress_MAC'] assert initiator_egress_mac.digest() == test_values['initial_egress_MAC'] # Finally, check that two Peers configured with the secrets generated above understand each # other. responder_reader = asyncio.StreamReader() initiator_reader = asyncio.StreamReader() # Link the initiator's writer to the responder's reader, and the responder's writer to the # initiator's reader. responder_writer = MockStreamWriter(initiator_reader.feed_data) initiator_writer = MockStreamWriter(responder_reader.feed_data) initiator_peer = DumbPeer(remote=initiator.remote, privkey=initiator.privkey, reader=initiator_reader, writer=initiator_writer, aes_secret=initiator_aes_secret, mac_secret=initiator_mac_secret, egress_mac=initiator_egress_mac, ingress_mac=initiator_ingress_mac, headerdb=None, network_id=1) initiator_peer.base_protocol.send_handshake() responder_peer = DumbPeer(remote=responder.remote, privkey=responder.privkey, reader=responder_reader, writer=responder_writer, aes_secret=aes_secret, mac_secret=mac_secret, egress_mac=egress_mac, ingress_mac=ingress_mac, headerdb=None, network_id=1) responder_peer.base_protocol.send_handshake() # The handshake msgs sent by each peer (above) are going to be fed directly into their remote's # reader, and thus the read_msg() calls will return immediately. responder_hello, _ = await responder_peer.read_msg() initiator_hello, _ = await initiator_peer.read_msg() assert isinstance(responder_hello, Hello) assert isinstance(initiator_hello, Hello)
def _test() -> None:
    import argparse
    import signal
    from p2p import constants
    from p2p import ecies

    loop = asyncio.get_event_loop()
    loop.set_debug(True)

    parser = argparse.ArgumentParser()
    parser.add_argument('-bootnode', type=str, help="The enode to use as bootnode")
    parser.add_argument('-v5', action="store_true")
    parser.add_argument('-trace', action="store_true")
    args = parser.parse_args()

    log_level = logging.DEBUG
    if args.trace:
        log_level = TRACE_LEVEL_NUM
    logging.basicConfig(level=log_level, format='%(asctime)s %(levelname)s: %(message)s')

    listen_host = '127.0.0.1'
    # Listen on a port other than 30303 so that we can test against a local geth instance
    # running on that port.
    listen_port = 30304
    privkey = ecies.generate_privkey()
    addr = kademlia.Address(listen_host, listen_port, listen_port)
    if args.bootnode:
        bootstrap_nodes = tuple([kademlia.Node.from_uri(args.bootnode)])
    elif args.v5:
        bootstrap_nodes = tuple(
            kademlia.Node.from_uri(enode) for enode in constants.DISCOVERY_V5_BOOTNODES)
    else:
        bootstrap_nodes = tuple(
            kademlia.Node.from_uri(enode) for enode in constants.ROPSTEN_BOOTNODES)

    discovery = DiscoveryProtocol(privkey, addr, bootstrap_nodes, CancelToken("discovery"))
    loop.run_until_complete(
        loop.create_datagram_endpoint(lambda: discovery, local_addr=('0.0.0.0', listen_port)))

    async def run() -> None:
        try:
            if args.v5:
                remote = bootstrap_nodes[0]
                topic = b'LES@41941023680923e0'  # LES/ropsten
                token = discovery.send_ping_v5(remote, [topic])
                await discovery.wait_pong(remote, token)
                discovery.send_find_node_v5(remote, random.randint(0, kademlia.k_max_node_id))
                await discovery.wait_neighbours(remote)
            else:
                await discovery.bootstrap()
        except OperationCancelled:
            pass
        finally:
            await discovery.stop()

    for sig in [signal.SIGINT, signal.SIGTERM]:
        loop.add_signal_handler(sig, discovery.cancel_token.trigger)

    loop.run_until_complete(run())
    loop.close()