async def _setup_alice_and_bob_factories(
        alice_headerdb=None,
        bob_headerdb=None,
        alice_peer_class=HLSPeer,
        bob_peer_class=None):
    if bob_peer_class is None:
        bob_peer_class = alice_peer_class

    cancel_token = CancelToken('helios.get_directly_linked_peers_without_handshake')

    #
    # Alice
    #
    if alice_headerdb is None:
        alice_headerdb = get_fresh_mainnet_headerdb()

    alice_context = ChainContext(
        headerdb=alice_headerdb,
        network_id=1,
        vm_configuration=tuple(),
    )

    if alice_peer_class is HLSPeer:
        alice_factory_class = HLSPeerFactory
    elif alice_peer_class is LESPeer:
        alice_factory_class = LESPeerFactory
    else:
        raise TypeError(f"Unknown peer class: {alice_peer_class}")

    alice_factory = alice_factory_class(
        privkey=ecies.generate_privkey(),
        context=alice_context,
        token=cancel_token,
    )

    #
    # Bob
    #
    if bob_headerdb is None:
        bob_headerdb = get_fresh_mainnet_headerdb()

    bob_context = ChainContext(
        headerdb=bob_headerdb,
        network_id=1,
        vm_configuration=tuple(),
    )

    if bob_peer_class is HLSPeer:
        bob_factory_class = HLSPeerFactory
    elif bob_peer_class is LESPeer:
        bob_factory_class = LESPeerFactory
    else:
        raise TypeError(f"Unknown peer class: {bob_peer_class}")

    bob_factory = bob_factory_class(
        privkey=ecies.generate_privkey(),
        context=bob_context,
        token=cancel_token,
    )

    return alice_factory, bob_factory
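A hypothetical usage sketch (not taken from the original test suite): the factories built by _setup_alice_and_bob_factories are meant to be handed to a peer-linking helper such as get_directly_linked_peers_without_handshake, shown later in this section. The test name, the pytest.mark.asyncio marker, and the assumption that each factory instantiates its configured peer class are mine, not the project's, and it presumes the two helpers are importable from the same place.

import pytest


@pytest.mark.asyncio
async def test_directly_linked_les_peers():
    # Build LES-flavoured factories; bob defaults to alice's peer class.
    alice_factory, bob_factory = await _setup_alice_and_bob_factories(
        alice_peer_class=LESPeer,
    )
    # Hand them to the linking helper defined further below in this section.
    alice, bob = await get_directly_linked_peers_without_handshake(
        alice_factory,
        bob_factory,
    )
    # Assumes each factory creates an instance of its configured peer class.
    assert isinstance(alice, LESPeer)
    assert isinstance(bob, LESPeer)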
def test_encrypt_decrypt():
    msg = b'test yeah'
    privkey = ecies.generate_privkey()
    ciphertext = ecies.encrypt(msg, privkey.public_key)
    decrypted = ecies.decrypt(ciphertext, privkey)
    assert decrypted == msg

    privkey2 = ecies.generate_privkey()
    with pytest.raises(ecies.DecryptionError):
        decrypted = ecies.decrypt(ciphertext, privkey2)
def initialize_data_dir(chain_config: ChainConfig) -> None:
    should_create_data_dir = (
        not chain_config.data_dir.exists() and
        is_under_path(chain_config.helios_root_dir, chain_config.data_dir)
    )
    if should_create_data_dir:
        chain_config.data_dir.mkdir(parents=True, exist_ok=True)
    elif not chain_config.data_dir.exists():
        # we don't lazily create the base dir for non-default base directories.
        raise MissingPath(
            "The base chain directory provided does not exist: `{0}`".format(
                chain_config.data_dir,
            ),
            chain_config.data_dir,
        )

    # Logfile
    should_create_logdir = (
        not chain_config.logdir_path.exists() and
        is_under_path(chain_config.helios_root_dir, chain_config.logdir_path)
    )
    if should_create_logdir:
        chain_config.logdir_path.mkdir(parents=True, exist_ok=True)
        chain_config.logfile_path.touch()
    elif not chain_config.logdir_path.exists():
        # we don't lazily create the base dir for non-default base directories.
        raise MissingPath(
            "The base logging directory provided does not exist: `{0}`".format(
                chain_config.logdir_path,
            ),
            chain_config.logdir_path,
        )

    # Chain data-dir
    os.makedirs(chain_config.database_dir, exist_ok=True)

    # Nodekey
    if chain_config.nodekey is None:
        nodekey = ecies.generate_privkey()
        with open(chain_config.nodekey_path, 'wb') as nodekey_file:
            nodekey_file.write(nodekey.to_bytes())
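A minimal, hypothetical round-trip sketch (not part of this module): the nodekey written above appears to be a raw eth_keys private key (the datatypes.PrivateKey annotation in the handshake code and the to_bytes() call suggest as much), so it can presumably be read back as shown below; the project's own load_nodekey, imported in the last example of this section, likely does something equivalent.

from pathlib import Path

from eth_keys import keys


def read_nodekey(nodekey_path: Path) -> keys.PrivateKey:
    # Assumes the file holds exactly the raw private-key bytes written by
    # initialize_data_dir() above.
    return keys.PrivateKey(nodekey_path.read_bytes())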
def __init__(self,
             remote: kademlia.Node,
             privkey: datatypes.PrivateKey,
             use_eip8: bool,
             token: CancelToken) -> None:
    self.remote = remote
    self.privkey = privkey
    self.ephemeral_privkey = ecies.generate_privkey()
    self.use_eip8 = use_eip8
    self.cancel_token = token
def _test() -> None:
    import argparse
    import asyncio
    import signal

    from hvm.chains.ropsten import RopstenChain, ROPSTEN_VM_CONFIGURATION
    from hvm.db.backends.level import LevelDB

    from hp2p import ecies
    from hp2p.kademlia import Node

    from helios.protocol.common.constants import DEFAULT_PREFERRED_NODES
    from helios.protocol.common.context import ChainContext

    from tests.helios.core.integration_test_helpers import (
        FakeAsyncChainDB, FakeAsyncRopstenChain, connect_to_peers_loop)

    logging.basicConfig(level=logging.INFO, format='%(asctime)s %(levelname)s: %(message)s')

    parser = argparse.ArgumentParser()
    parser.add_argument('-db', type=str, required=True)
    parser.add_argument('-enode', type=str, required=False,
                        help="The enode we should connect to")
    args = parser.parse_args()

    chaindb = FakeAsyncChainDB(LevelDB(args.db))
    chain = FakeAsyncRopstenChain(chaindb)
    network_id = RopstenChain.network_id
    privkey = ecies.generate_privkey()

    context = ChainContext(
        headerdb=chaindb,
        network_id=network_id,
        vm_configuration=ROPSTEN_VM_CONFIGURATION,
    )
    peer_pool = HLSPeerPool(privkey=privkey, context=context)
    if args.enode:
        nodes = tuple([Node.from_uri(args.enode)])
    else:
        nodes = DEFAULT_PREFERRED_NODES[network_id]

    asyncio.ensure_future(peer_pool.run())
    peer_pool.run_task(connect_to_peers_loop(peer_pool, nodes))
    loop = asyncio.get_event_loop()

    syncer = FullNodeSyncer(chain, chaindb, chaindb.db, peer_pool)

    sigint_received = asyncio.Event()
    for sig in [signal.SIGINT, signal.SIGTERM]:
        loop.add_signal_handler(sig, sigint_received.set)

    async def exit_on_sigint() -> None:
        await sigint_received.wait()
        await syncer.cancel()
        await peer_pool.cancel()
        loop.stop()

    loop.set_debug(True)
    asyncio.ensure_future(exit_on_sigint())
    asyncio.ensure_future(syncer.run())
    loop.run_forever()
    loop.close()
async def _setup_alice_and_bob_factories(alice_db=None, bob_db=None):
    cancel_token = CancelToken('helios.get_directly_linked_peers_without_handshake')

    #
    # Alice
    #
    if alice_db is None:
        alice_db = get_fresh_db()

    alice_context = get_chain_context(alice_db, generate_random_private_key())

    alice_factory_class = HLSPeerFactory

    alice_factory = alice_factory_class(
        privkey=ecies.generate_privkey(),
        context=alice_context,
        token=cancel_token,
    )

    #
    # Bob
    #
    if bob_db is None:
        bob_db = get_fresh_db()

    bob_context = get_chain_context(bob_db, generate_random_private_key())

    bob_factory_class = HLSPeerFactory

    bob_factory = bob_factory_class(
        privkey=ecies.generate_privkey(),
        context=bob_context,
        token=cancel_token,
    )

    return alice_factory, bob_factory
async def test_lightchain_integration(
        request, event_loop, caplog, geth_ipc_path, enode, geth_process):
    """Test LightChainSyncer/LightPeerChain against a running geth instance.

    In order to run this manually, you can use `tox -e py36-lightchain_integration` or:

        pytest --integration --capture=no tests/helios/integration/test_lightchain_integration.py

    The fixture for this test was generated with:

        geth --testnet --syncmode full

    It only needs the first 11 blocks for this test to succeed.
    """
    if not pytest.config.getoption("--integration"):
        pytest.skip("Not asked to run integration tests")

    # will almost certainly want verbose logging in a failure
    caplog.set_level(logging.DEBUG)

    # make sure geth has been launched
    wait_for_socket(geth_ipc_path)

    remote = Node.from_uri(enode)
    base_db = AtomicDB()
    chaindb = FakeAsyncChainDB(base_db)
    chaindb.persist_header(ROPSTEN_GENESIS_HEADER)
    headerdb = FakeAsyncHeaderDB(base_db)
    context = ChainContext(
        headerdb=headerdb,
        network_id=ROPSTEN_NETWORK_ID,
        vm_configuration=ROPSTEN_VM_CONFIGURATION,
    )
    peer_pool = LESPeerPool(
        privkey=ecies.generate_privkey(),
        context=context,
    )
    chain = FakeAsyncRopstenChain(base_db)
    syncer = LightChainSyncer(chain, chaindb, peer_pool)
    syncer.min_peers_to_sync = 1
    peer_chain = LightPeerChain(headerdb, peer_pool)

    asyncio.ensure_future(peer_pool.run())
    asyncio.ensure_future(connect_to_peers_loop(peer_pool, tuple([remote])))
    asyncio.ensure_future(peer_chain.run())
    asyncio.ensure_future(syncer.run())
    await asyncio.sleep(0)  # Yield control to give the LightChainSyncer a chance to start

    def finalizer():
        event_loop.run_until_complete(peer_pool.cancel())
        event_loop.run_until_complete(peer_chain.cancel())
        event_loop.run_until_complete(syncer.cancel())

    request.addfinalizer(finalizer)

    n = 11

    # Wait for the chain to sync a few headers.
    async def wait_for_header_sync(block_number):
        while headerdb.get_canonical_head().block_number < block_number:
            await asyncio.sleep(0.1)

    await asyncio.wait_for(wait_for_header_sync(n), 5)

    # https://ropsten.etherscan.io/block/11
    header = headerdb.get_canonical_block_header_by_number(n)
    body = await peer_chain.coro_get_block_body_by_hash(header.hash)
    assert len(body['transactions']) == 15

    receipts = await peer_chain.coro_get_receipts(header.hash)
    assert len(receipts) == 15
    assert encode_hex(keccak(rlp.encode(receipts[0]))) == (
        '0xf709ed2c57efc18a1675e8c740f3294c9e2cb36ba7bb3b89d3ab4c8fef9d8860')

    assert len(peer_pool) == 1
    peer = peer_pool.highest_td_peer
    head = await peer_chain.coro_get_block_header_by_hash(peer.head_hash)

    # In order to answer queries for contract code, geth needs the state trie entry for the block
    # we specify in the query, but because of fast sync we can only assume it has that for recent
    # blocks, so we use the current head to lookup the code for the contract below.
    # https://ropsten.etherscan.io/address/0x95a48dca999c89e4e284930d9b9af973a7481287
    contract_addr = decode_hex('0x8B09D9ac6A4F7778fCb22852e879C7F3B2bEeF81')
    contract_code = await peer_chain.coro_get_contract_code(head.hash, contract_addr)
    assert encode_hex(contract_code) == '0x600060006000600060006000356000f1'

    account = await peer_chain.coro_get_account(head.hash, contract_addr)
    assert account.code_hash == keccak(contract_code)
    assert account.balance == 0
async def get_directly_linked_peers_without_handshake(
        alice_factory: BasePeerFactory = None,
        bob_factory: BasePeerFactory = None) -> Tuple[BasePeer, BasePeer]:
    """
    See get_directly_linked_peers().

    Neither the P2P handshake nor the sub-protocol handshake will be performed here.
    """
    cancel_token = CancelToken("get_directly_linked_peers_without_handshake")

    if alice_factory is None:
        alice_factory = ParagonPeerFactory(
            privkey=ecies.generate_privkey(),
            context=ParagonContext(),
            token=cancel_token,
        )

    if bob_factory is None:
        bob_factory = ParagonPeerFactory(
            privkey=ecies.generate_privkey(),
            context=ParagonContext(),
            token=cancel_token,
        )

    alice_private_key = alice_factory.privkey
    bob_private_key = bob_factory.privkey

    alice_remote = kademlia.Node(
        bob_private_key.public_key, kademlia.Address('0.0.0.0', 0, 0))
    bob_remote = kademlia.Node(
        alice_private_key.public_key, kademlia.Address('0.0.0.0', 0, 0))

    use_eip8 = False
    initiator = auth.HandshakeInitiator(alice_remote, alice_private_key, use_eip8, cancel_token)

    f_alice: 'asyncio.Future[BasePeer]' = asyncio.Future()
    handshake_finished = asyncio.Event()

    (
        (alice_reader, alice_writer),
        (bob_reader, bob_writer),
    ) = get_directly_connected_streams()

    async def do_handshake() -> None:
        aes_secret, mac_secret, egress_mac, ingress_mac = await auth._handshake(
            initiator, alice_reader, alice_writer, cancel_token)

        connection = PeerConnection(
            reader=alice_reader,
            writer=alice_writer,
            aes_secret=aes_secret,
            mac_secret=mac_secret,
            egress_mac=egress_mac,
            ingress_mac=ingress_mac,
        )
        alice = alice_factory.create_peer(
            alice_remote,
            connection,
        )

        f_alice.set_result(alice)
        handshake_finished.set()

    asyncio.ensure_future(do_handshake())

    use_eip8 = False
    responder = auth.HandshakeResponder(bob_remote, bob_private_key, use_eip8, cancel_token)
    auth_cipher = await bob_reader.read(constants.ENCRYPTED_AUTH_MSG_LEN)

    initiator_ephemeral_pubkey, initiator_nonce, _ = decode_authentication(
        auth_cipher, bob_private_key)
    responder_nonce = keccak(os.urandom(constants.HASH_LEN))
    auth_ack_msg = responder.create_auth_ack_message(responder_nonce)
    auth_ack_ciphertext = responder.encrypt_auth_ack_message(auth_ack_msg)
    bob_writer.write(auth_ack_ciphertext)

    await handshake_finished.wait()
    alice = await f_alice

    aes_secret, mac_secret, egress_mac, ingress_mac = responder.derive_secrets(
        initiator_nonce, responder_nonce, initiator_ephemeral_pubkey,
        auth_cipher, auth_ack_ciphertext)
    assert egress_mac.digest() == alice.ingress_mac.digest()
    assert ingress_mac.digest() == alice.egress_mac.digest()

    connection = PeerConnection(
        reader=bob_reader,
        writer=bob_writer,
        aes_secret=aes_secret,
        mac_secret=mac_secret,
        egress_mac=egress_mac,
        ingress_mac=ingress_mac,
    )
    bob = bob_factory.create_peer(
        bob_remote,
        connection,
    )

    return alice, bob
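Another hypothetical usage sketch (not from the original helpers): called with no arguments, the helper above links two in-memory Paragon peers, and each side's frame secrets should mirror the other's, just as the asserts inside the helper check for alice. The test name and the pytest.mark.asyncio marker are assumptions about how the surrounding suite is run.

import pytest


@pytest.mark.asyncio
async def test_directly_linked_paragon_peers():
    alice, bob = await get_directly_linked_peers_without_handshake()
    # One peer's egress MAC should match the other peer's ingress MAC,
    # mirroring the checks the helper already performs on alice.
    assert alice.egress_mac.digest() == bob.ingress_mac.digest()
    assert alice.ingress_mac.digest() == bob.egress_mac.digest()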
def _test() -> None:
    import argparse
    from pathlib import Path
    import signal

    from hvm.chains.ropsten import RopstenChain, ROPSTEN_GENESIS_HEADER

    from hp2p import ecies
    from hp2p.constants import ROPSTEN_BOOTNODES

    from helios.utils.chains import load_nodekey

    from tests.helios.core.integration_test_helpers import (
        FakeAsyncLevelDB, FakeAsyncHeaderDB, FakeAsyncChainDB, FakeAsyncRopstenChain)

    parser = argparse.ArgumentParser()
    parser.add_argument('-db', type=str, required=True)
    parser.add_argument('-debug', action="store_true")
    parser.add_argument('-bootnodes', type=str, default=[])
    parser.add_argument('-nodekey', type=str)

    args = parser.parse_args()

    logging.basicConfig(level=logging.INFO, format='%(asctime)s %(levelname)s: %(message)s',
                        datefmt='%H:%M:%S')
    log_level = logging.INFO
    if args.debug:
        log_level = logging.DEBUG

    loop = asyncio.get_event_loop()
    db = FakeAsyncLevelDB(args.db)
    headerdb = FakeAsyncHeaderDB(db)
    chaindb = FakeAsyncChainDB(db)
    chaindb.persist_header(ROPSTEN_GENESIS_HEADER)
    chain = FakeAsyncRopstenChain(db)

    # NOTE: Since we may create a different priv/pub key pair every time we run this, remote nodes
    # may try to establish a connection using the pubkey from one of our previous runs, which will
    # result in lots of DecryptionErrors in receive_handshake().
    if args.nodekey:
        privkey = load_nodekey(Path(args.nodekey))
    else:
        privkey = ecies.generate_privkey()

    port = 30303
    if args.bootnodes:
        bootstrap_nodes = args.bootnodes.split(',')
    else:
        bootstrap_nodes = ROPSTEN_BOOTNODES
    bootstrap_nodes = [Node.from_uri(enode) for enode in bootstrap_nodes]

    server = FullServer(
        privkey,
        port,
        chain,
        chaindb,
        headerdb,
        db,
        RopstenChain.network_id,
        bootstrap_nodes=bootstrap_nodes,
    )
    server.logger.setLevel(log_level)

    sigint_received = asyncio.Event()
    for sig in [signal.SIGINT, signal.SIGTERM]:
        loop.add_signal_handler(sig, sigint_received.set)

    async def exit_on_sigint() -> None:
        await sigint_received.wait()
        await server.cancel()
        loop.stop()

    loop.set_debug(True)
    asyncio.ensure_future(exit_on_sigint())
    asyncio.ensure_future(server.run())
    loop.run_forever()
    loop.close()