Example #1
def _test():
    import argparse
    from evm.p2p import ecies
    from evm.chains.ropsten import RopstenChain, ROPSTEN_GENESIS_HEADER
    from evm.db.backends.level import LevelDB
    from evm.db.backends.memory import MemoryDB
    logging.basicConfig(level=logging.INFO,
                        format='%(levelname)s: %(message)s')

    parser = argparse.ArgumentParser()
    parser.add_argument('-db', type=str, required=True)
    parser.add_argument('-root-hash',
                        type=str,
                        required=True,
                        help='Hex encoded root hash')
    args = parser.parse_args()

    chaindb = BaseChainDB(MemoryDB())
    chaindb.persist_header_to_db(ROPSTEN_GENESIS_HEADER)
    peer_pool = PeerPool(ETHPeer, chaindb, RopstenChain.network_id,
                         ecies.generate_privkey())
    asyncio.ensure_future(peer_pool.run())

    state_db = LevelDB(args.db)
    root_hash = decode_hex(args.root_hash)
    downloader = StateDownloader(state_db, root_hash, peer_pool)
    loop = asyncio.get_event_loop()
    try:
        loop.run_until_complete(downloader.run())
    except KeyboardInterrupt:
        pass

    loop.run_until_complete(downloader.stop())
    loop.run_until_complete(peer_pool.stop())
    loop.close()
Example #2
    def from_genesis_header(cls,
                            chaindb: BaseChainDB,
                            genesis_header: BlockHeader) -> 'BaseChain':
        """
        Initializes the chain from the genesis header.
        """
        chaindb.persist_header(genesis_header)
        return cls(chaindb)
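
For orientation, a minimal hypothetical call site for from_genesis_header, reusing the module paths and the Ropsten genesis header seen in the other examples; treat it as a sketch rather than canonical usage.

# Sketch only: assumes the imports used elsewhere on this page.
from evm.chains.ropsten import RopstenChain, ROPSTEN_GENESIS_HEADER
from evm.db.backends.memory import MemoryDB
from evm.db.chain import BaseChainDB

chaindb = BaseChainDB(MemoryDB())
chain = RopstenChain.from_genesis_header(chaindb, ROPSTEN_GENESIS_HEADER)
# The genesis header should now be the canonical head.
assert chaindb.get_canonical_head() == ROPSTEN_GENESIS_HEADER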
Example #3
def chaindb_mainnet_100():
    """Return a chaindb with mainnet headers numbered from 0 to 100."""
    here = os.path.dirname(__file__)
    with open(os.path.join(here, 'testdata', 'sample_1000_headers_rlp'), 'rb') as headers_file:
        headers_rlp = headers_file.read()
    headers = rlp.decode(headers_rlp, sedes=sedes.CountableList(BlockHeader))
    chaindb = BaseChainDB(MemoryDB())
    for i in range(0, 101):
        chaindb.persist_header_to_db(headers[i])
    return chaindb
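
A hedged usage sketch for the fixture above: after it runs, the canonical head should be header number 100, which can be checked with get_canonical_head as used elsewhere on this page.

# Illustrative only; chaindb_mainnet_100() is the fixture defined above.
chaindb = chaindb_mainnet_100()
assert chaindb.get_canonical_head().block_number == 100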
Example #4
    def create_block(cls, transaction_packages, prev_hashes, coinbase,
                     parent_header):
        """
        Create a block with transaction witness
        """
        block = cls.generate_block_from_parent_header_and_coinbase(
            parent_header,
            coinbase,
        )

        recent_trie_nodes = {}
        receipts = []
        for (transaction, transaction_witness) in transaction_packages:
            transaction_witness.update(recent_trie_nodes)
            witness_db = BaseChainDB(MemoryDB(transaction_witness))

            execution_context = ExecutionContext.from_block_header(
                block.header, prev_hashes)
            vm_state = cls.get_state_class()(
                chaindb=witness_db,
                execution_context=execution_context,
                state_root=block.header.state_root,
                receipts=receipts,
            )
            computation, result_block, _ = vm_state.apply_transaction(
                transaction=transaction,
                block=block,
                is_stateless=True,
            )

            if not computation.is_error:
                block = result_block
                receipts = computation.vm_state.receipts
                recent_trie_nodes.update(
                    computation.vm_state.access_logs.writes)
            else:
                pass

        # Finalize
        witness_db = BaseChainDB(MemoryDB(recent_trie_nodes))
        execution_context = ExecutionContext.from_block_header(
            block.header, prev_hashes)
        vm_state = cls.get_state_class()(
            chaindb=witness_db,
            execution_context=execution_context,
            state_root=block.header.state_root,
            receipts=receipts,
        )
        block = vm_state.finalize_block(block)

        return block
Example #5
def make_trie_root_and_nodes(transactions, trie_class=HexaryTrie):
    chaindb = BaseChainDB(MemoryDB())
    db = chaindb.db
    transaction_db = trie_class(db)

    for index, transaction in enumerate(transactions):
        index_key = rlp.encode(index, sedes=rlp.sedes.big_endian_int)
        transaction_db[index_key] = rlp.encode(transaction)

    return transaction_db.root_hash, transaction_db.db.wrapped_db.kv_store
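
As a rough usage sketch (not part of the original snippet), the returned root can be checked against a block header's transaction_root, and the returned node dict can seed an in-memory witness database like those used in the other examples.

# Sketch: `block` is assumed to be a py-evm block object exposing .transactions
# and .header.transaction_root, as seen elsewhere on this page.
root_hash, trie_nodes = make_trie_root_and_nodes(block.transactions)
assert root_hash == block.header.transaction_root
witness_db = BaseChainDB(MemoryDB(trie_nodes))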
Example #6
def setup_tester_chain():
    from evm.chains.tester import MainnetTesterChain
    from evm.db import get_db_backend
    from evm.db.chain import BaseChainDB

    db = BaseChainDB(get_db_backend())
    genesis_params = get_default_genesis_params()
    account_keys = get_default_account_keys()
    genesis_state = generate_genesis_state(account_keys)

    chain = MainnetTesterChain.from_genesis(db, genesis_params, genesis_state)
    return account_keys, chain
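
A small, assumed usage sketch: the returned chain behaves like the Chain objects in the other examples, so the genesis block can be read back directly.

# Hypothetical check, not part of the original snippet.
account_keys, chain = setup_tester_chain()
genesis_block = chain.get_canonical_block_by_number(0)
assert genesis_block.number == 0
assert len(account_keys) > 0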
Example #7
    def from_genesis(cls,
                     chaindb: BaseChainDB,
                     genesis_params: Dict[str, HeaderParams],
                     genesis_state: AccountState=None) -> 'BaseChain':
        """
        Initializes the Chain from a genesis state.
        """
        genesis_vm_class = cls.get_vm_class_for_block_number(BlockNumber(0))
        account_db = genesis_vm_class.get_state_class().get_account_db_class()(
            chaindb.db,
            BLANK_ROOT_HASH,
        )

        if genesis_state is None:
            genesis_state = {}

        # mutation
        apply_state_dict(account_db, genesis_state)
        account_db.persist()

        if 'state_root' not in genesis_params:
            # If the genesis state_root was not specified, use the value
            # computed from the initialized state database.
            genesis_params = assoc(genesis_params, 'state_root', account_db.state_root)
        elif genesis_params['state_root'] != account_db.state_root:
            # If the genesis state_root was specified, validate that it matches
            # the computed state from the initialized state database.
            raise ValidationError(
                "The provided genesis state root does not match the computed "
                "genesis state root.  Got {0}.  Expected {1}".format(
                    account_db.state_root,
                    genesis_params['state_root'],
                )
            )

        genesis_header = BlockHeader(**genesis_params)
        genesis_chain = cls(chaindb, genesis_header)
        chaindb.persist_block(genesis_chain.get_block())
        return cls.from_genesis_header(chaindb, genesis_header)
Example #8
def new_chain_from_fixture(fixture):
    db = BaseChainDB(get_db_backend())

    vm_config = chain_vm_configuration(fixture)

    ChainFromFixture = MainnetChain.configure(
        'ChainFromFixture',
        vm_configuration=vm_config,
    )

    return ChainFromFixture.from_genesis(
        db,
        genesis_params=genesis_params_from_fixture(fixture),
        genesis_state=fixture['pre'],
    )
Example #9
def test_transaction_fixtures(fixture):
    header = BlockHeader(1, fixture['blocknumber'], 100)
    chain = MainnetChain(BaseChainDB(get_db_backend()), header=header)
    vm = chain.get_vm()
    TransactionClass = vm.get_transaction_class()

    if 'sender' in fixture:
        transaction = rlp.decode(fixture['rlp'], sedes=TransactionClass)
        expected = normalize_signed_transaction(fixture['transaction'])

        assert transaction.nonce == expected['nonce']
        assert transaction.gas_price == expected['gasPrice']
        assert transaction.gas == expected['gasLimit']
        assert transaction.to == expected['to']
        assert transaction.value == expected['value']
        assert transaction.data == expected['data']
        assert transaction.v == expected['v']
        assert transaction.r == expected['r']
        assert transaction.s == expected['s']

        sender = to_canonical_address(fixture['sender'])

        try:
            assert transaction.sender == sender
        except BadSignature:
            assert not (27 <= transaction.v <= 34)
    else:
        # check RLP correctness
        try:
            transaction = rlp.decode(fixture['rlp'], sedes=TransactionClass)
        # fixture normalization changes the fixture key from rlp to rlpHex
        except KeyError:
            assert fixture['rlpHex']
            return
        # rlp is a list of bytes when it shouldn't be
        except TypeError as err:
            assert err.args == ("'bytes' object cannot be interpreted as an integer",)
            return
        # rlp is invalid or not in the correct form
        except (rlp.exceptions.ObjectDeserializationError, rlp.exceptions.DecodingError):
            return

        # check parameter correctness
        try:
            transaction.validate()
        except ValidationError:
            return
Example #10
def _test():
    import argparse
    import signal
    from evm.chains.mainnet import (MAINNET_GENESIS_HEADER,
                                    MAINNET_VM_CONFIGURATION,
                                    MAINNET_NETWORK_ID)
    from evm.chains.ropsten import ROPSTEN_GENESIS_HEADER, ROPSTEN_NETWORK_ID
    from evm.db.backends.level import LevelDB
    from evm.exceptions import CanonicalHeadNotFound
    from evm.p2p import ecies
    from evm.p2p.integration_test_helpers import LocalGethPeerPool

    logging.basicConfig(level=logging.INFO,
                        format='%(levelname)s: %(message)s')
    logging.getLogger("evm.p2p.lightchain.LightChain").setLevel(logging.DEBUG)

    parser = argparse.ArgumentParser()
    parser.add_argument('-db', type=str, required=True)
    parser.add_argument('-mainnet', action="store_true")
    parser.add_argument('-local-geth', action="store_true")
    args = parser.parse_args()

    GENESIS_HEADER = ROPSTEN_GENESIS_HEADER
    NETWORK_ID = ROPSTEN_NETWORK_ID
    if args.mainnet:
        GENESIS_HEADER = MAINNET_GENESIS_HEADER
        NETWORK_ID = MAINNET_NETWORK_ID
    DemoLightChain = LightChain.configure(
        'DemoLightChain',
        vm_configuration=MAINNET_VM_CONFIGURATION,
        network_id=NETWORK_ID,
    )

    chaindb = BaseChainDB(LevelDB(args.db))
    if args.local_geth:
        peer_pool = LocalGethPeerPool(LESPeer, chaindb, NETWORK_ID,
                                      ecies.generate_privkey())
    else:
        peer_pool = PeerPool(LESPeer, chaindb, NETWORK_ID,
                             ecies.generate_privkey())
    try:
        chaindb.get_canonical_head()
    except CanonicalHeadNotFound:
        # We're starting with a fresh DB.
        chain = DemoLightChain.from_genesis_header(chaindb, GENESIS_HEADER,
                                                   peer_pool)
    else:
        # We're reusing an existing db.
        chain = DemoLightChain(chaindb, peer_pool)

    asyncio.ensure_future(peer_pool.run())
    loop = asyncio.get_event_loop()

    async def run():
        # chain.run() will run in a loop until stop() (registered as SIGINT/SIGTERM handler) is
        # called, at which point it returns and we cleanly stop the pool and chain.
        await chain.run()
        await peer_pool.stop()
        await chain.stop()

    def stop():
        chain._should_stop.set()

    for sig in [signal.SIGINT, signal.SIGTERM]:
        loop.add_signal_handler(sig, stop)

    loop.run_until_complete(run())
    loop.close()
Example #11
    nodekey = keys.PrivateKey(
        decode_hex(
            "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8")
    )
    remoteid = nodekey.public_key.to_hex()
    parser = argparse.ArgumentParser()
    parser.add_argument('-remoteid', type=str, default=remoteid)
    parser.add_argument('-db', type=str)
    parser.add_argument('-mainnet', action="store_true")
    args = parser.parse_args()

    remote = Node(keys.PublicKey(decode_hex(args.remoteid)),
                  Address('127.0.0.1', 30303, 30303))

    if args.db is not None:
        chaindb = BaseChainDB(LevelDB(args.db))
    else:
        chaindb = BaseChainDB(MemoryDB())

    genesis_header = ROPSTEN_GENESIS_HEADER
    chain_class = RopstenChain
    if args.mainnet:
        genesis_header = MAINNET_GENESIS_HEADER
        chain_class = MainnetChain

    try:
        chaindb.get_canonical_head()
    except CanonicalHeadNotFound:
        # We're starting with a fresh DB.
        chain = chain_class.from_genesis_header(chaindb, genesis_header)
    else:
Example #12
def chain():
    """
    Return a Chain object containing just the genesis block.

    The Chain's state includes one funded account, which can be found in the funded_address in the
    chain itself.

    This Chain will perform all validations when importing new blocks, so only valid and finalized
    blocks can be used with it. If you want to test importing arbitrarily constructed, not
    finalized blocks, use the chain_without_block_validation fixture instead.
    """
    genesis_params = {
        "bloom": 0,
        "coinbase": to_canonical_address("8888f1f195afa192cfee860698584c030f4c9db1"),
        "difficulty": 131072,
        "extra_data": b"B",
        "gas_limit": 3141592,
        "gas_used": 0,
        "mix_hash": decode_hex(
            "56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"),
        "nonce": decode_hex("0102030405060708"),
        "block_number": 0,
        "parent_hash": decode_hex(
            "0000000000000000000000000000000000000000000000000000000000000000"),
        "receipt_root": decode_hex(
            "56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"),
        "state_root": decode_hex(
            "cafd881ab193703b83816c49ff6c2bf6ba6f464a1be560c42106128c8dbc35e7"),
        "timestamp": 1422494849,
        "transaction_root": decode_hex(
            "56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"),
        "uncles_hash": decode_hex(
            "1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347"),
    }
    funded_addr = to_canonical_address(
        "a94f5374fce5edbc8e2a8697c15331677e6ebf0b")
    initial_balance = 10000000000
    genesis_state = {
        funded_addr: {
            "balance": initial_balance,
            "nonce": 0,
            "code": b"",
            "storage": {}
        }
    }
    klass = Chain.configure(name='TestChain',
                            vm_configuration=((constants.GENESIS_BLOCK_NUMBER,
                                               FrontierVM), ))
    chain = klass.from_genesis(BaseChainDB(get_db_backend()), genesis_params,
                               genesis_state)
    chain.funded_address = funded_addr
    chain.funded_address_initial_balance = initial_balance
    return chain
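
A hypothetical pytest test consuming the fixture above; state_db(read_only=True) and get_balance are used exactly as in the other examples, while the fixture injection itself is assumed.

def test_chain_fixture(chain):
    # The funded account set up in the genesis state should still hold its initial balance.
    with chain.get_vm().state_db(read_only=True) as state_db:
        assert state_db.get_balance(chain.funded_address) == \
            chain.funded_address_initial_balance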
Example #13
def chain_without_block_validation():
    """
    Return a Chain object containing just the genesis block.

    This Chain does not perform any validation when importing new blocks.

    The Chain's state includes one funded account and a private key for it, which can be found in
    the funded_address and private_keys variables in the chain itself.
    """
    # Disable block validation so that we don't need to construct finalized blocks.
    overrides = {
        'import_block': import_block_without_validation,
        'validate_block': lambda self, block: None,
    }
    klass = Chain.configure(
        name='TestChainWithoutBlockValidation',
        vm_configuration=((constants.GENESIS_BLOCK_NUMBER, FrontierVM), ),
        **overrides,
    )
    private_key = KeyAPI().PrivateKey(
        decode_hex(
            '0x45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8'
        ))
    funded_addr = private_key.public_key.to_canonical_address()
    initial_balance = 100000000
    genesis_params = {
        'block_number': constants.GENESIS_BLOCK_NUMBER,
        'difficulty': constants.GENESIS_DIFFICULTY,
        'gas_limit': constants.GENESIS_GAS_LIMIT,
        'parent_hash': constants.GENESIS_PARENT_HASH,
        'coinbase': constants.GENESIS_COINBASE,
        'nonce': constants.GENESIS_NONCE,
        'mix_hash': constants.GENESIS_MIX_HASH,
        'extra_data': constants.GENESIS_EXTRA_DATA,
        'timestamp': 1501851927,
        'state_root': decode_hex(
            '0x9d354f9b5ba851a35eced279ef377111387197581429cfcc7f744ef89a30b5d4'),
    }
    genesis_state = {
        funded_addr: {
            'balance': initial_balance,
            'nonce': 0,
            'code': b'',
            'storage': {},
        }
    }
    chain = klass.from_genesis(BaseChainDB(get_db_backend()), genesis_params,
                               genesis_state)
    chain.funded_address = funded_addr
    chain.funded_address_initial_balance = initial_balance
    chain.funded_address_private_key = private_key
    return chain
Example #14
def test_blockchain_fixtures(fixture, chain_vm_configuration):
    genesis_params = {
        'parent_hash': fixture['genesisBlockHeader']['parentHash'],
        'uncles_hash': fixture['genesisBlockHeader']['uncleHash'],
        'coinbase': fixture['genesisBlockHeader']['coinbase'],
        'state_root': fixture['genesisBlockHeader']['stateRoot'],
        'transaction_root': fixture['genesisBlockHeader']['transactionsTrie'],
        'receipt_root': fixture['genesisBlockHeader']['receiptTrie'],
        'bloom': fixture['genesisBlockHeader']['bloom'],
        'difficulty': fixture['genesisBlockHeader']['difficulty'],
        'block_number': fixture['genesisBlockHeader']['number'],
        'gas_limit': fixture['genesisBlockHeader']['gasLimit'],
        'gas_used': fixture['genesisBlockHeader']['gasUsed'],
        'timestamp': fixture['genesisBlockHeader']['timestamp'],
        'extra_data': fixture['genesisBlockHeader']['extraData'],
        'mix_hash': fixture['genesisBlockHeader']['mixHash'],
        'nonce': fixture['genesisBlockHeader']['nonce'],
    }
    expected_genesis_header = BlockHeader(**genesis_params)

    # TODO: find out if this is supposed to pass?
    # if 'genesisRLP' in fixture:
    #     assert rlp.encode(genesis_header) == fixture['genesisRLP']
    db = BaseChainDB(get_db_backend())

    ChainForTesting = MainnetChain.configure(
        'ChainForTesting',
        vm_configuration=chain_vm_configuration,
    )

    chain = ChainForTesting.from_genesis(
        db,
        genesis_params=genesis_params,
        genesis_state=fixture['pre'],
    )

    genesis_block = chain.get_canonical_block_by_number(0)
    genesis_header = genesis_block.header

    assert_rlp_equal(genesis_header, expected_genesis_header)

    # 1 - mine the genesis block
    # 2 - loop over blocks:
    #     - apply transactions
    #     - mine block
    # 4 - profit!!

    for block_data in fixture['blocks']:
        should_be_good_block = 'blockHeader' in block_data

        if 'rlp_error' in block_data:
            assert not should_be_good_block
            continue

        # The block to import may be in a different block-class-range than the
        # chain's current one, so we use the block number specified in the
        # fixture to look up the correct block class.
        if should_be_good_block:
            block_number = block_data['blockHeader']['number']
            block_class = chain.get_vm_class_for_block_number(
                block_number).get_block_class()
        else:
            block_class = chain.get_vm().get_block_class()

        try:
            block = rlp.decode(block_data['rlp'],
                               sedes=block_class,
                               chaindb=chain.chaindb)
        except (TypeError, rlp.DecodingError, rlp.DeserializationError) as err:
            assert not should_be_good_block, "Block should be good: {0}".format(
                err)
            continue

        try:
            mined_block = chain.import_block(block)
        except ValidationError as err:
            assert not should_be_good_block, "Block should be good: {0}".format(
                err)
            continue
        else:
            assert_rlp_equal(mined_block, block)
            assert should_be_good_block, "Block should have caused a validation error"

    latest_block_hash = chain.get_canonical_block_by_number(
        chain.get_block().number - 1).hash
    assert latest_block_hash == fixture['lastblockhash']

    with chain.get_vm().state_db(read_only=True) as state_db:
        verify_state_db(fixture['postState'], state_db)
Example #15
def test_vm_fixtures(fixture, vm_class):
    chaindb = BaseChainDB(get_db_backend())
    header = BlockHeader(
        coinbase=fixture['env']['currentCoinbase'],
        difficulty=fixture['env']['currentDifficulty'],
        block_number=fixture['env']['currentNumber'],
        gas_limit=fixture['env']['currentGasLimit'],
        timestamp=fixture['env']['currentTimestamp'],
    )
    vm = vm_class(header=header, chaindb=chaindb)
    vm_state = vm.state
    with vm_state.state_db() as state_db:
        setup_state_db(fixture['pre'], state_db)
        code = state_db.get_code(fixture['exec']['address'])
    # Update state_root manually
    vm.block.header.state_root = vm_state.state_root

    message = Message(
        origin=fixture['exec']['origin'],
        to=fixture['exec']['address'],
        sender=fixture['exec']['caller'],
        value=fixture['exec']['value'],
        data=fixture['exec']['data'],
        code=code,
        gas=fixture['exec']['gas'],
        gas_price=fixture['exec']['gasPrice'],
    )
    computation = vm.state.get_computation(message).apply_computation(
        vm.state,
        message,
    )
    # Update state_root manually
    vm.block.header.state_root = computation.vm_state.state_root

    if 'post' in fixture:
        #
        # Success checks
        #
        assert not computation.is_error

        log_entries = computation.get_log_entries()
        if 'logs' in fixture:
            actual_logs_hash = hash_log_entries(log_entries)
            expected_logs_hash = fixture['logs']
            assert expected_logs_hash == actual_logs_hash
        elif log_entries:
            raise AssertionError("Got log entries: {0}".format(log_entries))

        expected_output = fixture['out']
        assert computation.output == expected_output

        gas_meter = computation.gas_meter

        expected_gas_remaining = fixture['gas']
        actual_gas_remaining = gas_meter.gas_remaining
        gas_delta = actual_gas_remaining - expected_gas_remaining
        assert gas_delta == 0, "Gas difference: {0}".format(gas_delta)

        call_creates = fixture.get('callcreates', [])
        assert len(computation.children) == len(call_creates)

        call_creates = fixture.get('callcreates', [])
        for child_computation, created_call in zip(computation.children,
                                                   call_creates):
            to_address = created_call['destination']
            data = created_call['data']
            gas_limit = created_call['gasLimit']
            value = created_call['value']

            assert child_computation.msg.to == to_address
            assert data == child_computation.msg.data or child_computation.msg.code
            assert gas_limit == child_computation.msg.gas
            assert value == child_computation.msg.value
        post_state = fixture['post']
    else:
        #
        # Error checks
        #
        assert computation.is_error
        assert isinstance(computation._error, VMError)
        post_state = fixture['pre']

    with vm.state.state_db(read_only=True) as state_db:
        verify_state_db(post_state, state_db)
Example #16
async def get_directly_linked_peers(chaindb1=None, received_msg_callback1=None,
                                    chaindb2=None, received_msg_callback2=None):
    """Create two LESPeers with their readers/writers connected directly.

    The first peer's reader will write directly to the second's writer, and vice-versa.
    """
    if chaindb1 is None:
        chaindb1 = BaseChainDB(MemoryDB())
        chaindb1.persist_header_to_db(MAINNET_GENESIS_HEADER)
    if chaindb2 is None:
        chaindb2 = BaseChainDB(MemoryDB())
        chaindb2.persist_header_to_db(MAINNET_GENESIS_HEADER)
    peer1_private_key = ecies.generate_privkey()
    peer2_private_key = ecies.generate_privkey()
    peer1_remote = kademlia.Node(
        peer2_private_key.public_key, kademlia.Address('0.0.0.0', 0, 0))
    peer2_remote = kademlia.Node(
        peer1_private_key.public_key, kademlia.Address('0.0.0.0', 0, 0))
    initiator = auth.HandshakeInitiator(peer1_remote, peer1_private_key)
    peer2_reader = asyncio.StreamReader()
    peer1_reader = asyncio.StreamReader()
    # Link the peer1's writer to the peer2's reader, and the peer2's writer to the
    # peer1's reader.
    peer2_writer = type(
        "mock-streamwriter",
        (object,),
        {"write": peer1_reader.feed_data,
         "close": lambda: None}
    )
    peer1_writer = type(
        "mock-streamwriter",
        (object,),
        {"write": peer2_reader.feed_data,
         "close": lambda: None}
    )

    peer1, peer2 = None, None
    handshake_finished = asyncio.Event()

    async def do_handshake():
        nonlocal peer1, peer2
        aes_secret, mac_secret, egress_mac, ingress_mac = await auth._handshake(
            initiator, peer1_reader, peer1_writer)

        # Need to copy those before we pass them on to the Peer constructor because they're
        # mutable. Also, the 2nd peer's ingress/egress MACs are reversed from the first peer's.
        peer2_ingress = egress_mac.copy()
        peer2_egress = ingress_mac.copy()

        peer1 = LESPeerServing(
            remote=peer1_remote, privkey=peer1_private_key, reader=peer1_reader,
            writer=peer1_writer, aes_secret=aes_secret, mac_secret=mac_secret,
            egress_mac=egress_mac, ingress_mac=ingress_mac, chaindb=chaindb1,
            network_id=1, received_msg_callback=received_msg_callback1)

        peer2 = LESPeerServing(
            remote=peer2_remote, privkey=peer2_private_key, reader=peer2_reader,
            writer=peer2_writer, aes_secret=aes_secret, mac_secret=mac_secret,
            egress_mac=peer2_egress, ingress_mac=peer2_ingress, chaindb=chaindb2,
            network_id=1, received_msg_callback=received_msg_callback2)

        handshake_finished.set()

    asyncio.ensure_future(do_handshake())

    responder = auth.HandshakeResponder(peer2_remote, peer2_private_key)
    auth_msg = await peer2_reader.read(constants.ENCRYPTED_AUTH_MSG_LEN)
    peer1_ephemeral_pubkey, peer1_nonce = responder.decode_authentication(auth_msg)

    peer2_nonce = keccak(os.urandom(constants.HASH_LEN))
    auth_ack_msg = responder.create_auth_ack_message(peer2_nonce)
    auth_ack_ciphertext = responder.encrypt_auth_ack_message(auth_ack_msg)
    peer2_writer.write(auth_ack_ciphertext)

    await handshake_finished.wait()

    # Perform the base protocol (P2P) handshake.
    peer1.base_protocol.send_handshake()
    peer2.base_protocol.send_handshake()
    msg1 = await peer1.read_msg()
    peer1.process_msg(msg1)
    msg2 = await peer2.read_msg()
    peer2.process_msg(msg2)

    # Now send the handshake msg for each enabled sub-protocol.
    for proto in peer1.enabled_sub_protocols:
        proto.send_handshake(peer1._local_chain_info)
    for proto in peer2.enabled_sub_protocols:
        proto.send_handshake(peer2._local_chain_info)

    return peer1, peer2
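
Because get_directly_linked_peers is a coroutine, callers must await it from a running event loop. A minimal sketch, assuming the peers can be shut down with the same stop() coroutine seen in other examples on this page:

async def _linked_peers_demo():
    # Both peers default to in-memory chain databases seeded with the mainnet
    # genesis header (see the defaults at the top of get_directly_linked_peers).
    peer1, peer2 = await get_directly_linked_peers()
    # ... exchange sub-protocol messages between peer1 and peer2 here ...
    await peer1.stop()
    await peer2.stop()

asyncio.get_event_loop().run_until_complete(_linked_peers_demo())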
Example #17
def test_create_block(chain):

    # (1) Empty block.
    # block = vm.mine_block()
    block0 = chain.import_block(chain.get_vm().block)
    initial_state_root = block0.header.state_root

    # (2) Use VM.apply_transaction to get the witness data
    chain1 = copy.deepcopy(chain)
    vm1 = chain1.get_vm()

    # The first transaction
    vm = chain.get_vm()
    recipient1 = decode_hex('0x1111111111111111111111111111111111111111')
    amount = 100
    from_ = chain.funded_address
    tx1 = new_transaction(vm1, from_, recipient1, amount,
                          chain.funded_address_private_key)

    # Get the witness of tx1
    computation, _ = vm1.apply_transaction(tx1)
    transaction_witness1 = computation.vm_state.access_logs.reads

    # The second transaction
    recipient2 = decode_hex('0x2222222222222222222222222222222222222222')
    tx2 = new_transaction(vm1, from_, recipient2, amount,
                          chain.funded_address_private_key)

    # Get the witness of tx2
    computation, block = vm1.apply_transaction(tx2)
    transaction_witness2 = computation.vm_state.access_logs.reads

    # Check AccessLogs
    witness_db = BaseChainDB(MemoryDB(computation.vm_state.access_logs.writes))
    state_db = witness_db.get_state_db(block.header.state_root, read_only=True)
    assert state_db.get_balance(recipient2) == amount
    with pytest.raises(KeyError):
        state_db.get_balance(recipient1)

    # Create a block and import to chain
    coinbase = decode_hex('0x3333333333333333333333333333333333333333')
    vm1.block.header.coinbase = coinbase
    assert len(vm1.block.transactions) == 2
    block1 = chain1.import_block(vm1.block)

    # Check the block
    vm1 = chain1.get_vm()
    assert block1.header.coinbase == coinbase
    assert len(block1.transactions) == 2
    assert len(block1.get_receipts(vm1.chaindb)) == 2
    with vm1.state.state_db(read_only=True) as state_db1:
        assert state_db1.root_hash == block1.header.state_root

    # (3) Try to create a block by witnesses
    vm2 = copy.deepcopy(vm)
    transaction_packages = [
        (tx1, transaction_witness1),
        (tx2, transaction_witness2),
    ]
    prev_hashes = vm2.get_prev_hashes(
        last_block_hash=block0.hash,
        db=vm2.chaindb,
    )
    parent_header = block0.header

    # Create a block
    block2 = vm2.create_block(
        transaction_packages=transaction_packages,
        prev_hashes=prev_hashes,
        coinbase=coinbase,
        parent_header=parent_header,
    )

    # Check the block
    assert len(block2.transactions) == 2
    assert block2.header.block_number == 2
    assert block2.header.coinbase == coinbase

    # Check if block2 == block1
    assert block2.hash == block1.hash

    # Check if the given parameters are changed
    assert block0.header.state_root == initial_state_root
    assert block0.header.block_number == 1
Example #18
def chaindb():
    return BaseChainDB(get_db_backend())
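
A hypothetical pytest test consuming this fixture; persist_header_to_db and get_canonical_head are used exactly as in the other examples, while the equality check on the returned header is an assumption.

def test_chaindb_fixture(chaindb):
    # Persist the mainnet genesis header and read it back as the canonical head.
    chaindb.persist_header_to_db(MAINNET_GENESIS_HEADER)
    assert chaindb.get_canonical_head() == MAINNET_GENESIS_HEADER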
Example #19
def _test():
    """
    Create a Peer instance connected to a local geth instance and log messages exchanged with it.

    Use the following command line to run geth:

        ./build/bin/geth -vmodule p2p=4,p2p/discv5=0,eth/*=0 \
          -nodekeyhex 45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8 \
          -testnet -lightserv 90
    """
    import argparse
    from evm.chains.ropsten import RopstenChain, ROPSTEN_GENESIS_HEADER
    from evm.db.backends.memory import MemoryDB
    logging.basicConfig(level=logging.DEBUG,
                        format='%(levelname)s: %(message)s')

    # The default remoteid can be used if you pass nodekeyhex as above to geth.
    nodekey = keys.PrivateKey(
        decode_hex(
            "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8")
    )
    remoteid = nodekey.public_key.to_hex()
    parser = argparse.ArgumentParser()
    parser.add_argument('-remoteid', type=str, default=remoteid)
    parser.add_argument('-light',
                        action='store_true',
                        help="Connect as a light node")
    args = parser.parse_args()

    peer_class = ETHPeer  # type: ignore
    if args.light:
        peer_class = LESPeer  # type: ignore
    remote = Node(keys.PublicKey(decode_hex(args.remoteid)),
                  Address('127.0.0.1', 30303, 30303))
    chaindb = BaseChainDB(MemoryDB())
    chaindb.persist_header_to_db(ROPSTEN_GENESIS_HEADER)
    network_id = RopstenChain.network_id
    loop = asyncio.get_event_loop()
    try:
        peer = loop.run_until_complete(
            asyncio.wait_for(
                handshake(remote, ecies.generate_privkey(), peer_class,
                          chaindb, network_id), HANDSHAKE_TIMEOUT))
        asyncio.ensure_future(peer.do_sub_proto_handshake())

        async def request_stuff():
            # Request some stuff from ropsten's block 2440319
            # (https://ropsten.etherscan.io/block/2440319), just as a basic test.
            nonlocal peer
            block_hash = decode_hex(
                '0x59af08ab31822c992bb3dad92ddb68d820aa4c69e9560f07081fa53f1009b152'
            )
            if peer_class == ETHPeer:
                peer = cast(ETHPeer, peer)
                peer.sub_proto.send_get_block_headers(block_hash, 1)
                peer.sub_proto.send_get_block_bodies([block_hash])
                peer.sub_proto.send_get_receipts([block_hash])
            else:
                peer = cast(LESPeer, peer)
                request_id = 1
                peer.sub_proto.send_get_block_headers(block_hash, 1,
                                                      request_id)
                peer.sub_proto.send_get_block_bodies([block_hash],
                                                     request_id + 1)
                peer.sub_proto.send_get_receipts(block_hash, request_id + 2)

        asyncio.ensure_future(request_stuff())
        loop.run_until_complete(peer.run())
    except KeyboardInterrupt:
        pass

    loop.run_until_complete(peer.stop())
    loop.close()
Example #20
def test_state_fixtures(fixture, fixture_vm_class):
    header = BlockHeader(
        coinbase=fixture['env']['currentCoinbase'],
        difficulty=fixture['env']['currentDifficulty'],
        block_number=fixture['env']['currentNumber'],
        gas_limit=fixture['env']['currentGasLimit'],
        timestamp=fixture['env']['currentTimestamp'],
        parent_hash=fixture['env']['previousHash'],
    )
    chaindb = BaseChainDB(get_db_backend())
    vm = fixture_vm_class(header=header, chaindb=chaindb)

    with vm.state.state_db() as state_db:
        setup_state_db(fixture['pre'], state_db)

    if 'secretKey' in fixture['transaction']:
        unsigned_transaction = vm.create_unsigned_transaction(
            nonce=fixture['transaction']['nonce'],
            gas_price=fixture['transaction']['gasPrice'],
            gas=fixture['transaction']['gasLimit'],
            to=fixture['transaction']['to'],
            value=fixture['transaction']['value'],
            data=fixture['transaction']['data'],
        )
        private_key = keys.PrivateKey(fixture['transaction']['secretKey'])
        transaction = unsigned_transaction.as_signed_transaction(
            private_key=private_key)
    elif 'vrs' in fixture['transaction']:
        v, r, s = (
            fixture['transaction']['v'],
            fixture['transaction']['r'],
            fixture['transaction']['s'],
        )
        transaction = vm.create_transaction(
            nonce=fixture['transaction']['nonce'],
            gas_price=fixture['transaction']['gasPrice'],
            gas=fixture['transaction']['gasLimit'],
            to=fixture['transaction']['to'],
            value=fixture['transaction']['value'],
            data=fixture['transaction']['data'],
            v=v,
            r=r,
            s=s,
        )

    try:
        computation = vm.apply_transaction(transaction)
    except ValidationError as err:
        transaction_error = err
    else:
        transaction_error = False

    if not transaction_error:
        log_entries = computation.get_log_entries()
        actual_logs_hash = hash_log_entries(log_entries)
        if 'logs' in fixture['post']:
            expected_logs_hash = fixture['post']['logs']
            assert expected_logs_hash == actual_logs_hash
        elif log_entries:
            raise AssertionError("Got log {0} entries. hash:{1}".format(
                len(log_entries),
                actual_logs_hash,
            ))

        if 'out' in fixture:
            expected_output = fixture['out']
            if isinstance(expected_output, int):
                assert len(computation.output) == expected_output
            else:
                assert computation.output == expected_output

    assert vm.block.header.state_root == fixture['post']['hash']
Example #21
async def test_lightchain_integration(request, event_loop):
    """Test LightChain against a local geth instance.

    This test assumes a geth/ropsten instance is listening on 127.0.0.1:30303 and serving light
    clients. In order to achieve that, simply run it with the following command line:

        $ geth -nodekeyhex 45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8 \
               -testnet -lightserv 90
    """
    # TODO: Implement a pytest fixture that runs geth as above, so that we don't need to run it
    # manually.
    if not pytest.config.getoption("--integration"):
        pytest.skip("Not asked to run integration tests")

    chaindb = BaseChainDB(MemoryDB())
    chaindb.persist_header_to_db(ROPSTEN_GENESIS_HEADER)
    chain = IntegrationTestLightChain(chaindb)
    asyncio.ensure_future(chain.run())
    await asyncio.sleep(0)  # Yield control to give the LightChain a chance to start

    def finalizer():
        event_loop.run_until_complete(chain.stop())

    request.addfinalizer(finalizer)

    n = 11

    # Wait for the chain to sync a few headers.
    async def wait_for_header_sync(block_number):
        while chaindb.get_canonical_head().block_number < block_number:
            await asyncio.sleep(0.1)

    await asyncio.wait_for(wait_for_header_sync(n), 2)

    # https://ropsten.etherscan.io/block/11
    b = await chain.get_canonical_block_by_number(n)
    assert isinstance(b, FrontierBlock)
    assert b.number == 11
    assert encode_hex(b.hash) == (
        '0xda882aeff30f59eda9da2b3ace3023366ab9d4219b5a83cdd589347baae8678e')
    assert len(b.transactions) == 15
    assert isinstance(b.transactions[0], b.transaction_class)

    receipts = await chain.get_receipts(b.hash)
    assert len(receipts) == 15
    assert encode_hex(keccak(rlp.encode(receipts[0]))) == (
        '0xf709ed2c57efc18a1675e8c740f3294c9e2cb36ba7bb3b89d3ab4c8fef9d8860')

    head_info = list(chain._latest_head_info.values())[0]
    head = await chain.get_block_by_hash(head_info.block_hash)
    assert head.number == head_info.block_number

    # In order to answer queries for contract code, geth needs the state trie entry for the block
    # we specify in the query, but because of fast sync we can only assume it has that for recent
    # blocks, so we use the current head to lookup the code for the contract below.
    # https://ropsten.etherscan.io/address/0x95a48dca999c89e4e284930d9b9af973a7481287
    contract_addr = decode_hex('95a48dca999c89e4e284930d9b9af973a7481287')
    contract_code = await chain.get_contract_code(head.hash,
                                                  keccak(contract_addr))
    assert encode_hex(keccak(contract_code)) == (
        '0x1e0b2ad970b365a217c40bcf3582cbb4fcc1642d7a5dd7a82ae1e278e010123e')

    proof = await chain.get_proof(head.hash, keccak(contract_addr))
    assert proof != b''
Example #22
if __name__ == "__main__":
    import argparse
    from evm.p2p import ecies
    from evm.chains.ropsten import RopstenChain, ROPSTEN_GENESIS_HEADER
    from evm.db.chain import BaseChainDB
    from evm.db.backends.level import LevelDB
    from evm.db.backends.memory import MemoryDB
    logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')

    parser = argparse.ArgumentParser()
    parser.add_argument('-db', type=str, required=True)
    parser.add_argument('-root-hash', type=str, required=True, help='Hex encoded root hash')
    args = parser.parse_args()

    chaindb = BaseChainDB(MemoryDB())
    chaindb.persist_header_to_db(ROPSTEN_GENESIS_HEADER)
    network_id = RopstenChain.network_id
    privkey = ecies.generate_privkey()

    state_db = LevelDB(args.db)
    root_hash = decode_hex(args.root_hash)
    downloader = StateDownloader(chaindb, state_db, root_hash, network_id, privkey)
    loop = asyncio.get_event_loop()
    try:
        loop.run_until_complete(downloader.run())
    except KeyboardInterrupt:
        pass

    loop.run_until_complete(downloader.stop())
    loop.close()
Example #23
# Imports for everything this snippet uses (module paths as in the other examples).
import argparse
import asyncio
import threading

from evm.chains.mainnet import MAINNET_VM_CONFIGURATION
from evm.chains.ropsten import ROPSTEN_NETWORK_ID
from evm.db.backends.level import LevelDB
from evm.db.chain import BaseChainDB
from evm.p2p import ecies
from evm.p2p.lightchain import LightChain

parser = argparse.ArgumentParser()
parser.add_argument('-db', type=str, required=True)
args = parser.parse_args()

DemoLightChain = LightChain.configure(
    name='Demo LightChain',
    privkey=ecies.generate_privkey(),
    vm_configuration=MAINNET_VM_CONFIGURATION,
    network_id=ROPSTEN_NETWORK_ID,
)

chain = DemoLightChain(BaseChainDB(LevelDB(args.db)))
loop = asyncio.get_event_loop()
t = threading.Thread(target=loop.run_until_complete,
                     args=(chain.run(), ),
                     daemon=True)
t.start()


def wait_for_result(coroutine):
    future = asyncio.run_coroutine_threadsafe(coroutine, loop)
    return future.result()


def cleanup():
    # This is to instruct chain.run() to exit, which will cause the event loop to stop.
    chain._should_stop.set()
    # This will block until the event loop has stopped.
Example #24
def get_fresh_mainnet_chaindb():
    chaindb = BaseChainDB(MemoryDB())
    chaindb.persist_header_to_db(MAINNET_GENESIS_HEADER)
    return chaindb
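
A short sketch of what a caller can expect from the helper above; the header comparison is an assumption consistent with how get_canonical_head is used elsewhere on this page.

# Hypothetical: two independent in-memory databases, e.g. for the two peers in
# get_directly_linked_peers-style tests, both starting from the mainnet genesis.
chaindb1 = get_fresh_mainnet_chaindb()
chaindb2 = get_fresh_mainnet_chaindb()
assert chaindb1.get_canonical_head() == chaindb2.get_canonical_head()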
Example #25
def test_apply_transaction(chain_without_block_validation):  # noqa: F811
    chain = chain_without_block_validation  # noqa: F811

    # Don't change these variables
    vm = chain.get_vm()
    chaindb = copy.deepcopy(vm.chaindb)
    block0 = copy.deepcopy(vm.block)
    prev_block_hash = chain.get_canonical_block_by_number(0).hash
    initial_state_root = vm.block.header.state_root

    # (1) Get VM.apply_transaction(transaction) result for assertion
    # The first transaction
    chain1 = copy.deepcopy(chain)
    vm_example = chain1.get_vm()
    recipient1 = decode_hex('0x1111111111111111111111111111111111111111')
    amount = 100
    from_ = chain.funded_address
    tx1 = new_transaction(
        vm_example,
        from_,
        recipient1,
        amount,
        private_key=chain.funded_address_private_key,
    )
    computation, result_block = vm_example.apply_transaction(tx1)

    # The second transaction
    recipient2 = decode_hex('0x2222222222222222222222222222222222222222')
    tx2 = new_transaction(
        vm_example,
        from_,
        recipient2,
        amount,
        private_key=chain.funded_address_private_key,
    )
    computation, result_block = vm_example.apply_transaction(tx2)
    assert len(result_block.transactions) == 2

    # (2) Test VMState.apply_transaction(...)
    # Use FrontierVMState to apply transaction
    chaindb1 = copy.deepcopy(chaindb)
    block1 = copy.deepcopy(block0)
    prev_hashes = vm.get_prev_hashes(
        last_block_hash=prev_block_hash,
        db=vm.chaindb,
    )
    execution_context = ExecutionContext.from_block_header(block1.header, prev_hashes)
    vm_state1 = FrontierVMState(
        chaindb=chaindb1,
        execution_context=execution_context,
        state_root=block1.header.state_root,
        receipts=[],
    )
    parent_hash = copy.deepcopy(prev_hashes[0])

    computation, block, _ = vm_state1.apply_transaction(
        tx1,
        block1,
    )
    access_logs1 = computation.vm_state.access_logs

    # Check if prev_hashes hasn't been changed
    assert parent_hash == prev_hashes[0]
    # Make sure that block1 hasn't been changed
    assert block1.header.state_root == initial_state_root
    execution_context = ExecutionContext.from_block_header(block.header, prev_hashes)
    vm_state1 = FrontierVMState(
        chaindb=chaindb1,
        execution_context=execution_context,
        state_root=block.header.state_root,
        receipts=computation.vm_state.receipts,
    )
    computation, block, _ = vm_state1.apply_transaction(
        tx2,
        block,
    )
    access_logs2 = computation.vm_state.access_logs
    post_vm_state = computation.vm_state

    # Check AccessLogs
    witness_db = BaseChainDB(MemoryDB(access_logs2.writes))
    state_db = witness_db.get_state_db(block.header.state_root, read_only=True)
    assert state_db.get_balance(recipient2) == amount
    with pytest.raises(KeyError):
        _ = state_db.get_balance(recipient1)

    # Check block data are correct
    assert block.header.state_root == result_block.header.state_root
    assert block.header.gas_limit == result_block.header.gas_limit
    assert block.header.gas_used == result_block.header.gas_used
    assert block.header.transaction_root == result_block.header.transaction_root
    assert block.header.receipt_root == result_block.header.receipt_root

    # Make sure that vm_state1 hasn't been changed
    assert post_vm_state.state_root == result_block.header.state_root

    # (3) Testing using witness as db data
    # Witness_db
    block2 = copy.deepcopy(block0)

    witness_db = BaseChainDB(MemoryDB(access_logs1.reads))
    prev_hashes = vm.get_prev_hashes(
        last_block_hash=prev_block_hash,
        db=vm.chaindb,
    )
    execution_context = ExecutionContext.from_block_header(block2.header, prev_hashes)
    # Apply the first transaction
    vm_state2 = FrontierVMState(
        chaindb=witness_db,
        execution_context=execution_context,
        state_root=block2.header.state_root,
        receipts=[],
    )
    computation, block, _ = vm_state2.apply_transaction(
        tx1,
        block2,
    )

    # Update witness_db
    recent_trie_nodes = merge(access_logs2.reads, access_logs1.writes)
    witness_db = BaseChainDB(MemoryDB(recent_trie_nodes))
    execution_context = ExecutionContext.from_block_header(block.header, prev_hashes)
    # Apply the second transaction
    vm_state2 = FrontierVMState(
        chaindb=witness_db,
        execution_context=execution_context,
        state_root=block.header.state_root,
        receipts=computation.vm_state.receipts,
    )
    computation, block, _ = vm_state2.apply_transaction(
        tx2,
        block,
    )

    # After applying
    assert block.header.state_root == computation.vm_state.state_root
    assert block.header.transaction_root == result_block.header.transaction_root
    assert block.header.receipt_root == result_block.header.receipt_root
    assert block.hash == result_block.hash

    # (4) Testing using witness_db and block_header to reconstruct vm_state
    prev_hashes = vm.get_prev_hashes(
        last_block_hash=prev_block_hash,
        db=vm.chaindb,
    )
    execution_context = ExecutionContext.from_block_header(block.header, prev_hashes)
    vm_state3 = FrontierVMState(
        chaindb=witness_db,
        execution_context=execution_context,
        state_root=block.header.state_root,
    )
    assert vm_state3.state_root == post_vm_state.state_root
    assert vm_state3.state_root == result_block.header.state_root
Example #26
parser.add_argument('-debug', action='store_true')
args = parser.parse_args()

print("Logging to", LOGFILE)
if args.debug:
    LOGLEVEL = logging.DEBUG
logging.basicConfig(level=LOGLEVEL, filename=LOGFILE)

DemoLightChain = LightChain.configure(
    name='Demo LightChain',
    privkey=ecies.generate_privkey(),
    vm_configuration=MAINNET_VM_CONFIGURATION,
    network_id=ROPSTEN_NETWORK_ID,
)

chaindb = BaseChainDB(LevelDB(args.db))
try:
    chaindb.get_canonical_head()
except CanonicalHeadNotFound:
    # We're starting with a fresh DB.
    chain = DemoLightChain.from_genesis_header(chaindb, ROPSTEN_GENESIS_HEADER)
else:
    # We're reusing an existing db.
    chain = DemoLightChain(chaindb)

loop = asyncio.get_event_loop()
t = threading.Thread(target=loop.run_until_complete,
                     args=(chain.run(), ),
                     daemon=True)
t.start()
Example #27
    # The default remoteid can be used if you pass nodekeyhex as above to geth.
    nodekey = keys.PrivateKey(decode_hex(
        "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8"))
    remoteid = nodekey.public_key.to_hex()
    parser = argparse.ArgumentParser()
    parser.add_argument('-remoteid', type=str, default=remoteid)
    parser.add_argument('-db', type=str)
    parser.add_argument('-mainnet', action="store_true")
    args = parser.parse_args()

    remote = Node(
        keys.PublicKey(decode_hex(args.remoteid)),
        Address('127.0.0.1', 30303, 30303))

    if args.db is not None:
        chaindb = BaseChainDB(LevelDB(args.db))
    else:
        chaindb = BaseChainDB(MemoryDB())

    genesis_header = ROPSTEN_GENESIS_HEADER
    chain_class = RopstenChain
    if args.mainnet:
        genesis_header = MAINNET_GENESIS_HEADER
        chain_class = MainnetChain

    try:
        chaindb.get_canonical_head()
    except CanonicalHeadNotFound:
        # We're starting with a fresh DB.
        chain = chain_class.from_genesis_header(chaindb, genesis_header)
    else: