Example #1
def debug_find_diff_chain_head_hash_between_historical_root_hashes():
    # this is from bootnode 1
    historical_root_hash_time_1 = 1564233000
    historical_root_hash_time_2 = 1564234000

    testdb_before = LevelDB(
        "/home/tommy/.local/share/helios/mainnet/chain/full_before_rebuild/")
    testdb_after = LevelDB(
        "/home/tommy/.local/share/helios/mainnet/chain/full/")
    #testdb = LevelDB("/home/tommy/.local/share/helios/bootnode_1_july_30/mainnet/chain/full/")

    testdb_before = JournalDB(testdb_before)
    testdb_after = JournalDB(testdb_after)
    #testdb = ReadOnlyDB(testdb)

    chain_before = MainnetChain(
        testdb_before, private_keys[0].public_key.to_canonical_address(),
        private_keys[0])
    chain_after = MainnetChain(
        testdb_after, private_keys[0].public_key.to_canonical_address(),
        private_keys[0])

    historical_root_hashes_before = chain_before.chain_head_db.get_historical_root_hashes(
    )
    historical_root_hashes_after = chain_after.chain_head_db.get_historical_root_hashes(
    )

    print(historical_root_hashes_before)
    print(historical_root_hashes_after)

    _print_chronological_blocks_and_actual_head_hash_diff(
        chain_before, historical_root_hash_time_1, historical_root_hash_time_2)
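A small companion sketch (not part of the original snippet) that narrows the two printed lists down to the timestamps whose root hashes actually diverge. It assumes get_historical_root_hashes() returns (timestamp, root_hash) pairs, which matches the indexing used in Example #11.

def print_diverging_root_hashes(historical_root_hashes_before, historical_root_hashes_after):
    # Build timestamp -> root_hash maps and report only the mismatching timestamps.
    before = dict(historical_root_hashes_before)
    after = dict(historical_root_hashes_after)
    for timestamp in sorted(set(before) | set(after)):
        if before.get(timestamp) != after.get(timestamp):
            print(timestamp, before.get(timestamp), after.get(timestamp))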
Example #2
def add_transactions_to_blockchain_db(base_db, tx_list: List):
    # sort by time
    tx_list.sort(key=lambda x: x[3])

    for tx_key in tx_list:
        sender_priv_key = tx_key[0]
        receive_priv_key = tx_key[1]
        amount = tx_key[2]
        tx_timestamp = int(tx_key[3])

        if len(tx_key) > 4:
            gas_price = to_wei(tx_key[4], 'gwei')
        else:
            gas_price = to_wei(1, 'gwei')

        total_gas = gas_price
        sender_chain = TestnetChain(base_db, sender_priv_key.public_key.to_canonical_address(), sender_priv_key)
        dummy_sender_chain = TestnetChain(JournalDB(base_db), sender_priv_key.public_key.to_canonical_address(),
                                          sender_priv_key)

        dummy_sender_chain.create_and_sign_transaction_for_queue_block(
            gas_price=gas_price,
            gas=GAS_TX,
            to=receive_priv_key.public_key.to_canonical_address(),
            value=amount,
            data=b"",
            v=0,
            r=0,
            s=0
        )

        # import the block into the dummy chain to complete it and make sure it is valid
        imported_block = dummy_sender_chain.import_current_queue_block()

        # altering block timestamp and importing again
        timestamp_modified_imported_block = imported_block.copy(
            header=imported_block.header.copy(timestamp=tx_timestamp).get_signed(sender_priv_key,
                                                                                 dummy_sender_chain.network_id))
        sender_chain.import_block(timestamp_modified_imported_block, allow_unprocessed=False)

        # then receive the transactions

        dummy_receiver_chain = TestnetChain(JournalDB(base_db), receive_priv_key.public_key.to_canonical_address(),
                                            receive_priv_key)
        dummy_receiver_chain.populate_queue_block_with_receive_tx()
        imported_block = dummy_receiver_chain.import_current_queue_block()

        # altering block timestamp and importing again
        timestamp_modified_imported_block = imported_block.copy(
            header=imported_block.header.copy(timestamp=tx_timestamp).get_signed(receive_priv_key,
                                                                                  dummy_receiver_chain.network_id))
        # print('XXXXXXXXXX')
        # print(tx_timestamp)
        receiver_chain = TestnetChain(base_db, receive_priv_key.public_key.to_canonical_address(), receive_priv_key)
        receiver_chain.import_block(timestamp_modified_imported_block, allow_unprocessed=False)
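A hedged usage sketch for the helper above. Each tx_list entry follows the positional layout the function indexes: (sender_priv_key, receiver_priv_key, amount, timestamp[, gas_price_in_gwei]). It assumes base_db already holds an initialized Testnet blockchain with a funded sender account (for example, one built with a helper like the one in Example #9); the throwaway eth_keys private keys are placeholders.

import time
from eth_keys import keys

sender = keys.PrivateKey(b'\x01' * 32)      # placeholder keys; the sender account must be funded
receiver = keys.PrivateKey(b'\x02' * 32)

tx_list = [
    (sender, receiver, 10 ** 18, time.time() - 1000),        # uses the default 1 gwei gas price
    (sender, receiver, 2 * 10 ** 18, time.time() - 500, 2),  # explicit 2 gwei gas price
]

add_transactions_to_blockchain_db(base_db, tx_list)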
Example #3
    def __init__(self, db, state_root=BLANK_ROOT_HASH):
        r"""
        Internal implementation details (subject to rapid change):
        Database entries go through several pipes, like so...

        .. code::

                                                                    -> hash-trie -> storage lookups
                                                                  /
            db > _batchdb ---------------------------> _journaldb ----------------> code lookups
             \
              -> _batchtrie -> _trie -> _trie_cache -> _journaltrie --------------> account lookups

        Journaling sequesters writes at the _journal* attrs ^, until persist is called.

        _batchtrie enables us to prune all trie changes while building
        state, without deleting old trie roots.

        _batchdb and _batchtrie together enable us to make the state root,
        without saving everything to the database.

        _journaldb is a journaling of the keys and values used to store
        code and account storage.

        _trie is a hash-trie, used to generate the state root

        _trie_cache is a cache tied to the state root of the trie. It
        is important that this cache is checked *after* looking for
        the key in _journaltrie, because the cache is only invalidated
        after a state root change.

        _journaltrie is a journaling of the accounts (an address->rlp_templates mapping,
        rather than the nodes stored by the trie). This enables
        a squashing of all account changes before pushing them into the trie.

        .. NOTE:: There is an opportunity to do something similar for storage

        AccountDB synchronizes the snapshot/revert/persist of both of the
        journals.
        """
        self.db = db
        self._batchdb = BatchDB(db)
        self._batchtrie = BatchDB(db)
        self._journaldb = JournalDB(self._batchdb)
        self._trie = HashTrie(
            HexaryTrie(self._batchtrie, state_root, prune=True))
        self._trie_cache = CacheDB(self._trie)
        self._journaltrie = JournalDB(self._trie_cache)
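The docstring above leans on JournalDB's snapshot/revert behaviour without showing it in isolation. A minimal sketch of that behaviour, assuming the record/discard/commit/persist API used in these examples and an hvm.db.backends.memory import path for MemoryDB:

from hvm.db.backends.memory import MemoryDB   # import path assumed
from hvm.db.journal import JournalDB

base_db = MemoryDB()
journal_db = JournalDB(base_db)

changeset = journal_db.record()              # take a snapshot
journal_db[b'key-1'] = b'value-1'            # the write is sequestered in the journal
journal_db.discard(changeset)                # revert everything since the snapshot
assert journal_db.get(b'key-1', b'') == b''

changeset = journal_db.record()
journal_db[b'key-2'] = b'value-2'
journal_db.commit(changeset)                 # keep the changes, drop the snapshot
journal_db.persist()                         # only now do the writes reach base_db
assert base_db[b'key-2'] == b'value-2'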
Example #4
def db(request):
    base_db = MemoryDB()
    if request.param is JournalDB:
        return JournalDB(base_db)
    elif request.param is BatchDB:
        return BatchDB(base_db)
    elif request.param is MemoryDB:
        return base_db
    else:
        raise Exception("Invariant")
Example #5
    def __init__(self, db):
        r"""
        Internal implementation details (subject to rapid change):

        Journaling sequesters writes at the _journal* attrs ^, until persist is called.

        _batchdb and _batchtrie together enable us to make the state root,
        without saving everything to the database.

        _journaldb is a journaling of the keys and values used to store
        code and account storage.

        TODO: add cache
        _trie_cache is a cache tied to the state root of the trie. It
        is important that this cache is checked *after* looking for
        the key in _journaltrie, because the cache is only invalidated
        after a state root change.

        AccountDB synchronizes the snapshot/revert/persist of the
        journal.
        """
        self.db = db
        self._batchdb = BatchDB(db)
        self._journaldb = JournalDB(self._batchdb)
Example #6
def create_valid_block_at_timestamp(base_db, private_key, transactions = None, receive_transactions = None, reward_bundle = None, timestamp = None):
    '''
    Creates a valid block at the given timestamp. The transactions, receive transactions and reward bundle must already be valid.
    :param base_db:
    :param private_key:
    :param transactions:
    :param receive_transactions:
    :param reward_bundle:
    :param timestamp:
    :return:
    '''
    if timestamp is None:
        timestamp = int(time.time())

    chain = TestnetChain(JournalDB(base_db), private_key.public_key.to_canonical_address(), private_key)

    queue_block = chain.get_queue_block()
    queue_block = queue_block.copy(header = queue_block.header.copy(timestamp = timestamp),
                                   transactions=transactions,
                                   receive_transactions=receive_transactions,
                                   reward_bundle=reward_bundle)


    valid_block = chain.get_vm(timestamp = timestamp).import_block(queue_block, validate = False, private_key = chain.private_key)


    return valid_block
Example #7
class AccountDB(BaseAccountDB):

    logger = logging.getLogger('hvm.db.account.AccountDB')

    def __init__(self, db):
        r"""
        Internal implementation details (subject to rapid change):

        Journaling sequesters writes at the _journal* attrs ^, until persist is called.

        _batchdb and _batchtrie together enable us to make the state root,
        without saving everything to the database.

        _journaldb is a journaling of the keys and values used to store
        code and account storage.

        TODO: add cache
        _trie_cache is a cache tied to the state root of the trie. It
        is important that this cache is checked *after* looking for
        the key in _journaltrie, because the cache is only invalidated
        after a state root change.

        AccountDB synchronizes the snapshot/revert/persist of the
        journal.
        """
        self.db = db
        self._batchdb = BatchDB(db)
        self._journaldb = JournalDB(self._batchdb)


    #
    # Storage
    #
    def get_storage(self, address, slot):
        validate_canonical_address(address, title="Storage Address")
        validate_uint256(slot, title="Storage Slot")

        account = self._get_account(address)
        storage = HashTrie(HexaryTrie(self._journaldb, account.storage_root))

        slot_as_key = pad32(int_to_big_endian(slot))

        if slot_as_key in storage:
            encoded_value = storage[slot_as_key]
            return rlp.decode(encoded_value, sedes=rlp.sedes.big_endian_int)
        else:
            return 0

    def set_storage(self, address, slot, value):
        validate_uint256(value, title="Storage Value")
        validate_uint256(slot, title="Storage Slot")
        validate_canonical_address(address, title="Storage Address")

        account = self._get_account(address)
        storage = HashTrie(HexaryTrie(self._journaldb, account.storage_root))

        slot_as_key = pad32(int_to_big_endian(slot))

        if value:
            encoded_value = rlp.encode(value)
            storage[slot_as_key] = encoded_value
        else:
            del storage[slot_as_key]

        self._set_account(address, account.copy(storage_root=storage.root_hash))

    def delete_storage(self, address):
        validate_canonical_address(address, title="Storage Address")

        account = self._get_account(address)
        self._set_account(address, account.copy(storage_root=BLANK_ROOT_HASH))

    #
    # Balance
    #
    def get_balance(self, address):
        validate_canonical_address(address, title="Storage Address")

        account = self._get_account(address)
        return account.balance

    def set_balance(self, address, balance):
        validate_canonical_address(address, title="Storage Address")
        validate_uint256(balance, title="Account Balance")

        account = self._get_account(address)
        self._set_account(address, account.copy(balance=balance))

    #
    # Nonce
    #
    def get_nonce(self, address: Address) -> int:
        validate_canonical_address(address, title="Storage Address")

        account = self._get_account(address)
        return account.nonce

    def set_nonce(self, address, nonce):
        validate_canonical_address(address, title="Storage Address")
        validate_uint256(nonce, title="Nonce")

        account = self._get_account(address)
        self._set_account(address, account.copy(nonce=nonce))

    def increment_nonce(self, address):
        current_nonce = self.get_nonce(address)
        self.set_nonce(address, current_nonce + 1)

    #
    # Block number
    #
    def get_block_number(self, address):
        validate_canonical_address(address, title="Storage Address")

        account = self._get_account(address)
        return account.block_number

    def set_block_number(self, address, block_number):
        validate_canonical_address(address, title="Storage Address")
        validate_uint256(block_number, title="Block Number")

        account = self._get_account(address)
        self._set_account(address, account.copy(block_number=block_number))

    def increment_block_number(self, address):
        current_block_number = self.get_block_number(address)
        self.set_block_number(address, current_block_number + 1)
        
       
    #
    # Receivable Transactions
    #
    def get_receivable_transactions(self, address: Address) -> List[TransactionKey]:
        validate_canonical_address(address, title="Storage Address")
        account = self._get_account(address)
        return account.receivable_transactions
    
    def has_receivable_transactions(self, address: Address) -> bool:
        tx = self.get_receivable_transactions(address)
        if len(tx) == 0:
            return False
        else:
            return True
        
    def get_receivable_transaction(self, address: Address, transaction_hash: Hash32) -> Optional[TransactionKey]:
        validate_is_bytes(transaction_hash, title="Transaction Hash")
        all_tx = self.get_receivable_transactions(address)
        for tx_key in all_tx:
            if tx_key.transaction_hash == transaction_hash:
                return tx_key
        return None
        
     
    def add_receivable_transactions(self, address: Address, transaction_keys: List[TransactionKey]) -> None:
        validate_canonical_address(address, title="Wallet Address")
        for tx_key in transaction_keys:
            self.add_receivable_transaction(address, tx_key.transaction_hash, tx_key.sender_block_hash)
            
    def add_receivable_transaction(self, address: Address, transaction_hash: Hash32, sender_block_hash: Hash32, is_contract_deploy:bool = False) -> None:
        validate_canonical_address(address, title="Wallet Address")
        validate_is_bytes(transaction_hash, title="Transaction Hash")
        validate_is_bytes(sender_block_hash, title="Sender Block Hash")
        
        #this is the wallet address people send money to when slashed. It is a sink
        if address == SLASH_WALLET_ADDRESS:
            return


        account = self._get_account(address)
        receivable_transactions = account.receivable_transactions

        # first lets make sure we don't already have the transaction
        for tx_key in receivable_transactions:
            if tx_key.transaction_hash == transaction_hash:
                raise ValueError("Tried to save a receivable transaction that was already saved. TX HASH = {}".format(encode_hex(transaction_hash)))

        
        new_receivable_transactions = receivable_transactions + (TransactionKey(transaction_hash, sender_block_hash), )
        
        
        #self.logger.debug(new_receivable_transactions)
        self.logger.debug("Adding receivable transaction {} to account {}".format(encode_hex(transaction_hash), encode_hex(address)))
        self._set_account(address, account.copy(receivable_transactions=new_receivable_transactions))

        #finally, if this is a smart contract, lets add it to the list of smart contracts with pending transactions
        if is_contract_deploy or self.get_code_hash(address) != EMPTY_SHA3:
            self.logger.debug("Adding address to list of smart contracts with pending transactions")
            #we only need to run this when adding the first one.
            self._add_address_to_smart_contracts_with_pending_transactions(address)
        
    def delete_receivable_transaction(self, address: Address, transaction_hash: Hash32) -> None:
        validate_canonical_address(address, title="Storage Address")
        validate_is_bytes(transaction_hash, title="Transaction Hash")
        
        self.logger.debug("deleting receivable tx {} from account {}".format(encode_hex(transaction_hash), encode_hex(address)))
        account = self._get_account(address)
        receivable_transactions = list(self.get_receivable_transactions(address))
        i = 0
        found = False
        for tx_key in receivable_transactions:
            if tx_key.transaction_hash == transaction_hash:
                found = True
                break
            i += 1

        if found:
            del receivable_transactions[i]
        else:
            raise ReceivableTransactionNotFound("transaction hash {0} not found in receivable_transactions database for wallet {1}".format(transaction_hash, address))
        
        self._set_account(address, account.copy(receivable_transactions=tuple(receivable_transactions)))

        if self.get_code_hash(address) != EMPTY_SHA3:
            if len(receivable_transactions) == 0:
                self.logger.debug("Removing address from list of smart contracts with pending transactions")
                self._remove_address_from_smart_contracts_with_pending_transactions(address)
    
    
    #
    # Code
    #
    def get_code(self, address):
        validate_canonical_address(address, title="Storage Address")

        try:
            return self._journaldb[self.get_code_hash(address)]
        except KeyError:
            return b""

    def set_code(self, address, code):
        validate_canonical_address(address, title="Storage Address")
        validate_is_bytes(code, title="Code")

        account = self._get_account(address)

        code_hash = keccak(code)
        self._journaldb[code_hash] = code
        self._set_account(address, account.copy(code_hash=code_hash))

    def get_code_hash(self, address):
        validate_canonical_address(address, title="Storage Address")

        account = self._get_account(address)
        return account.code_hash

    def delete_code(self, address):
        validate_canonical_address(address, title="Storage Address")

        account = self._get_account(address)
        self._set_account(address, account.copy(code_hash=EMPTY_SHA3))


    #
    # Internal use smart contract transaction queue system
    #
    def _add_address_to_smart_contracts_with_pending_transactions(self, address: Address) -> None:
        key = SchemaV1.make_smart_contracts_with_pending_transactions_lookup_key()

        address_set = set(self.get_smart_contracts_with_pending_transactions())

        address_set.add(address)

        self.db[key] = rlp.encode(list(address_set), sedes=rlp.sedes.FCountableList(address))

    def _remove_address_from_smart_contracts_with_pending_transactions(self, address: Address) -> None:
        key = SchemaV1.make_smart_contracts_with_pending_transactions_lookup_key()

        address_set = set(self.get_smart_contracts_with_pending_transactions())

        address_set.remove(address)

        self.db[key] = rlp.encode(list(address_set), sedes=rlp.sedes.FCountableList(address))

    def has_pending_smart_contract_transactions(self, address: Address) -> bool:
        validate_canonical_address(address, title="Storage Address")
        address_set = set(self.get_smart_contracts_with_pending_transactions())
        return address in address_set

    def get_smart_contracts_with_pending_transactions(self) -> List[Address]:
        key = SchemaV1.make_smart_contracts_with_pending_transactions_lookup_key()

        try:
            address_list = rlp.decode(self.db[key], sedes=rlp.sedes.FCountableList(address), use_list=True)
            return address_list
        except KeyError:
            return []

    #
    # Account Methods
    #
    def account_has_code_or_nonce(self, address):
        return self.get_nonce(address) != 0 or self.account_has_code(address)

    def account_has_code(self, address: Address) -> bool:

        return self.get_code_hash(address) != EMPTY_SHA3

    def delete_account(self, address):
        validate_canonical_address(address, title="Storage Address")
        account_lookup_key = SchemaV1.make_account_lookup_key(address)
        #try:
        del self._journaldb[account_lookup_key]
        #except KeyError:
        #    self.logger.debug("tried to delete an account that doesnt exist")

    def account_exists(self, address):
        validate_canonical_address(address, title="Storage Address")
        account_lookup_key = SchemaV1.make_account_lookup_key(address)
        
        return self._journaldb.get(account_lookup_key, b'') != b''

    def touch_account(self, address):
        validate_canonical_address(address, title="Storage Address")

        account = self._get_account(address)
        self._set_account(address, account)

    def account_is_empty(self, address):
        return not self.account_has_code_or_nonce(address) and self.get_balance(address) == 0 and self.has_receivable_transactions(address) is False
    
    def get_account_hash(self, address: Address) -> Hash32:
        account = self._get_account(address)
        account_hashable = account.copy(
            receivable_transactions = (),
            block_conflicts = (),
        )
        account_hashable_encoded = rlp.encode(account_hashable, sedes=Account)
        return keccak(account_hashable_encoded)
    
    #
    # Internal
    #
    def _get_account(self, address):
        account_lookup_key = SchemaV1.make_account_lookup_key(address)
        rlp_account = self._journaldb.get(account_lookup_key, b'')
        if rlp_account:
            account = rlp.decode(rlp_account, sedes=Account)
            #account = hm_decode(rlp_account, sedes_classes=[Account])
        else:
            account = Account()
        return account


    def _set_account(self, address, account):
        encoded_account = rlp.encode(account, sedes=Account)
        #encoded_account = hm_encode(account)
        account_lookup_key = SchemaV1.make_account_lookup_key(address)
        self._journaldb[account_lookup_key] = encoded_account
        

    #
    # Record and discard API
    #
    def record(self) -> UUID:
        self.logger.debug("Recording account db changeset")
        return self._journaldb.record()

    def discard(self, changeset: UUID) -> None:
        self.logger.debug("Discarding account db changes")
        db_changeset = changeset
        self._journaldb.discard(db_changeset)

    def commit(self, changeset: UUID) -> None:
        db_changeset = changeset
        self._journaldb.commit(db_changeset)

    def persist(self, save_account_hash = False, wallet_address = None) -> None:
        self.logger.debug('Persisting account db. save_account_hash {} | wallet_address {}'.format(save_account_hash, wallet_address))
        self._journaldb.persist()
        self._batchdb.commit(apply_deletes=True)
        
        if save_account_hash:
            validate_canonical_address(wallet_address, title="Address")
            self.save_current_account_with_hash_lookup(wallet_address)
      
    #
    # Saving account state at particular account hash
    #
    
    def save_current_account_with_hash_lookup(self, wallet_address):
        validate_canonical_address(wallet_address, title="Address")
        account_hash = self.get_account_hash(wallet_address)
        account = self._get_account(wallet_address)
        rlp_account = rlp.encode(account, sedes=Account)
        
        lookup_key = SchemaV1.make_account_by_hash_lookup_key(account_hash)
        self.db[lookup_key] = rlp_account
        
    
    def revert_to_account_from_hash(self, account_hash, wallet_address):
        validate_canonical_address(wallet_address, title="Address")
        validate_is_bytes(account_hash, title="account_hash")
        lookup_key = SchemaV1.make_account_by_hash_lookup_key(account_hash)
        try:
            rlp_encoded = self.db[lookup_key]
            account = rlp.decode(rlp_encoded, sedes=Account)
            self._set_account(wallet_address, account)
        except KeyError:
            raise StateRootNotFound()
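A hedged sketch of the receivable-transaction bookkeeping this AccountDB variant adds on top of the usual balance/nonce/code accessors. The MemoryDB import path, the 20-byte placeholder address, and the 32-byte placeholder hashes are assumptions.

from hvm.db.backends.memory import MemoryDB   # import path assumed

account_db = AccountDB(MemoryDB())
wallet = b'\x01' * 20                 # canonical 20-byte address (placeholder)
tx_hash = b'\xaa' * 32                # transaction hash (placeholder)
sender_block_hash = b'\xbb' * 32      # hash of the block that sent the transaction (placeholder)

account_db.add_receivable_transaction(wallet, tx_hash, sender_block_hash)
assert account_db.has_receivable_transactions(wallet)
assert account_db.get_receivable_transaction(wallet, tx_hash).sender_block_hash == sender_block_hash

account_db.delete_receivable_transaction(wallet, tx_hash)
assert not account_db.has_receivable_transactions(wallet)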
Example #8
from hvm.db.backends.level import LevelDB
from hvm.db.journal import JournalDB


def fix_blockchain_database_errors(base_db):
    '''
    Checks to make sure all chains match what is expected from saved chain head root hash
    :param base_db:
    :return:
    '''
    node_1 = MainnetChain(
        base_db,
        GENESIS_PRIVATE_KEY_FOR_TESTNET.public_key.to_canonical_address(),
        GENESIS_PRIVATE_KEY_FOR_TESTNET)
    chain_head_hashes = node_1.chain_head_db.get_head_block_hashes_list()

    for head_hash in chain_head_hashes:
        address = node_1.chaindb.get_chain_wallet_address_for_block_hash(
            head_hash)
        # make sure the head block matches the expected head_hash
        chain_head_hash = node_1.chaindb.get_canonical_head_hash(address)

        if chain_head_hash != head_hash:
            print('f**k')
            exit()


base_db = JournalDB(
    LevelDB('/home/tommy/.local/share/helios/instance_0/chain'))
fix_blockchain_database_errors(base_db)
Example #9
def create_dev_fixed_blockchain_database(base_db, key_balance_dict, use_real_genesis = False):
    logger.debug("generating test fixed blockchain db")

    earliest_timestamp = int(time.time())
    required_total_supply = 0
    for balance_timestamp in key_balance_dict.values():
        required_total_supply += balance_timestamp[0]
        if balance_timestamp[1] < earliest_timestamp:
            earliest_timestamp = balance_timestamp[1]

    required_total_supply = required_total_supply*2

    #initialize db
    if use_real_genesis:
        sender_chain = import_genesis_block(base_db)
    else:
        genesis_params, genesis_state = create_new_genesis_params_and_state(TESTNET_GENESIS_PRIVATE_KEY, required_total_supply, earliest_timestamp - 100000)
        sender_chain = TestnetChain.from_genesis(base_db, TESTNET_GENESIS_PRIVATE_KEY.public_key.to_canonical_address(), genesis_params, genesis_state)

    sender_chain.chaindb.initialize_historical_minimum_gas_price_at_genesis(min_gas_price=1, net_tpc_cap=5)

    prev_timestamp = 0
    for priv_key, balance_timestamp in key_balance_dict.items():
        sender_chain = TestnetChain(base_db, TESTNET_GENESIS_PRIVATE_KEY.public_key.to_canonical_address(), TESTNET_GENESIS_PRIVATE_KEY)

        dummy_sender_chain = TestnetChain(JournalDB(base_db), TESTNET_GENESIS_PRIVATE_KEY.public_key.to_canonical_address(), TESTNET_GENESIS_PRIVATE_KEY)

        balance = balance_timestamp[0]
        timestamp = balance_timestamp[1]
        if timestamp < prev_timestamp:
            raise ValueError("timestamps must be in ascending order")

        receiver_privkey = priv_key

        dummy_sender_chain.create_and_sign_transaction_for_queue_block(
                gas_price=0x01,
                gas=0x0c3500,
                to=receiver_privkey.public_key.to_canonical_address(),
                value=balance,
                data=b"",
                v=0,
                r=0,
                s=0
                )



        # import the block into the dummy chain to complete it and make sure it is valid
        imported_block = dummy_sender_chain.import_current_queue_block()

        # altering block timestamp and importing again
        timestamp_modified_imported_block = imported_block.copy(header = imported_block.header.copy(timestamp = timestamp).get_signed(TESTNET_GENESIS_PRIVATE_KEY, dummy_sender_chain.network_id))
        sender_chain.import_block(timestamp_modified_imported_block, allow_unprocessed = False)

        #logger.debug("Receiving ")

        #then receive the transactions
        receiver_chain = TestnetChain(base_db, receiver_privkey.public_key.to_canonical_address(), receiver_privkey)
        dummy_receiver_chain = TestnetChain(JournalDB(base_db), receiver_privkey.public_key.to_canonical_address(), receiver_privkey)
        dummy_receiver_chain.populate_queue_block_with_receive_tx()
        imported_block = dummy_receiver_chain.import_current_queue_block()

        # altering block timestamp and importing again
        timestamp_modified_imported_block = imported_block.copy(header=imported_block.header.copy(timestamp=timestamp).get_signed(receiver_privkey, dummy_receiver_chain.network_id))
        receiver_chain.import_block(timestamp_modified_imported_block, allow_unprocessed=False)


    logger.debug("finished creating fixed blockchain")
Example #10
class AccountDB(BaseAccountDB):

    logger = logging.getLogger('hvm.db.account.AccountDB')

    def __init__(self, db, state_root=BLANK_ROOT_HASH):
        r"""
        Internal implementation details (subject to rapid change):
        Database entries go through several pipes, like so...

        .. code::

                                                                    -> hash-trie -> storage lookups
                                                                  /
            db > _batchdb ---------------------------> _journaldb ----------------> code lookups
             \
              -> _batchtrie -> _trie -> _trie_cache -> _journaltrie --------------> account lookups

        Journaling sequesters writes at the _journal* attrs ^, until persist is called.

        _batchtrie enables us to prune all trie changes while building
        state, without deleting old trie roots.

        _batchdb and _batchtrie together enable us to make the state root,
        without saving everything to the database.

        _journaldb is a journaling of the keys and values used to store
        code and account storage.

        _trie is a hash-trie, used to generate the state root

        _trie_cache is a cache tied to the state root of the trie. It
        is important that this cache is checked *after* looking for
        the key in _journaltrie, because the cache is only invalidated
        after a state root change.

        _journaltrie is a journaling of the accounts (an address->rlp_templates mapping,
        rather than the nodes stored by the trie). This enables
        a squashing of all account changes before pushing them into the trie.

        .. NOTE:: There is an opportunity to do something similar for storage

        AccountDB synchronizes the snapshot/revert/persist of both of the
        journals.
        """
        self.db = db
        self._batchdb = BatchDB(db)
        self._batchtrie = BatchDB(db)
        self._journaldb = JournalDB(self._batchdb)
        self._trie = HashTrie(
            HexaryTrie(self._batchtrie, state_root, prune=True))
        self._trie_cache = CacheDB(self._trie)
        self._journaltrie = JournalDB(self._trie_cache)

    @property
    def state_root(self):
        return self._trie.root_hash

    @state_root.setter
    def state_root(self, value):
        self._trie_cache.reset_cache()
        self._trie.root_hash = value

    def has_root(self, state_root: bytes) -> bool:
        return state_root in self._batchtrie

    #
    # Storage
    #
    def get_storage(self, address, slot):
        validate_canonical_address(address, title="Storage Address")
        validate_uint256(slot, title="Storage Slot")

        account = self._get_account(address)
        storage = HashTrie(HexaryTrie(self._journaldb, account.storage_root))

        slot_as_key = pad32(int_to_big_endian(slot))

        if slot_as_key in storage:
            encoded_value = storage[slot_as_key]
            return rlp.decode(encoded_value, sedes=rlp.sedes.big_endian_int)
        else:
            return 0

    def set_storage(self, address, slot, value):
        validate_uint256(value, title="Storage Value")
        validate_uint256(slot, title="Storage Slot")
        validate_canonical_address(address, title="Storage Address")

        account = self._get_account(address)
        storage = HashTrie(HexaryTrie(self._journaldb, account.storage_root))

        slot_as_key = pad32(int_to_big_endian(slot))

        if value:
            encoded_value = rlp.encode(value)
            storage[slot_as_key] = encoded_value
        else:
            del storage[slot_as_key]

        self._set_account(address,
                          account.copy(storage_root=storage.root_hash))

    def delete_storage(self, address):
        validate_canonical_address(address, title="Storage Address")

        account = self._get_account(address)
        self._set_account(address, account.copy(storage_root=BLANK_ROOT_HASH))

    #
    # Balance
    #
    def get_balance(self, address):
        validate_canonical_address(address, title="Storage Address")

        account = self._get_account(address)
        return account.balance

    def set_balance(self, address, balance):
        validate_canonical_address(address, title="Storage Address")
        validate_uint256(balance, title="Account Balance")

        account = self._get_account(address)
        self._set_account(address, account.copy(balance=balance))

    #
    # Nonce
    #
    def get_nonce(self, address):
        validate_canonical_address(address, title="Storage Address")

        account = self._get_account(address)
        return account.nonce

    def set_nonce(self, address, nonce):
        validate_canonical_address(address, title="Storage Address")
        validate_uint256(nonce, title="Nonce")

        account = self._get_account(address)
        self._set_account(address, account.copy(nonce=nonce))

    def increment_nonce(self, address):
        current_nonce = self.get_nonce(address)
        self.set_nonce(address, current_nonce + 1)

    #
    # Block number
    #
    def get_block_number(self, address):
        validate_canonical_address(address, title="Storage Address")

        account = self._get_account(address)
        return account.block_number

    def set_block_number(self, address, block_number):
        validate_canonical_address(address, title="Storage Address")
        validate_uint256(block_number, title="Block Number")

        account = self._get_account(address)
        self._set_account(address, account.copy(block_number=block_number))

    def increment_block_number(self, address):
        current_block_number = self.get_block_number(address)
        self.set_block_number(address, current_block_number + 1)

    #
    # Receivable Transactions
    #
    def get_receivable_transactions(self, address):
        validate_canonical_address(address, title="Storage Address")
        account = self._get_account(address)
        return account.receivable_transactions

    def has_receivable_transactions(self, address):
        tx = self.get_receivable_transactions(address)
        if len(tx) == 0:
            return False
        else:
            return True

    def get_receivable_transaction(self, address, transaction_hash):
        validate_is_bytes(transaction_hash, title="Transaction Hash")
        all_tx = self.get_receivable_transactions(address)
        for tx_key in all_tx:
            if tx_key.transaction_hash == transaction_hash:
                return tx_key
        return False

    def add_receivable_transaction(self, address, transaction_hash,
                                   sender_block_hash):
        validate_canonical_address(address, title="Storage Address")
        validate_is_bytes(transaction_hash, title="Transaction Hash")
        validate_is_bytes(sender_block_hash, title="Sender Block Hash")

        #this is the wallet address people send money to when slashed. It is a sink
        if address == SLASH_WALLET_ADDRESS:
            return

        #first lets make sure we don't already have the transaction
        if self.get_receivable_transaction(address,
                                           transaction_hash) is not False:
            raise ValueError(
                "Tried to save a receivable transaction that was already saved"
            )

        account = self._get_account(address)
        receivable_transactions = account.receivable_transactions

        new_receivable_transactions = receivable_transactions + (
            TransactionKey(transaction_hash, sender_block_hash), )

        self.logger.debug(
            "adding receivable transaction {}".format(transaction_hash))
        #self.logger.debug(new_receivable_transactions)

        self._set_account(
            address,
            account.copy(receivable_transactions=new_receivable_transactions))

    def delete_receivable_transaction(self, address, transaction_hash):
        validate_canonical_address(address, title="Storage Address")
        validate_is_bytes(transaction_hash, title="Transaction Hash")

        self.logger.debug("deleting receivable tx {}".format(transaction_hash))
        account = self._get_account(address)
        receivable_transactions = list(
            self.get_receivable_transactions(address))
        i = 0
        found = False
        for tx_key in receivable_transactions:
            if tx_key.transaction_hash == transaction_hash:
                found = True
                break
            i += 1

        if found:
            del receivable_transactions[i]
        else:
            raise ValueError(
                "transaction hash {0} not found in receivable_transactions database for wallet {1}"
                .format(transaction_hash, address))

        self._set_account(
            address,
            account.copy(
                receivable_transactions=tuple(receivable_transactions)))

    #
    # Code
    #
    def get_code(self, address):
        validate_canonical_address(address, title="Storage Address")

        try:
            return self._journaldb[self.get_code_hash(address)]
        except KeyError:
            return b""

    def set_code(self, address, code):
        validate_canonical_address(address, title="Storage Address")
        validate_is_bytes(code, title="Code")

        account = self._get_account(address)

        code_hash = keccak(code)
        self._journaldb[code_hash] = code
        self._set_account(address, account.copy(code_hash=code_hash))

    def get_code_hash(self, address):
        validate_canonical_address(address, title="Storage Address")

        account = self._get_account(address)
        return account.code_hash

    def delete_code(self, address):
        validate_canonical_address(address, title="Storage Address")

        account = self._get_account(address)
        self._set_account(address, account.copy(code_hash=EMPTY_SHA3))

    #
    # Account Methods
    #
    def account_has_code_or_nonce(self, address):
        return self.get_nonce(address) != 0 or self.get_code_hash(
            address) != EMPTY_SHA3

    def delete_account(self, address):
        validate_canonical_address(address, title="Storage Address")

        del self._journaltrie[address]

    def account_exists(self, address):
        validate_canonical_address(address, title="Storage Address")

        return self._journaltrie.get(address, b'') != b''

    def touch_account(self, address):
        validate_canonical_address(address, title="Storage Address")

        account = self._get_account(address)
        self._set_account(address, account)

    def account_is_empty(self, address):
        return not self.account_has_code_or_nonce(
            address) and self.get_balance(
                address
            ) == 0 and self.has_receivable_transactions(address) is False

    def get_account_hash(self, address):
        account = self._get_account(address)
        account_hashable = account.copy(receivable_transactions=())
        account_hashable_encoded = rlp.encode(account_hashable)
        return keccak(account_hashable_encoded)

    #
    # Internal
    #
    def _get_account(self, address):
        rlp_account = self._journaltrie.get(address, b'')
        if rlp_account:
            account = rlp.decode(rlp_account, sedes=Account)
        else:
            account = Account()
        return account

    def _set_account(self, address, account):
        rlp_account = rlp.encode(account, sedes=Account)
        self._journaltrie[address] = rlp_account

    #
    # Record and discard API
    #
    def record(self) -> Tuple[UUID, UUID]:
        return (self._journaldb.record(), self._journaltrie.record())

    def discard(self, changeset: Tuple[UUID, UUID]) -> None:
        db_changeset, trie_changeset = changeset
        self._journaldb.discard(db_changeset)
        self._journaltrie.discard(trie_changeset)

    def commit(self, changeset: Tuple[UUID, UUID]) -> None:
        db_changeset, trie_changeset = changeset
        self._journaldb.commit(db_changeset)
        self._journaltrie.commit(trie_changeset)

    def make_state_root(self) -> Hash32:
        self.logger.debug("Generating AccountDB trie")
        self._journaldb.persist()
        self._journaltrie.persist()
        return self.state_root

    def persist(self, save_state_root=False) -> None:
        self.make_state_root()
        self._batchtrie.commit(apply_deletes=False)
        self._batchdb.commit(apply_deletes=True)
        if save_state_root:
            self.save_current_state_root()

    def _log_pending_accounts(self) -> None:
        accounts_displayed = set()  # type: Set[bytes]
        queued_changes = self._journaltrie.journal.journal_data.items()
        # mypy bug for ordered dict reversibility: https://github.com/python/typeshed/issues/2078
        for checkpoint, accounts in reversed(queued_changes):  # type: ignore
            for address in accounts:
                if address in accounts_displayed:
                    continue
                else:
                    accounts_displayed.add(address)
                    account = self._get_account(address)
                    self.logger.debug(
                        "Account %s: balance %d, nonce %d, storage root %s, code hash %s",
                        encode_hex(address),
                        account.balance,
                        account.nonce,
                        encode_hex(account.storage_root),
                        encode_hex(account.code_hash),
                    )

    def save_current_state_root(self) -> None:
        """
        Saves the current state_root to the database to be loaded later
        """
        self.logger.debug("Saving current state root")
        #if self.state_root==BLANK_ROOT_HASH:
        #    raise ValueError("cannot save state root because it is BLANK_ROOT_HASH")
        current_state_root_lookup_key = SchemaV1.make_current_state_root_lookup_key(
        )

        self.db.set(
            current_state_root_lookup_key,
            rlp.encode(self.state_root, sedes=trie_root),
        )

    @classmethod
    def get_saved_state_root(cls, db) -> Hash32:
        """
        Loads the last saved state root
        """

        current_state_root_lookup_key = SchemaV1.make_current_state_root_lookup_key(
        )
        try:
            loaded_state_root = rlp.decode(db[current_state_root_lookup_key],
                                           sedes=trie_root)
        except KeyError:
            raise ValueError("There is no saved state root to load")

        return loaded_state_root
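A minimal sketch of the record/discard contract described in the docstring above: record() returns a (journaldb, journaltrie) changeset pair and discard() rolls both journals back together. The MemoryDB import path and the 20-byte placeholder address are assumptions.

from hvm.db.backends.memory import MemoryDB   # import path assumed

account_db = AccountDB(MemoryDB())
address = b'\x01' * 20                 # canonical 20-byte address (placeholder)

changeset = account_db.record()        # Tuple[UUID, UUID]
account_db.set_balance(address, 100)
assert account_db.get_balance(address) == 100

account_db.discard(changeset)          # both journals are rolled back together
assert account_db.get_balance(address) == 0

account_db.set_balance(address, 100)
account_db.persist()                   # flushes the journals through the batch DBs into the backing db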
Example #11
def debug_test_1():
    testdb = LevelDB("/home/tommy/.local/share/helios/mainnet/chain/full/")

    testdb = JournalDB(testdb)
    testdb = ReadOnlyDB(testdb)

    chain = MainnetChain(testdb,
                         private_keys[0].public_key.to_canonical_address(),
                         private_keys[0])

    block = chain.get_block_by_hash(
        decode_hex(
            '0x6a8d49885e5f07ea66f722e4ec9ba9630a86f1189257317461196726bee7ea0c'
        ))

    new_chain = chain.get_blocks_on_chain(
        0, 3, decode_hex('0x1d1a2266a15CcB2e70baeB4b75b2c59Da95498ac'))

    print('blocks on chain')
    for cur_block in new_chain:
        print(encode_hex(cur_block.header.hash))

    print()

    newest_root_hash = chain.chain_head_db.get_historical_root_hashes()[-1][1]
    chain.chain_head_db.root_hash = newest_root_hash
    chain_head_hash = chain.chain_head_db.get_chain_head_hash(
        decode_hex('0x1d1a2266a15CcB2e70baeB4b75b2c59Da95498ac'))
    print("chain_head_hash {}".format(encode_hex(chain_head_hash)))

    #
    # now lets delete all but the first block
    #
    print("Deleting all blocks but first")
    chain = MainnetChain(testdb,
                         private_keys[0].public_key.to_canonical_address(),
                         private_keys[0])
    chain.purge_block_and_all_children_and_set_parent_as_chain_head(
        block.header, save_block_head_hash_timestamp=True)

    newest_root_hash = chain.chain_head_db.get_historical_root_hashes()[-1][1]
    chain.chain_head_db.root_hash = newest_root_hash
    chain_head_hash = chain.chain_head_db.get_chain_head_hash(
        decode_hex('0x1d1a2266a15CcB2e70baeB4b75b2c59Da95498ac'))
    print("chain_head_hash {}".format(encode_hex(chain_head_hash)))

    #
    # Now lets import the second block again
    #

    print("Importing second block")
    chain.import_block(
        block,
        allow_replacement=False,
        ensure_block_unchanged=True,
    )

    newest_root_hash = chain.chain_head_db.get_historical_root_hashes()[-1][1]
    chain.chain_head_db.root_hash = newest_root_hash
    chain_head_hash = chain.chain_head_db.get_chain_head_hash(
        decode_hex('0x1d1a2266a15CcB2e70baeB4b75b2c59Da95498ac'))
    print("chain_head_hash {}".format(encode_hex(chain_head_hash)))

def journal_db(memory_db):
    return JournalDB(memory_db)
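The journal_db fixture above pairs naturally with a snapshot/revert test. A short sketch; the memory_db fixture it depends on is assumed to provide a MemoryDB instance.

def test_journal_db_discard_reverts_writes(journal_db):
    changeset = journal_db.record()
    journal_db[b'key'] = b'value'
    journal_db.discard(changeset)
    assert journal_db.get(b'key', b'') == b''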