Example #1
def debug_find_diff_chain_head_hash_between_historical_root_hashes():
    # this is from bootnode 1
    historical_root_hash_time_1 = 1564233000
    historical_root_hash_time_2 = 1564234000

    testdb_before = LevelDB(
        "/home/tommy/.local/share/helios/mainnet/chain/full_before_rebuild/")
    testdb_after = LevelDB(
        "/home/tommy/.local/share/helios/mainnet/chain/full/")
    #testdb = LevelDB("/home/tommy/.local/share/helios/bootnode_1_july_30/mainnet/chain/full/")

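    # wrap both databases in a JournalDB so that any stray writes stay in the
    # in-memory journal (persist() is never called in this debug routine)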
    testdb_before = JournalDB(testdb_before)
    testdb_after = JournalDB(testdb_after)
    #testdb = ReadOnlyDB(testdb)

    chain_before = MainnetChain(
        testdb_before, private_keys[0].public_key.to_canonical_address(),
        private_keys[0])
    chain_after = MainnetChain(
        testdb_after, private_keys[0].public_key.to_canonical_address(),
        private_keys[0])

    historical_root_hashes_before = chain_before.chain_head_db.get_historical_root_hashes()
    historical_root_hashes_after = chain_after.chain_head_db.get_historical_root_hashes()

    print(historical_root_hashes_before)
    print(historical_root_hashes_after)

    _print_chronological_blocks_and_actual_head_hash_diff(
        chain_before, historical_root_hash_time_1, historical_root_hash_time_2)
Example #2
def add_transactions_to_blockchain_db(base_db, tx_list: List):
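    # each tx_key is (sender_priv_key, receive_priv_key, amount, timestamp[, gas_price_in_gwei]);
    # blocks must be imported in chronological order, hence the sort below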
    # sort by time
    tx_list.sort(key=lambda x: x[3])

    for tx_key in tx_list:
        sender_priv_key = tx_key[0]
        receive_priv_key = tx_key[1]
        amount = tx_key[2]
        tx_timestamp = int(tx_key[3])

        if len(tx_key) > 4:
            gas_price = to_wei(tx_key[4], 'gwei')
        else:
            gas_price = to_wei(1, 'gwei')

        sender_chain = TestnetChain(base_db, sender_priv_key.public_key.to_canonical_address(), sender_priv_key)
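        # the dummy chain writes only to a JournalDB wrapper, so the trial
        # import below never touches base_db; the timestamp-modified block is
        # imported into the real chain afterwards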
        dummy_sender_chain = TestnetChain(JournalDB(base_db), sender_priv_key.public_key.to_canonical_address(),
                                          sender_priv_key)

        dummy_sender_chain.create_and_sign_transaction_for_queue_block(
            gas_price=gas_price,
            gas=GAS_TX,
            to=receive_priv_key.public_key.to_canonical_address(),
            value=amount,
            data=b"",
            v=0,
            r=0,
            s=0
        )

        # import the block into the dummy chain to complete it and make sure it is valid
        imported_block = dummy_sender_chain.import_current_queue_block()

        # re-sign the header with the desired timestamp and import it into the real chain
        timestamp_modified_imported_block = imported_block.copy(
            header=imported_block.header.copy(timestamp=tx_timestamp).get_signed(sender_priv_key,
                                                                                 dummy_sender_chain.network_id))
        sender_chain.import_block(timestamp_modified_imported_block, allow_unprocessed=False)

        # then receive the transactions

        dummy_receiver_chain = TestnetChain(JournalDB(base_db), receive_priv_key.public_key.to_canonical_address(),
                                            receive_priv_key)
        dummy_receiver_chain.populate_queue_block_with_receive_tx()
        imported_block = dummy_receiver_chain.import_current_queue_block()

        # re-sign the receive block's header with the desired timestamp and import it into the real chain
        timestamp_modified_imported_block = imported_block.copy(
            header=imported_block.header.copy(timestamp=tx_timestamp).get_signed(receive_priv_key,
                                                                                  dummy_receiver_chain.network_id))
        receiver_chain = TestnetChain(base_db, receive_priv_key.public_key.to_canonical_address(), receive_priv_key)
        receiver_chain.import_block(timestamp_modified_imported_block, allow_unprocessed=False)
Example #3
    def __init__(self, db, state_root=BLANK_ROOT_HASH):
        r"""
        Internal implementation details (subject to rapid change):
        Database entries go through several pipes, like so...

        .. code::

                                                                    -> hash-trie -> storage lookups
                                                                  /
            db > _batchdb ---------------------------> _journaldb ----------------> code lookups
             \
              -> _batchtrie -> _trie -> _trie_cache -> _journaltrie --------------> account lookups

        Journaling sequesters writes at the _journal* attrs ^, until persist is called.

        _batchtrie enables us to prune all trie changes while building
        state, without deleting old trie roots.

        _batchdb and _batchtrie together enable us to make the state root,
        without saving everything to the database.

        _journaldb is a journaling of the keys and values used to store
        code and account storage.

        _trie is a hash-trie, used to generate the state root.

        _trie_cache is a cache tied to the state root of the trie. It
        is important that this cache is checked *after* looking for
        the key in _journaltrie, because the cache is only invalidated
        after a state root change.

        _journaltrie is a journaling of the accounts (an address->rlp_templates mapping,
        rather than the nodes stored by the trie). This enables
        a squashing of all account changes before pushing them into the trie.

        .. NOTE:: There is an opportunity to do something similar for storage

        AccountDB synchronizes the snapshot/revert/persist of both of the
        journals.
        """
        self.db = db
        self._batchdb = BatchDB(db)
        self._batchtrie = BatchDB(db)
        self._journaldb = JournalDB(self._batchdb)
        self._trie = HashTrie(
            HexaryTrie(self._batchtrie, state_root, prune=True))
        self._trie_cache = CacheDB(self._trie)
        self._journaltrie = JournalDB(self._trie_cache)
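The layering above means a write is invisible to the wrapped database until persist() is called. A minimal sketch of that behavior, assuming only the dict-style interface these classes expose throughout this page:

base_db = MemoryDB()
journal_db = JournalDB(base_db)

journal_db[b'key'] = b'value'          # sequestered in the journal
assert journal_db[b'key'] == b'value'  # readable back through the journal
assert b'key' not in base_db           # the wrapped db has not been touched

journal_db.persist()                   # flush the journal downward
assert b'key' in base_db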
Example #4
def db(request):
    base_db = MemoryDB()
    if request.param is JournalDB:
        return JournalDB(base_db)
    elif request.param is BatchDB:
        return BatchDB(base_db)
    elif request.param is MemoryDB:
        return base_db
    else:
        raise Exception("Invariant: unexpected db type {!r}".format(request.param))
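This fixture dispatches on request.param, which only works when it is registered with pytest's params mechanism; the decorator is not shown here. A plausible reconstruction, where the param list is inferred from the branches above:

import pytest

@pytest.fixture(params=[JournalDB, BatchDB, MemoryDB])  # assumed decorator
def db(request):
    ...  # body as shown above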
Example #5
    def __init__(self, db):
        r"""
        Internal implementation details (subject to rapid change):

        Journaling sequesters writes at the _journaldb attr, until persist is called.

        _batchdb enables us to stage writes without immediately saving
        everything to the database.

        _journaldb is a journaling of the keys and values used to store
        code and account storage.

        TODO: add cache
        _trie_cache is a cache tied to the state root of the trie. It
        is important that this cache is checked *after* looking for
        the key in _journaltrie, because the cache is only invalidated
        after a state root change.

        AccountDB synchronizes the snapshot/revert/persist of the
        journal.
        """
        self.db = db
        self._batchdb = BatchDB(db)
        self._journaldb = JournalDB(self._batchdb)
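Tracing one write through both layers shows why two flushes are needed before anything reaches the real database. A sketch, assuming BatchDB exposes a commit() that applies its pending writes to the wrapped db, as in py-evm:

raw_db = MemoryDB()
batch_db = BatchDB(raw_db)
journal_db = JournalDB(batch_db)

journal_db[b'acct'] = b'rlp-bytes'
journal_db.persist()               # journal -> batch; raw_db still empty
assert b'acct' not in raw_db
batch_db.commit()                  # batch -> raw_db (assumed API)
assert raw_db[b'acct'] == b'rlp-bytes'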
Example #6
def create_valid_block_at_timestamp(base_db, private_key, transactions=None, receive_transactions=None, reward_bundle=None, timestamp=None):
    '''
    Creates a valid block at the given timestamp. The transactions and reward bundle must already be valid.
    :param base_db:
    :param private_key:
    :param transactions:
    :param receive_transactions:
    :param reward_bundle:
    :param timestamp: defaults to the current time
    :return: the imported, valid block
    '''
    if timestamp is None:
        timestamp = int(time.time())

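    # the chain operates on a JournalDB wrapper, so this trial import is never
    # persisted to base_db; only the finished block is returned to the caller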
    chain = TestnetChain(JournalDB(base_db), private_key.public_key.to_canonical_address(), private_key)

    queue_block = chain.get_queue_block()
    queue_block = queue_block.copy(header=queue_block.header.copy(timestamp=timestamp),
                                   transactions=transactions,
                                   receive_transactions=receive_transactions,
                                   reward_bundle=reward_bundle)

    valid_block = chain.get_vm(timestamp=timestamp).import_block(queue_block, validate=False, private_key=chain.private_key)

    return valid_block
Example #7
from hvm.db.backends.level import LevelDB
from hvm.db.journal import JournalDB


def fix_blockchain_database_errors(base_db):
    '''
    Checks that each chain's canonical head block matches the head hash saved under the chain head root hash
    :param base_db:
    :return:
    '''
    node_1 = MainnetChain(
        base_db,
        GENESIS_PRIVATE_KEY_FOR_TESTNET.public_key.to_canonical_address(),
        GENESIS_PRIVATE_KEY_FOR_TESTNET)
    chain_head_hashes = node_1.chain_head_db.get_head_block_hashes_list()

    for head_hash in chain_head_hashes:
        address = node_1.chaindb.get_chain_wallet_address_for_block_hash(
            head_hash)
        # make sure the canonical head block matches the expected head_hash
        canonical_head_hash = node_1.chaindb.get_canonical_head_hash(address)

        if canonical_head_hash != head_hash:
            print('Chain head mismatch for address {}'.format(address))
            exit()


base_db = JournalDB(
    LevelDB('/home/tommy/.local/share/helios/instance_0/chain'))
fix_blockchain_database_errors(base_db)
Example #8
def create_dev_fixed_blockchain_database(base_db, key_balance_dict, use_real_genesis=False):
    logger.debug("generating test fixed blockchain db")

    earliest_timestamp = int(time.time())
    required_total_supply = 0
    for balance_timestamp in key_balance_dict.values():
        required_total_supply += balance_timestamp[0]
        if balance_timestamp[1] < earliest_timestamp:
            earliest_timestamp = balance_timestamp[1]

    required_total_supply = required_total_supply*2

    # initialize the db
    if use_real_genesis:
        sender_chain = import_genesis_block(base_db)
    else:
        genesis_params, genesis_state = create_new_genesis_params_and_state(TESTNET_GENESIS_PRIVATE_KEY, required_total_supply, earliest_timestamp - 100000)
        sender_chain = TestnetChain.from_genesis(base_db, TESTNET_GENESIS_PRIVATE_KEY.public_key.to_canonical_address(), genesis_params, genesis_state)

    sender_chain.chaindb.initialize_historical_minimum_gas_price_at_genesis(min_gas_price=1, net_tpc_cap=5)

    prev_timestamp = 0
    for priv_key, balance_timestamp in key_balance_dict.items():
        sender_chain = TestnetChain(base_db, TESTNET_GENESIS_PRIVATE_KEY.public_key.to_canonical_address(), TESTNET_GENESIS_PRIVATE_KEY)

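        # as in the other examples, the dummy chain writes only to a JournalDB
        # wrapper, so the trial import below leaves base_db untouched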
        dummy_sender_chain = TestnetChain(JournalDB(base_db), TESTNET_GENESIS_PRIVATE_KEY.public_key.to_canonical_address(), TESTNET_GENESIS_PRIVATE_KEY)

        balance = balance_timestamp[0]
        timestamp = balance_timestamp[1]
        if timestamp < prev_timestamp:
            raise ValueError("timestamps must be in ascending order")
        prev_timestamp = timestamp

        receiver_privkey = priv_key

        dummy_sender_chain.create_and_sign_transaction_for_queue_block(
                gas_price=0x01,
                gas=0x0c3500,
                to=receiver_privkey.public_key.to_canonical_address(),
                value=balance,
                data=b"",
                v=0,
                r=0,
                s=0
                )

        # import the block into the dummy chain to complete it and make sure it is valid
        imported_block = dummy_sender_chain.import_current_queue_block()

        # re-sign the header with the desired timestamp and import it into the real chain
        timestamp_modified_imported_block = imported_block.copy(header = imported_block.header.copy(timestamp = timestamp).get_signed(TESTNET_GENESIS_PRIVATE_KEY, dummy_sender_chain.network_id))
        sender_chain.import_block(timestamp_modified_imported_block, allow_unprocessed = False)


        # then receive the transactions
        receiver_chain = TestnetChain(base_db, receiver_privkey.public_key.to_canonical_address(), receiver_privkey)
        dummy_receiver_chain = TestnetChain(JournalDB(base_db), receiver_privkey.public_key.to_canonical_address(), receiver_privkey)
        dummy_receiver_chain.populate_queue_block_with_receive_tx()
        imported_block = dummy_receiver_chain.import_current_queue_block()

        # re-sign the receive block's header with the desired timestamp and import it into the real chain
        timestamp_modified_imported_block = imported_block.copy(header=imported_block.header.copy(timestamp=timestamp).get_signed(receiver_privkey, dummy_receiver_chain.network_id))
        receiver_chain.import_block(timestamp_modified_imported_block, allow_unprocessed=False)

    logger.debug("finished creating fixed blockchain")
Example #9
def debug_test_1():
    testdb = LevelDB("/home/tommy/.local/share/helios/mainnet/chain/full/")

    testdb = JournalDB(testdb)
    testdb = ReadOnlyDB(testdb)
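    # the JournalDB + ReadOnlyDB wrappers presumably keep the destructive
    # operations below from ever reaching the on-disk LevelDB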

    chain = MainnetChain(testdb,
                         private_keys[0].public_key.to_canonical_address(),
                         private_keys[0])

    block = chain.get_block_by_hash(
        decode_hex(
            '0x6a8d49885e5f07ea66f722e4ec9ba9630a86f1189257317461196726bee7ea0c'
        ))

    blocks_on_chain = chain.get_blocks_on_chain(
        0, 3, decode_hex('0x1d1a2266a15CcB2e70baeB4b75b2c59Da95498ac'))

    print('blocks on chain')
    for cur_block in blocks_on_chain:
        print(encode_hex(cur_block.header.hash))

    print()

    newest_root_hash = chain.chain_head_db.get_historical_root_hashes()[-1][1]
    chain.chain_head_db.root_hash = newest_root_hash
    chain_head_hash = chain.chain_head_db.get_chain_head_hash(
        decode_hex('0x1d1a2266a15CcB2e70baeB4b75b2c59Da95498ac'))
    print("chain_head_hash {}".format(encode_hex(chain_head_hash)))

    #
    # now let's delete all blocks except the first
    #
    print("Deleting all blocks but first")
    chain = MainnetChain(testdb,
                         private_keys[0].public_key.to_canonical_address(),
                         private_keys[0])
    chain.purge_block_and_all_children_and_set_parent_as_chain_head(
        block.header, save_block_head_hash_timestamp=True)

    newest_root_hash = chain.chain_head_db.get_historical_root_hashes()[-1][1]
    chain.chain_head_db.root_hash = newest_root_hash
    chain_head_hash = chain.chain_head_db.get_chain_head_hash(
        decode_hex('0x1d1a2266a15CcB2e70baeB4b75b2c59Da95498ac'))
    print("chain_head_hash {}".format(encode_hex(chain_head_hash)))

    #
    # Now let's import the second block again
    #

    print("Importing second block")
    chain.import_block(
        block,
        allow_replacement=False,
        ensure_block_unchanged=True,
    )

    newest_root_hash = chain.chain_head_db.get_historical_root_hashes()[-1][1]
    chain.chain_head_db.root_hash = newest_root_hash
    chain_head_hash = chain.chain_head_db.get_chain_head_hash(
        decode_hex('0x1d1a2266a15CcB2e70baeB4b75b2c59Da95498ac'))
    print("chain_head_hash {}".format(encode_hex(chain_head_hash)))
Example #10
def journal_db(memory_db):
    return JournalDB(memory_db)