def debug_find_diff_chain_head_hash_between_historical_root_hashes():
    """Debug helper (bootnode 1 data): compare the historical chain-head root
    hashes of the database taken before a rebuild against the current one,
    then print the chronological-block / head-hash diff for one time window."""
    # Window of historical root-hash timestamps to diff.
    window_start = 1564233000
    window_end = 1564234000

    db_before = JournalDB(LevelDB(
        "/home/tommy/.local/share/helios/mainnet/chain/full_before_rebuild/"))
    db_after = JournalDB(LevelDB(
        "/home/tommy/.local/share/helios/mainnet/chain/full/"))

    chain_before = MainnetChain(
        db_before, private_keys[0].public_key.to_canonical_address(), private_keys[0])
    chain_after = MainnetChain(
        db_after, private_keys[0].public_key.to_canonical_address(), private_keys[0])

    root_hashes_before = chain_before.chain_head_db.get_historical_root_hashes()
    root_hashes_after = chain_after.chain_head_db.get_historical_root_hashes()
    print(root_hashes_before)
    print(root_hashes_after)

    _print_chronological_blocks_and_actual_head_hash_diff(
        chain_before, window_start, window_end)
async def test_import_block_with_high_gas(request, event_loop):
    """A node with min gas price 100 should still import blocks whose
    transactions pay a sufficient gas price (101 here)."""
    simulate_importing_from_rpc = False

    # Keep the transaction well inside the additive-sync window so the block
    # is not rejected for being too old.
    # Blocks with timestamps before
    # time.time() - ADDITIVE_SYNC_MODE_CUTOFF - TIME_BETWEEN_HEAD_HASH_SAVE
    # should be rejected.
    new_tx_time = int(time.time() - ADDITIVE_SYNC_MODE_CUTOFF / 2)
    tx_list = [[GENESIS_PRIVATE_KEY, RECEIVER, 100, new_tx_time, 101]]

    new_blocks_db = get_fresh_db()
    add_transactions_to_blockchain_db(new_blocks_db, tx_list)

    node_new_blocks = MainnetChain(
        new_blocks_db, GENESIS_PRIVATE_KEY.public_key.to_canonical_address())
    window_start = int(new_tx_time / TIME_BETWEEN_HEAD_HASH_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE
    new_blocks = node_new_blocks.get_all_chronological_blocks_for_window(window_start)

    await _setup_test_import_blocks(
        request, event_loop, new_blocks_db, new_blocks,
        simulate_importing_from_rpc,
        expect_blocks_to_import=True,
        node_min_gas_price=100)
def ensure_chronological_block_hashes_are_identical(base_db_1, base_db_2):
    """Assert that both databases report identical historical chain-head
    root-hash timestamp lists."""
    genesis_address = GENESIS_PRIVATE_KEY.public_key.to_canonical_address()
    chain_1 = MainnetChain(base_db_1, genesis_address, GENESIS_PRIVATE_KEY)
    chain_2 = MainnetChain(base_db_2, genesis_address, GENESIS_PRIVATE_KEY)

    root_hashes_1 = chain_1.chain_head_db.get_historical_root_hashes()
    root_hashes_2 = chain_2.chain_head_db.get_historical_root_hashes()
    assert root_hashes_1 == root_hashes_2
def create_mainnet_genesis_transactions(base_db):
    """Populate *base_db* with the initial mainnet fund-distribution
    transactions: main account allocations plus bootnode/masternode stake."""
    import sys
    sys.path.append('/d:/Google Drive/forex/blockchain_coding/Helios/prototype desktop/helios_deploy/')
    from deploy_params import (
        genesis_private_key,
        airdrop_private_key,
        bounties_private_key,
        exchange_listings_private_key,
        dapp_incubator_private_key,
        bootnode_1_private_key,
        bootnode_2_private_key,
        masternode_1_private_key,
    )

    chain = MainnetChain(base_db,
                         TESTNET_GENESIS_PRIVATE_KEY.public_key.to_canonical_address(),
                         TESTNET_GENESIS_PRIVATE_KEY)
    genesis_block_timestamp = chain.genesis_block_timestamp
    block_gap = chain.get_vm(timestamp=genesis_block_timestamp).min_time_between_blocks
    start_time = genesis_block_timestamp + TIME_BETWEEN_HEAD_HASH_SAVE

    # (sender, receiver, amount in ether, block-gap multiple after start_time)
    transfers = [
        # main accounts
        (genesis_private_key, airdrop_private_key, 110000000, 1),
        (genesis_private_key, dapp_incubator_private_key, 70000000, 2),
        (genesis_private_key, bounties_private_key, 50000000, 3),
        (genesis_private_key, exchange_listings_private_key, 40000000, 4),
        # stake for bootnodes (this is large for now to prevent 51% attacks
        # until the network has grown to a sufficiently stable size)
        (airdrop_private_key, bootnode_1_private_key, 10000000, 6),
        (airdrop_private_key, bootnode_2_private_key, 10000000, 7),
        (airdrop_private_key, masternode_1_private_key, 10000000, 8),
    ]
    tx_list = [
        [sender, receiver, to_wei(amount, 'ether'), start_time + block_gap * n]
        for sender, receiver, amount, n in transfers
    ]

    add_transactions_to_blockchain_db(base_db, tx_list)
def test_invalid_proofs_timestamp_in_past():
    """A type-2 reward block must fail validation when one of its staking-score
    proofs carries a stale timestamp (here: 10 minutes in the past)."""
    testdb = create_reward_test_blockchain_database()
    chain = MainnetChain(testdb, private_keys[0].public_key.to_canonical_address(), private_keys[0])
    min_time_between_blocks = chain.get_vm(timestamp=Timestamp(int(time.time()))).min_time_between_blocks

    # Send key 0 a transaction dated in the past so its chain has activity.
    tx_list = [[private_keys[1], private_keys[0], to_wei(1, 'ether'), int(int(time.time())-min_time_between_blocks*10)]]
    add_transactions_to_blockchain_db(testdb, tx_list)

    chain = MainnetChain(testdb, private_keys[0].public_key.to_canonical_address(), private_keys[0])
    required_number_of_proofs_for_reward_type_2_proof = chain.get_consensus_db(timestamp=Timestamp(int(time.time()))).required_number_of_proofs_for_reward_type_2_proof

    node_staking_scores = []

    # First score/proof: its timestamp is 10 minutes in the PAST — this is the
    # invalid proof the test is about. (A previous comment here said "far in
    # future", which contradicted the code and the test name.)
    current_private_key = private_keys[1]
    node_staking_score = NodeStakingScore(
        recipient_node_wallet_address=private_keys[0].public_key.to_canonical_address(),
        score=int(1000000),
        since_block_number=0,
        timestamp=int(time.time())-60*10,
        head_hash_of_sender_chain=chain.chaindb.get_canonical_head_hash(
            current_private_key.public_key.to_canonical_address()),
        v=0,
        r=0,
        s=0,
    )
    signed_node_staking_score = node_staking_score.get_signed(current_private_key, MAINNET_NETWORK_ID)
    node_staking_scores.append(signed_node_staking_score)

    score = 100000
    for i in range(2, 10):
        # Remaining proofs: current timestamps, each signed by a different key.
        current_private_key = private_keys[i]
        node_staking_score = NodeStakingScore(
            recipient_node_wallet_address=private_keys[0].public_key.to_canonical_address(),
            score=int(score-i),
            since_block_number=1,
            timestamp=int(time.time()),
            head_hash_of_sender_chain=chain.chaindb.get_canonical_head_hash(
                current_private_key.public_key.to_canonical_address()),
            v=0,
            r=0,
            s=0,
        )
        signed_node_staking_score = node_staking_score.get_signed(current_private_key, MAINNET_NETWORK_ID)
        node_staking_scores.append(signed_node_staking_score)

    # Now we try to import the reward block with instance 0; the stale proof
    # must make validation fail.
    reward_chain = MainnetChain(testdb, private_keys[0].public_key.to_canonical_address(), private_keys[0])
    with pytest.raises(ValidationError):
        reward_chain.import_current_queue_block_with_reward(node_staking_scores)
async def _setup_test_import_blocks(request, event_loop, new_blocks_db, new_blocks, simulate_importing_from_rpc, expect_blocks_to_import, node_min_gas_price=1):
    """Shared harness: sync a fresh client and server while importing
    *new_blocks*, then verify both databases match each other and the
    expected outcome database.

    :param new_blocks_db: database the blocks originate from
    :param new_blocks: blocks handed to the sync machinery for import
    :param simulate_importing_from_rpc: import path selector passed through
    :param expect_blocks_to_import: if True, final DBs must match new_blocks_db;
        otherwise they must match a fresh (empty) DB
    :param node_min_gas_price: minimum gas price both nodes are initialized with
    """
    client_db, server_db, fresh_db = get_fresh_db(), get_fresh_db(), get_fresh_db()

    node_1 = MainnetChain(
        server_db, GENESIS_PRIVATE_KEY.public_key.to_canonical_address())
    node_1.chaindb.initialize_historical_minimum_gas_price_at_genesis(
        min_gas_price=node_min_gas_price, net_tpc_cap=100, tpc=1)
    node_2 = MainnetChain(
        client_db, GENESIS_PRIVATE_KEY.public_key.to_canonical_address())
    node_2.chaindb.initialize_historical_minimum_gas_price_at_genesis(
        min_gas_price=node_min_gas_price, net_tpc_cap=100, tpc=1)

    # Choose which database the synced result must equal.
    if expect_blocks_to_import:
        expected_db = new_blocks_db
    else:
        expected_db = fresh_db

    async def waiting_function(client, server):
        # Poll until newest root hashes agree and both import queues drain.
        SYNC_TIMEOUT = 100

        async def wait_loop():
            while ((client.chain_head_db.get_historical_root_hashes()[-1][1] !=
                    server.chain_head_db.get_historical_root_hashes()[-1][1])
                   or not client._new_blocks_to_import.empty()
                   or not server._new_blocks_to_import.empty()):
                await asyncio.sleep(0.5)

        await asyncio.wait_for(wait_loop(), SYNC_TIMEOUT)

    def validation_function(base_db_1, base_db_2):
        ensure_blockchain_databases_identical(base_db_1, base_db_2)
        # In this case they are valid blocks so we expect them to match the
        # database where the blocks came from
        ensure_blockchain_databases_identical(base_db_1, expected_db)

    await _test_sync_with_variable_sync_parameters(
        request, event_loop, client_db, server_db,
        validation_function=validation_function,
        waiting_function=waiting_function,
        blocks_to_import=new_blocks,
        blocks_to_import_from_rpc=simulate_importing_from_rpc)
def ensure_blockchain_databases_identical(base_db_1, base_db_2):
    """Assert two blockchain databases are fully identical: same chains,
    same account state, same blocks, same descendant structure."""
    genesis_address = GENESIS_PRIVATE_KEY.public_key.to_canonical_address()
    node_1 = MainnetChain(base_db_1, genesis_address, GENESIS_PRIVATE_KEY)
    node_2 = MainnetChain(base_db_2, genesis_address, GENESIS_PRIVATE_KEY)

    # Collect the wallet address of every chain on node 1.
    head_hashes = node_1.chain_head_db.get_head_block_hashes_list()
    wallet_addresses = [
        node_1.chaindb.get_chain_wallet_address_for_block_hash(h)
        for h in head_hashes
    ]

    # Matching head-hash lists guarantees both nodes hold the same chains.
    assert head_hashes == node_2.chain_head_db.get_head_block_hashes_list()

    for wallet_address in wallet_addresses:
        # Account state must hash identically on both nodes.
        hash_1 = node_1.get_vm().state.account_db.get_account_hash(wallet_address)
        hash_2 = node_2.get_vm().state.account_db.get_account_hash(wallet_address)
        assert hash_1 == hash_2

        # Whole chains must compare equal.
        chain_1 = node_1.get_all_blocks_on_chain(wallet_address)
        chain_2 = node_2.get_all_blocks_on_chain(wallet_address)
        assert chain_1 == chain_2

        # Deeper check: block hashes and descendant sets match one-to-one.
        for block_1, block_2 in zip(chain_1, chain_2):
            assert block_1.hash == block_2.hash
            descendants_1 = node_1.chaindb.get_all_descendant_block_hashes(block_1.hash)
            descendants_2 = node_2.chaindb.get_all_descendant_block_hashes(block_2.hash)
            assert descendants_1 == descendants_2
async def test_additive_sync_3(request, event_loop):
    """Additive sync between two nodes that each start from a different
    random 10-block long-time blockchain database."""
    client_db = get_random_long_time_blockchain_db(10)
    server_db = get_random_long_time_blockchain_db(10)

    # Both nodes need the min-gas-price system initialized.
    for db in (server_db, client_db):
        node = MainnetChain(
            db, GENESIS_PRIVATE_KEY.public_key.to_canonical_address())
        node.chaindb.initialize_historical_minimum_gas_price_at_genesis(
            min_gas_price=1, net_tpc_cap=100, tpc=1)

    await _test_sync_with_variable_sync_parameters(
        request, event_loop, client_db, server_db,
        ensure_blockchain_databases_identical, ADDITIVE_SYNC_STAGE_ID)
def initialize_database(chain_config: ChainConfig, chaindb: AsyncChainDB) -> None:
    """Ensure *chaindb* is initialized: if no canonical head exists yet,
    write the mainnet genesis block.

    :param chain_config: node configuration (network id, wallet address)
    :param chaindb: chain database to initialize
    :raises NotImplementedError: for any network other than mainnet
    """
    try:
        chaindb.get_canonical_head(chain_address=GENESIS_WALLET_ADDRESS)
    except CanonicalHeadNotFound:
        # No genesis yet — write one for the configured network.
        if chain_config.network_id == MAINNET_NETWORK_ID:
            MainnetChain.from_genesis(chaindb.db,
                                      chain_config.node_wallet_address,
                                      MAINNET_GENESIS_PARAMS,
                                      MAINNET_GENESIS_STATE)
        else:
            # TODO: add genesis data to ChainConfig and, if it's present, use
            # it here to initialize the chain.
            # FIX: the old message claimed ropsten support, but only mainnet
            # is handled above.
            raise NotImplementedError(
                "Only the mainnet chain is currently supported")
def print_blockchain_database(base_db):
    """Debug helper: dump every chain and every block in *base_db* to stdout."""
    node_1 = MainnetChain(base_db,
                          GENESIS_PRIVATE_KEY.public_key.to_canonical_address(),
                          GENESIS_PRIVATE_KEY)
    chain_head_hashes = node_1.chain_head_db.get_head_block_hashes_list()

    for i, head_hash in enumerate(chain_head_hashes):
        print("Chain number {}".format(i))
        chain = node_1.get_all_blocks_on_chain_by_head_block_hash(head_hash)
        for j, block in enumerate(chain):
            # BUG FIX: previously printed the chain index (i) here instead of
            # the block index (j), so every block showed the same number.
            print("Block number {}".format(j))
            print(block_to_dict(block, True, node_1))
def create_block_params():
    """Create a fresh TPC-cap test genesis chain, import one signed
    transaction as a block, and print the block's dict form (used to
    generate fixture parameters)."""
    from hvm.chains.mainnet import (
        MAINNET_TPC_CAP_TEST_GENESIS_PARAMS,
        MAINNET_TPC_CAP_TEST_GENESIS_STATE,
        TPC_CAP_TEST_GENESIS_PRIVATE_KEY,
    )

    db = MemoryDB()
    chain = MainnetChain.from_genesis(
        db,
        TPC_CAP_TEST_GENESIS_PRIVATE_KEY.public_key.to_canonical_address(),
        MAINNET_TPC_CAP_TEST_GENESIS_PARAMS,
        MAINNET_TPC_CAP_TEST_GENESIS_STATE,
        private_key=TPC_CAP_TEST_GENESIS_PRIVATE_KEY)

    receiver_privkey = keys.PrivateKey(random_private_keys[0])
    chain.create_and_sign_transaction_for_queue_block(
        gas_price=0x01,
        gas=0x0c3500,
        to=receiver_privkey.public_key.to_canonical_address(),
        value=1000,
        data=b"",
        v=0,
        r=0,
        s=0)

    imported_block = chain.import_current_queue_block()
    print(imported_block.to_dict())

# create_block_params()
# sys.exit()
async def test_consensus_match_sync_4(request, event_loop):
    '''
    Client and server databases match up to a point within the consensus
    match stage, but the server has additional blocks after that time.

    :param request:
    :param event_loop:
    :return:
    '''
    base = int(time.time() / 1000) * 1000
    genesis_time = base - 1000 * 900
    equal_to_time = base - 1000 * 890
    new_blocks_start_time = base - 1000 * 25
    new_blocks_end_time = base - 1000 * 3

    server_db = get_random_blockchain_to_time(genesis_time, equal_to_time)
    client_db = MemoryDB(kv_store=server_db.kv_store.copy())

    # Server-only blocks: some right after the shared history, some recent.
    add_random_transactions_to_db_for_time_window(
        server_db, equal_to_time, equal_to_time + 1000 * 5)
    add_random_transactions_to_db_for_time_window(
        server_db, new_blocks_start_time, new_blocks_end_time)

    client_node = MainnetChain(
        client_db, GENESIS_PRIVATE_KEY.public_key.to_canonical_address())
    client_node.chaindb.initialize_historical_minimum_gas_price_at_genesis(
        min_gas_price=1, net_tpc_cap=100, tpc=1)

    await _test_sync_with_variable_sync_parameters(
        request, event_loop, client_db, server_db,
        ensure_blockchain_databases_identical)
def test_invalid_proofs_all_from_same_wallet():
    """A type-2 reward block must fail validation when every staking-score
    proof is signed by the same wallet."""
    testdb = create_reward_test_blockchain_database()
    chain = MainnetChain(testdb, private_keys[0].public_key.to_canonical_address(), private_keys[0])
    required_number_of_proofs_for_reward_type_2_proof = chain.get_consensus_db(timestamp=Timestamp(int(time.time()))).required_number_of_proofs_for_reward_type_2_proof

    node_staking_scores = []

    score = 1000000
    for i in range(1, 10):
        # Every proof is deliberately signed by the SAME key (instance 1);
        # only the score value varies.
        current_private_key = private_keys[1]
        node_staking_score = NodeStakingScore(
            recipient_node_wallet_address=private_keys[0].public_key.to_canonical_address(),
            score=int(score-i),
            since_block_number=0,
            timestamp=int(time.time()),
            head_hash_of_sender_chain=chain.chaindb.get_canonical_head_hash(
                current_private_key.public_key.to_canonical_address()),
            v=0,
            r=0,
            s=0,
        )
        signed_node_staking_score = node_staking_score.get_signed(current_private_key, MAINNET_NETWORK_ID)
        node_staking_scores.append(signed_node_staking_score)

    # Now we try to import the reward block with instance 0
    reward_chain = MainnetChain(testdb, private_keys[0].public_key.to_canonical_address(), private_keys[0])
    with pytest.raises(ValidationError):
        reward_chain.import_current_queue_block_with_reward(node_staking_scores)
def create_reward_test_blockchain_database():
    """Build a MemoryDB blockchain where ten keys each hold exactly the stake
    required for a type-2 reward proof, old enough to be mature for staking.

    :return: the populated MemoryDB
    """
    testdb = MemoryDB()
    chain = MainnetChain(testdb,
                         GENESIS_PRIVATE_KEY.public_key.to_canonical_address(),
                         GENESIS_PRIVATE_KEY)

    coin_mature_time = chain.get_vm(timestamp=Timestamp(int(time.time()))).consensus_db.coin_mature_time_for_staking
    min_time_between_blocks = chain.get_vm(timestamp=Timestamp(int(time.time()))).min_time_between_blocks
    required_stake = chain.get_consensus_db(timestamp=Timestamp(int(time.time()))).required_stake_for_reward_type_2_proof

    # Start far enough back that all balances are mature for staking.
    start = int(time.time()) - max(coin_mature_time * 2, min_time_between_blocks * 20)
    key_balance_dict = {
        private_keys[i]: (required_stake, start + min_time_between_blocks * i)
        for i in range(10)
    }
    create_dev_fixed_blockchain_database(testdb, key_balance_dict)
    return testdb
async def test_consensus_match_sync_2(request, event_loop):
    """Consensus-match sync where the server starts fresh and the client
    already holds a random 25-block long-time database."""
    server_db = get_fresh_db()
    client_db = get_random_long_time_blockchain_db(25)

    client_node = MainnetChain(
        client_db, GENESIS_PRIVATE_KEY.public_key.to_canonical_address())
    client_node.chaindb.initialize_historical_minimum_gas_price_at_genesis(
        min_gas_price=1, net_tpc_cap=100, tpc=1)

    await _test_sync_with_variable_sync_parameters(
        request, event_loop, client_db, server_db,
        ensure_blockchain_databases_identical, CONSENSUS_MATCH_SYNC_STAGE_ID)
def create_dev_test_blockchain_database_with_given_transactions(base_db, tx_list: List, use_real_genesis = False):
    """Initialize *base_db* with a genesis block funded to cover *tx_list*,
    then apply the transactions.

    Each tx entry is [sender_key, receiver_key, amount, timestamp,
    (optional gas price in gwei)].
    """
    # Process transactions in chronological order.
    tx_list.sort(key=lambda x: x[3])

    genesis_chain_stake = 100000000000000000

    # Budget gas for every tx: explicit gwei price when provided, else 1 gwei.
    total_required_gas = sum(
        (to_wei(tx[4], 'gwei') if len(tx) > 4 else to_wei(1, 'gwei')) * GAS_TX
        for tx in tx_list)

    earliest_timestamp = tx_list[0][3]

    # Genesis must cover everything it sends plus its stake plus all gas.
    required_total_supply = (
        sum(tx[2] for tx in tx_list if tx[0] == GENESIS_PRIVATE_KEY)
        + genesis_chain_stake
        + total_required_gas)

    if use_real_genesis:
        import_genesis_block(base_db)
    else:
        genesis_params, genesis_state = create_new_genesis_params_and_state(
            GENESIS_PRIVATE_KEY, required_total_supply,
            earliest_timestamp - 100000)
        # import genesis block
        MainnetChain.from_genesis(
            base_db,
            GENESIS_PRIVATE_KEY.public_key.to_canonical_address(),
            genesis_params, genesis_state)

    add_transactions_to_blockchain_db(base_db, tx_list)
def debug_test_2():
    """Debug helper: print the block hashes in the chronological window
    starting at 1564233000 from a read-only view of the local mainnet db."""
    db = ReadOnlyDB(LevelDB("/home/tommy/.local/share/helios/mainnet/chain/full/"))
    chain = MainnetChain(db,
                         private_keys[0].public_key.to_canonical_address(),
                         private_keys[0])
    window = chain.chain_head_db.load_chronological_block_window(1564233000)
    print([encode_hex(entry[1]) for entry in window])
def create_new_genesis_params_and_state():
    """Build, sign, and print fresh mainnet genesis header params and state
    for the deploy `genesis_private_key`, with a 350M HLS total supply.

    NOTE(review): this shares its name with the parameterized
    create_new_genesis_params_and_state(private_key, ...) elsewhere in this
    file — whichever is defined later shadows the other; confirm intent.
    """
    #
    # GENESIS STATE, HEADER PARAMS
    #
    new_genesis_private_key = genesis_private_key
    # FIX: corrected typo in the printed message ("Ceating" -> "Creating").
    print("Creating new genesis params and state for genesis wallet address:")
    print(new_genesis_private_key.public_key.to_canonical_address())

    total_supply = 350000000 * 10**18
    new_mainnet_genesis_params = {
        'chain_address': new_genesis_private_key.public_key.to_canonical_address(),
        'parent_hash': constants.GENESIS_PARENT_HASH,
        'transaction_root': constants.BLANK_ROOT_HASH,
        'receive_transaction_root': constants.BLANK_ROOT_HASH,
        'receipt_root': constants.BLANK_ROOT_HASH,
        'bloom': 0,
        'block_number': constants.GENESIS_BLOCK_NUMBER,
        'gas_limit': constants.GENESIS_GAS_LIMIT,
        'gas_used': 0,
        'timestamp': 1556733839,
        'extra_data': constants.GENESIS_EXTRA_DATA,
        'reward_hash': constants.GENESIS_REWARD_HASH,
        'account_balance': total_supply,
    }

    new_genesis_state = {
        new_genesis_private_key.public_key.to_canonical_address(): {
            "balance": total_supply,
            "code": b"",
            "nonce": 0,
            "storage": {}
        }
    }

    # Sign the genesis header against a throwaway database.
    testdb1 = MemoryDB()
    genesis_header = MainnetChain.create_genesis_header(
        testdb1,
        new_genesis_private_key.public_key.to_canonical_address(),
        new_genesis_private_key,
        new_mainnet_genesis_params,
        new_genesis_state)

    print()
    print("New completed and signed genesis header params")
    # Collect every header field into a plain dict for copy-pasting.
    parameter_names = list(dict(genesis_header._meta.fields).keys())
    header_params = {}
    for parameter_name in parameter_names:
        header_params[parameter_name] = getattr(genesis_header, parameter_name)
    print(header_params)
    print()
def test_invalid_proofs_no_proofs():
    """Importing a reward block with an empty proof list must raise
    RewardAmountRoundsToZero."""
    testdb = create_reward_test_blockchain_database()
    chain = MainnetChain(testdb, private_keys[0].public_key.to_canonical_address(), private_keys[0])
    min_time_between_blocks = chain.get_vm(timestamp=Timestamp(int(time.time()))).min_time_between_blocks

    # Give key 0 a recent incoming transaction so its chain has activity.
    tx_list = [[private_keys[1], private_keys[0], to_wei(1, 'ether'),
                int(int(time.time()) - min_time_between_blocks * 10)]]
    add_transactions_to_blockchain_db(testdb, tx_list)

    # FIX: removed dead code — a redundant re-created chain and an unused
    # required_number_of_proofs_for_reward_type_2_proof lookup.

    # Now we try to import the reward block with instance 0.
    reward_chain = MainnetChain(testdb, private_keys[0].public_key.to_canonical_address(), private_keys[0])
    with pytest.raises(RewardAmountRoundsToZero):
        reward_chain.import_current_queue_block_with_reward([])
def create_valid_block_at_timestamp(base_db, private_key, transactions = None, receive_transactions = None, reward_bundle = None, timestamp = None):
    '''
    Create a valid, signed block at the given timestamp containing the given
    transactions and reward bundle. The transactions and reward bundle must
    already be valid. The database is wrapped in a JournalDB, so nothing is
    persisted to base_db.

    :param base_db: database to build the block against
    :param private_key: chain owner's key, used to sign the block
    :param transactions: send transactions to include
    :param receive_transactions: receive transactions to include
    :param reward_bundle: reward bundle to include
    :param timestamp: block timestamp; defaults to the current time
    :return: the imported block (validation is skipped)
    '''
    # FIX: sentinel check was `== None`; use the identity test `is None`.
    if timestamp is None:
        timestamp = int(time.time())

    chain = MainnetChain(JournalDB(base_db),
                         private_key.public_key.to_canonical_address(),
                         private_key)

    queue_block = chain.get_queue_block()
    queue_block = queue_block.copy(
        header=queue_block.header.copy(timestamp=timestamp),
        transactions=transactions,
        receive_transactions=receive_transactions,
        reward_bundle=reward_bundle)

    # Import without validation — the caller guarantees the contents are valid.
    valid_block = chain.get_vm(timestamp=timestamp).import_block(
        queue_block, validate=False, private_key=chain.private_key)
    return valid_block
async def test_additive_sync_4(request, event_loop):
    '''
    Client and server databases match up to a recent point, but the server's
    db has additional blocks after that time.

    :param request:
    :param event_loop:
    :return:
    '''
    base = int(time.time() / 1000) * 1000
    genesis_time = base - 1000 * 25
    equal_to_time = base - 1000 * 2

    server_db = get_random_blockchain_to_time(genesis_time, equal_to_time)
    client_db = MemoryDB(kv_store=server_db.kv_store.copy())

    # Server-only transactions spread over the recent past.
    tx_list = [[GENESIS_PRIVATE_KEY, RECEIVER, 100, int(time.time() - 2000)],
               [GENESIS_PRIVATE_KEY, RECEIVER, 100, int(time.time() - 1500)],
               [GENESIS_PRIVATE_KEY, RECEIVER, 100, int(time.time() - 1000)]]
    add_transactions_to_blockchain_db(server_db, tx_list)

    # Both nodes need the min-gas-price system initialized.
    for db in (client_db, server_db):
        node = MainnetChain(
            db, GENESIS_PRIVATE_KEY.public_key.to_canonical_address())
        node.chaindb.initialize_historical_minimum_gas_price_at_genesis(
            min_gas_price=1, net_tpc_cap=100, tpc=1)

    await _test_sync_with_variable_sync_parameters(
        request, event_loop, client_db, server_db,
        ensure_blockchain_databases_identical)
def fix_blockchain_database_errors(base_db):
    '''
    Checks to make sure all chains match what is expected from the saved
    chain head root hash, stopping with a descriptive error on the first
    mismatch.

    :param base_db:
    :return:
    '''
    node_1 = MainnetChain(base_db, GENESIS_PRIVATE_KEY.public_key.to_canonical_address(), GENESIS_PRIVATE_KEY)
    chain_head_hashes = node_1.chain_head_db.get_head_block_hashes_list()

    for head_hash in chain_head_hashes:
        address = node_1.chaindb.get_chain_wallet_address_for_block_hash(head_hash)

        # make sure the canonical head matches the expected head_hash
        # FIX: local was named chain_head_header but actually holds a hash.
        canonical_head_hash = node_1.chaindb.get_canonical_head_hash(address)
        if canonical_head_hash != head_hash:
            # FIX: replaced an uninformative profanity print with a message
            # identifying the broken chain before stopping.
            print("Chain head mismatch for address {}: chain head db has {} "
                  "but the canonical head is {}".format(
                      encode_hex(address), encode_hex(head_hash),
                      encode_hex(canonical_head_hash)))
            exit()
async def test_fast_sync_2(request, event_loop):
    """Fast sync where the client holds extra blocks beyond the shared
    history with the server."""
    base = int(time.time() / 1000) * 1000
    genesis_time = base - 1000 * 1100
    equal_to_time = base - 1000 * 1095

    server_db = get_random_blockchain_to_time(genesis_time, equal_to_time)
    client_db = MemoryDB(kv_store=server_db.kv_store.copy())

    # Give the client blocks the server does not have.
    add_random_transactions_to_db_for_time_window(
        client_db, equal_to_time, equal_to_time + 1000 * 5)

    client_node = MainnetChain(
        client_db, GENESIS_PRIVATE_KEY.public_key.to_canonical_address())
    client_node.chaindb.initialize_historical_minimum_gas_price_at_genesis(
        min_gas_price=1, net_tpc_cap=100, tpc=1)

    await _test_sync_with_variable_sync_parameters(
        request, event_loop, client_db, server_db,
        ensure_blockchain_databases_identical, FAST_SYNC_STAGE_ID)
def create_new_genesis_params_and_state(private_key, total_supply = 100000000 * 10 ** 18, timestamp = None):
    """Build and sign genesis header params and state granting *total_supply*
    to *private_key*'s address.

    :param private_key: key that owns the genesis chain
    :param total_supply: balance assigned to the genesis address (in wei)
    :param timestamp: genesis timestamp; defaults to the current time
    :return: (header_params dict, genesis_state dict)
    """
    # FIX: the old default `timestamp=int(time.time())` was evaluated once at
    # import time, freezing the default for the life of the process; resolve
    # it at call time instead.
    if timestamp is None:
        timestamp = int(time.time())

    print("CREATING GENESIS BLOCK WITH TOTAL SUPPLY = ", total_supply)
    new_genesis_private_key = private_key
    # FIX: corrected typo in the printed message ("Ceating" -> "Creating").
    print("Creating new genesis params and state for genesis wallet address:")
    print(new_genesis_private_key.public_key.to_canonical_address())

    new_mainnet_genesis_params = {
        'chain_address': new_genesis_private_key.public_key.to_canonical_address(),
        'parent_hash': constants.GENESIS_PARENT_HASH,
        'transaction_root': constants.BLANK_ROOT_HASH,
        'receive_transaction_root': constants.BLANK_ROOT_HASH,
        'receipt_root': constants.BLANK_ROOT_HASH,
        'bloom': 0,
        'block_number': constants.GENESIS_BLOCK_NUMBER,
        'gas_limit': constants.GENESIS_GAS_LIMIT,
        'gas_used': 0,
        'timestamp': timestamp,
        'extra_data': constants.GENESIS_EXTRA_DATA,
        'reward_hash': constants.GENESIS_REWARD_HASH,
        'account_balance': total_supply,
    }

    new_genesis_state = {
        new_genesis_private_key.public_key.to_canonical_address(): {
            "balance": total_supply,
            "code": b"",
            "nonce": 0,
            "storage": {}
        }
    }

    # Sign the genesis header against a throwaway database.
    testdb1 = MemoryDB()
    genesis_header = MainnetChain.create_genesis_header(
        testdb1,
        new_genesis_private_key.public_key.to_canonical_address(),
        new_genesis_private_key,
        new_mainnet_genesis_params,
        new_genesis_state)

    # Collect every signed header field into a plain dict for the caller.
    parameter_names = list(dict(genesis_header._meta.fields).keys())
    header_params = {}
    for parameter_name in parameter_names:
        header_params[parameter_name] = getattr(genesis_header, parameter_name)

    return header_params, new_genesis_state
def test_min_allowed_gas_system():
    """Exercise the historical minimum-gas-price system twice: once with a
    database whose tx/centisecond is below the cap (min gas price stays at 1)
    and once above the cap (min gas price must rise)."""
    testdb1 = MemoryDB()
    tpc_of_blockchain_database = 1
    num_tpc_windows_to_go_back = 60
    create_blockchain_database_for_exceeding_tpc_cap(
        testdb1, tpc_of_blockchain_database, num_tpc_windows_to_go_back)
    # testdb1 = JournalDB(testdb1)

    node_1 = MainnetChain(
        testdb1, GENESIS_PRIVATE_KEY.public_key.to_canonical_address(),
        GENESIS_PRIVATE_KEY)

    # Follow the process that consensus will be using to sync the min gas system
    local_tpc_cap = node_1.get_local_tpc_cap()

    init_min_gas_price = 1
    init_tpc_cap = local_tpc_cap
    init_tpc = 10

    #initialize the min gas system
    node_1.chaindb.initialize_historical_minimum_gas_price_at_genesis(
        init_min_gas_price, init_tpc_cap, init_tpc)

    historical_min_gas_price = node_1.chaindb.load_historical_minimum_gas_price()
    historical_network_tpc_capability = node_1.chaindb.load_historical_network_tpc_capability()
    historical_tpc = node_1.chaindb.load_historical_tx_per_centisecond()

    # After initialization every historical entry holds the initial values.
    assert (all([x[1] == init_min_gas_price for x in historical_min_gas_price]))
    assert (all(
        [x[1] == init_tpc_cap for x in historical_network_tpc_capability]))
    assert (all([x[1] == init_tpc for x in historical_tpc]))

    # update the newest tpc cap and check that it saved
    node_1.update_current_network_tpc_capability(local_tpc_cap,
                                                 update_min_gas_price=True)

    historical_min_gas_price = node_1.chaindb.load_historical_minimum_gas_price()
    historical_tpc_cap = node_1.chaindb.load_historical_network_tpc_capability()
    historical_tpc = node_1.chaindb.load_historical_tx_per_centisecond()

    assert (historical_tpc_cap[-1][1] == local_tpc_cap)
    assert (historical_min_gas_price[-1][1] == 1)

    # Updating tpc will cause it to see that the initial tpc doesnt match the
    # blockchain database, and correct it. It will only go back at most 60
    # centiseconds, or at least 50.
    # need to say == True to make pytest happy
    assert (all([
        x[1] == tpc_of_blockchain_database * 2 for x in historical_tpc[-50:-1]
    ]))

    # the given tpc from the database is below the threshold. So min gas
    # should stay at 1
    assert (all([x[1] == 1 for x in historical_min_gas_price[-50:]]))

    #
    # now lets create a database where the tx/sec is above the threshold and
    # make sure historical gas price increases
    #
    testdb1 = MemoryDB()
    tpc_of_blockchain_database = 4
    num_tpc_windows_to_go_back = 60
    create_blockchain_database_for_exceeding_tpc_cap(
        testdb1, tpc_of_blockchain_database, num_tpc_windows_to_go_back)
    # testdb1 = JournalDB(testdb1)

    node_1 = MainnetChain(
        testdb1, GENESIS_PRIVATE_KEY.public_key.to_canonical_address(),
        GENESIS_PRIVATE_KEY)

    # Follow the process that consensus will be using to sync the min gas system
    init_min_gas_price = 1
    init_tpc_cap = 2
    init_tpc = 1

    # initialize the min gas system
    node_1.chaindb.initialize_historical_minimum_gas_price_at_genesis(
        init_min_gas_price, init_tpc_cap, init_tpc)

    historical_min_gas_price = node_1.chaindb.load_historical_minimum_gas_price()
    historical_network_tpc_capability = node_1.chaindb.load_historical_network_tpc_capability()
    historical_tpc = node_1.chaindb.load_historical_tx_per_centisecond()

    assert (all([x[1] == init_min_gas_price for x in historical_min_gas_price]))
    assert (all(
        [x[1] == init_tpc_cap for x in historical_network_tpc_capability]))
    assert (all([x[1] == init_tpc for x in historical_tpc]))

    # update the newest tpc cap and check that it saved
    node_1.update_current_network_tpc_capability(init_tpc_cap,
                                                 update_min_gas_price=True)

    historical_min_gas_price = node_1.chaindb.load_historical_minimum_gas_price()
    historical_tpc_cap = node_1.chaindb.load_historical_network_tpc_capability()
    historical_tpc = node_1.chaindb.load_historical_tx_per_centisecond()

    # plt.plot([x[1] for x in historical_min_gas_price])
    # plt.show()

    assert (historical_tpc_cap[-1][1] == init_tpc_cap)
    # tpc now exceeds the cap, so the minimum gas price must have risen.
    assert (historical_min_gas_price[-1][1] > 1)

    # Updating tpc will cause it to see that the initial tpc doesnt match the
    # blockchain database, and correct it. It will only go back at most 60
    # centiseconds, or at least 50.
    #need to say == True to make pytest happy
    assert (all([
        x[1] == tpc_of_blockchain_database * 2 for x in historical_tpc[-50:-1]
    ]))

# test_min_allowed_gas_system()
# exit()
def test_boson_vm_calculate_node_staking_score():
    """Plot how the boson consensus staking score responds to latency,
    failed requests, and uptime; writes three PNGs under plots/."""
    from hvm.vm.forks.boson.consensus import TIME_BETWEEN_PEER_NODE_HEALTH_CHECK
    testdb = MemoryDB()
    sender_chain = MainnetChain.from_genesis(
        testdb, SENDER.public_key.to_canonical_address(),
        MAINNET_GENESIS_PARAMS, MAINNET_GENESIS_STATE)

    # Find the activation timestamp of the boson fork in the VM configuration.
    boson_fork_timestamp = 0
    for timestamp_vm_config in MainnetChain.vm_configuration:
        if timestamp_vm_config[1].fork == 'boson':
            boson_fork_timestamp = timestamp_vm_config[0]

    boson_vm = sender_chain.get_vm(timestamp=boson_fork_timestamp)

    consensus_db = boson_vm.consensus_db

    #
    # score vs latency
    #
    latency = []
    staking_score = []
    for current_latency in range(1000, 1000000, 100000):
        current_staking_score = consensus_db.calculate_node_staking_score(
            average_response_time=current_latency,
            failed_requests=0,
            requests_sent=100,
            time_since_last_reward=TIME_BETWEEN_PEER_NODE_HEALTH_CHECK * 100)
        latency.append(current_latency / 1000)
        staking_score.append(current_staking_score / 10000)

    print(staking_score)
    print(latency)

    plt.plot(latency, staking_score)
    plt.xlabel('Latency (ms)')
    plt.ylabel('Percentage of max stake')
    plt.savefig('plots/staking_score_vs_latency.png', bbox_inches='tight')
    plt.clf()

    #
    # score vs failed requests
    #
    failed_requests = []
    staking_score = []
    for current_failed_requests in range(0, 100, 5):
        current_staking_score = consensus_db.calculate_node_staking_score(
            average_response_time=100000,
            failed_requests=current_failed_requests,
            requests_sent=100,
            time_since_last_reward=TIME_BETWEEN_PEER_NODE_HEALTH_CHECK * 100)
        failed_requests.append(current_failed_requests)
        staking_score.append(current_staking_score / 10000)

    print(failed_requests)
    print(staking_score)

    plt.plot(failed_requests, staking_score)
    plt.xlabel('Failed requests (% of requests sent)')
    plt.ylabel('Percentage of max stake')
    plt.savefig('plots/staking_score_vs_failed_requests.png', bbox_inches='tight')
    plt.clf()

    #
    # score vs percentage of uptime
    #
    percentage_of_uptime = []
    staking_score = []
    start = TIME_BETWEEN_PEER_NODE_HEALTH_CHECK * 10
    for current_time_since_last_reward in range(start, start + start * 100, start):
        current_staking_score = consensus_db.calculate_node_staking_score(
            average_response_time=100000,
            failed_requests=0,
            requests_sent=10,
            time_since_last_reward=current_time_since_last_reward)
        percentage_of_uptime.append(start / current_time_since_last_reward)
        staking_score.append(current_staking_score / 10000)

    print(percentage_of_uptime)
    print(staking_score)

    plt.plot(percentage_of_uptime, staking_score)
    plt.xlabel('Percentage of uptime')
    plt.ylabel('Percentage of max stake')
    plt.savefig('plots/staking_score_vs_time_since_last_reward.png', bbox_inches='tight')
    plt.clf()
def debug_test_1():
    """Debug helper against a read-only view of the local mainnet database:
    purge a block (and its children) from one chain, then re-import it,
    printing the chain head hash at each step to watch it change and recover."""
    testdb = LevelDB("/home/tommy/.local/share/helios/mainnet/chain/full/")
    # JournalDB over ReadOnlyDB: all writes stay in memory; the LevelDB on
    # disk is never modified.
    testdb = JournalDB(testdb)
    testdb = ReadOnlyDB(testdb)
    chain = MainnetChain(testdb, private_keys[0].public_key.to_canonical_address(), private_keys[0])

    block = chain.get_block_by_hash(
        decode_hex(
            '0x6a8d49885e5f07ea66f722e4ec9ba9630a86f1189257317461196726bee7ea0c'
        ))

    new_chain = chain.get_blocks_on_chain(
        0, 3, decode_hex('0x1d1a2266a15CcB2e70baeB4b75b2c59Da95498ac'))
    print('blocks on chain')
    for cur_block in new_chain:
        print(encode_hex(cur_block.header.hash))
    print()

    # Point chain_head_db at the newest historical root hash before reading
    # the chain head hash.
    newest_root_hash = chain.chain_head_db.get_historical_root_hashes()[-1][1]
    chain.chain_head_db.root_hash = newest_root_hash
    chain_head_hash = chain.chain_head_db.get_chain_head_hash(
        decode_hex('0x1d1a2266a15CcB2e70baeB4b75b2c59Da95498ac'))
    print("chain_head_hash {}".format(encode_hex(chain_head_hash)))

    #
    # now lets delete all but the first block
    #
    print("Deleting all blocks but first")
    chain = MainnetChain(testdb, private_keys[0].public_key.to_canonical_address(), private_keys[0])
    chain.purge_block_and_all_children_and_set_parent_as_chain_head(
        block.header, save_block_head_hash_timestamp=True)

    newest_root_hash = chain.chain_head_db.get_historical_root_hashes()[-1][1]
    chain.chain_head_db.root_hash = newest_root_hash
    chain_head_hash = chain.chain_head_db.get_chain_head_hash(
        decode_hex('0x1d1a2266a15CcB2e70baeB4b75b2c59Da95498ac'))
    print("chain_head_hash {}".format(encode_hex(chain_head_hash)))

    #
    # Now lets import the second block again
    #
    print("Importing second block")
    chain.import_block(
        block,
        allow_replacement=False,
        ensure_block_unchanged=True,
    )

    newest_root_hash = chain.chain_head_db.get_historical_root_hashes()[-1][1]
    chain.chain_head_db.root_hash = newest_root_hash
    chain_head_hash = chain.chain_head_db.get_chain_head_hash(
        decode_hex('0x1d1a2266a15CcB2e70baeB4b75b2c59Da95498ac'))
    print("chain_head_hash {}".format(encode_hex(chain_head_hash)))
def test_break_chronological_consistency_1():
    """Importing an older block behind an already-imported reward block must fail.

    Builds a reward-test database, creates (but does not yet import) a valid
    block on private_keys[1]'s chain timestamped one second in the past, then
    imports a reward block on private_keys[0]'s chain backed by proofs from
    the other instances. Importing the earlier block afterwards with
    allow_replacement=False must raise ReplacingBlocksNotAllowed.
    """
    testdb = create_reward_test_blockchain_database()

    chain = MainnetChain(testdb, private_keys[1].public_key.to_canonical_address(), private_keys[1])

    # Create and sign a transaction from instance 1 to instance 0, and bake
    # it into a valid block that we deliberately hold back from importing.
    tx_nonce = chain.get_current_queue_block_nonce()
    tx = chain.create_and_sign_transaction(nonce=tx_nonce,
                                           gas_price=to_wei(1, 'gwei'),
                                           gas=GAS_TX,
                                           to=private_keys[0].public_key.to_canonical_address(),
                                           value=1,
                                           data=b"",
                                           v=0,
                                           r=0,
                                           s=0
                                           )

    new_block_to_import = create_valid_block_at_timestamp(testdb,
                                                          private_keys[1],
                                                          transactions=[tx],
                                                          receive_transactions=None,
                                                          reward_bundle=None,
                                                          timestamp=int(time.time() - 1))

    chain = MainnetChain(testdb, private_keys[0].public_key.to_canonical_address(), private_keys[0])

    # Gather signed staking-score proofs from instances 1-9 for instance 0's
    # reward block; scores decrease slightly with i so they stay distinct.
    node_staking_scores = []

    score = 100000
    for i in range(1, 10):
        current_private_key = private_keys[i]
        node_staking_score = NodeStakingScore(
            recipient_node_wallet_address=private_keys[0].public_key.to_canonical_address(),
            score=int(score - i),
            since_block_number=0,
            timestamp=int(time.time()),
            head_hash_of_sender_chain=chain.chaindb.get_canonical_head_hash(
                current_private_key.public_key.to_canonical_address()),
            v=0,
            r=0,
            s=0,
        )
        signed_node_staking_score = node_staking_score.get_signed(current_private_key, MAINNET_NETWORK_ID)
        node_staking_scores.append(signed_node_staking_score)

    # Now we try to import the reward block with instance 0
    reward_chain = MainnetChain(testdb, private_keys[0].public_key.to_canonical_address(), private_keys[0])
    reward_block = reward_chain.import_current_queue_block_with_reward(node_staking_scores)

    # Debug output: which instance each accepted proof came from.
    print("reward block timestamp {}".format(reward_block.header.timestamp))
    for proof in reward_block.reward_bundle.reward_type_2.proof:
        print(encode_hex(proof.sender))
        for i in range(1, 10):
            if proof.sender == private_keys[i].public_key.to_canonical_address():
                print("proof from {}".format(i))
    print("new_block_to_import timestamp {}".format(new_block_to_import.header.timestamp))
    # NOTE(review): the source was reflowed and this string literal was split
    # across a line break; the break is reconstructed here as \n -- confirm
    # against the original file.
    print("new_block_to_import chain address \n{}".format(encode_hex(new_block_to_import.header.chain_address)))

    # Now we import a block on private key 1 with a timestamp set to 10 seconds ago
    chain = MainnetChain(testdb, private_keys[1].public_key.to_canonical_address(), private_keys[1])

    with pytest.raises(ReplacingBlocksNotAllowed):
        chain.import_block(new_block_to_import, allow_replacement=False)

# test_break_chronological_consistency_1()
# exit()
def test_invalid_proofs_not_enough_stake():
    """Reward import must fail when the proofs carry too little mature stake.

    Builds a fixed dev blockchain where every key holds only 1 ether (the
    last one still immature), gathers staking-score proofs from the other
    instances, and checks that importing the reward block raises
    NotEnoughProofsOrStakeForRewardType2Proof.
    """
    testdb = MemoryDB()
    chain = MainnetChain(testdb, GENESIS_PRIVATE_KEY.public_key.to_canonical_address(), GENESIS_PRIVATE_KEY)

    # Pull consensus timing parameters from the current VM so the fixture
    # timestamps line up with the staking maturity rules.
    coin_mature_time = chain.get_vm(timestamp=Timestamp(int(time.time()))).consensus_db.coin_mature_time_for_staking
    min_time_between_blocks = chain.get_vm(timestamp=Timestamp(int(time.time()))).min_time_between_blocks

    now = int(time.time())
    start = now - max((coin_mature_time * 2), (min_time_between_blocks * 20))
    # Each key gets 1 ether; funding times are staggered by the minimum block
    # spacing. The last key is funded too recently to count as mature stake.
    key_balance_dict = {
        private_keys[0]: (to_wei(1, 'ether'), start),
        private_keys[1]: (to_wei(1, 'ether'), start + min_time_between_blocks * 1),
        private_keys[2]: (to_wei(1, 'ether'), start + min_time_between_blocks * 2),
        private_keys[3]: (to_wei(1, 'ether'), start + min_time_between_blocks * 3),
        private_keys[4]: (to_wei(1, 'ether'), start + min_time_between_blocks * 4),
        private_keys[5]: (to_wei(1, 'ether'), start + min_time_between_blocks * 5),
        private_keys[6]: (to_wei(1, 'ether'), start + min_time_between_blocks * 6),
        private_keys[7]: (to_wei(1, 'ether'), start + min_time_between_blocks * 7),
        private_keys[8]: (to_wei(1, 'ether'), start + min_time_between_blocks * 8),
        private_keys[9]: (to_wei(1, 'ether'), now - coin_mature_time + 1),  # immature
    }
    create_dev_fixed_blockchain_database(testdb, key_balance_dict)

    chain = MainnetChain(testdb, private_keys[0].public_key.to_canonical_address(), private_keys[0])

    node_staking_scores = []

    # First score/proof from instance 1, timestamped 10 minutes in the past.
    # NOTE(review): the original comment said "timestamp far in future",
    # which contradicts the subtraction below.
    current_private_key = private_keys[1]
    node_staking_score = NodeStakingScore(
        recipient_node_wallet_address=private_keys[0].public_key.to_canonical_address(),
        score=int(1000000),
        since_block_number=0,
        timestamp=int(time.time()) - 60 * 10,
        head_hash_of_sender_chain=chain.chaindb.get_canonical_head_hash(
            current_private_key.public_key.to_canonical_address()),
        v=0,
        r=0,
        s=0,
    )
    signed_node_staking_score = node_staking_score.get_signed(current_private_key, MAINNET_NETWORK_ID)
    node_staking_scores.append(signed_node_staking_score)

    # Remaining proofs come from instances 2-9, with slightly decreasing
    # scores and since_block_number=1.
    score = 100000
    for i in range(2, 10):
        current_private_key = private_keys[i]
        node_staking_score = NodeStakingScore(
            recipient_node_wallet_address=private_keys[0].public_key.to_canonical_address(),
            score=int(score - i),
            since_block_number=1,
            timestamp=int(time.time()),
            head_hash_of_sender_chain=chain.chaindb.get_canonical_head_hash(
                current_private_key.public_key.to_canonical_address()),
            v=0,
            r=0,
            s=0,
        )
        signed_node_staking_score = node_staking_score.get_signed(current_private_key, MAINNET_NETWORK_ID)
        node_staking_scores.append(signed_node_staking_score)

    # Now we try to import the reward block with instance 0
    reward_chain = MainnetChain(testdb, private_keys[0].public_key.to_canonical_address(), private_keys[0])

    with pytest.raises(NotEnoughProofsOrStakeForRewardType2Proof):
        reward_chain.import_current_queue_block_with_reward(node_staking_scores)
def _test_block_rewards_system():
    """Manual/debug walkthrough of the block rewards system.

    The genesis chain imports a reward block backed by fake NodeStakingScore
    proofs from every dev key, and the combined reward amounts are asserted
    against the resulting balance change. Two more reward blocks are then
    imported on private_keys[0]'s chain using a single proof from
    private_keys[1] -- first with a small manually-set score (1532), then
    with the maximum score (1000000).

    The leading underscore keeps pytest from collecting this function: it
    sleeps for min_time_between_blocks between imports, so it is slow by
    design. Prints extensively to stdout.
    """
    # The genesis chain will be adding a reward block. We need to generate
    # fake NodeStakingScores from a bunch of other nodes.
    testdb = create_reward_test_blockchain_database()

    chain = MainnetChain(testdb, GENESIS_PRIVATE_KEY.public_key.to_canonical_address(), GENESIS_PRIVATE_KEY)

    # Consensus timing parameters from the current VM; min_time_between_blocks
    # is also used as the sleep interval between reward-block imports below.
    coin_mature_time = chain.get_vm(timestamp=Timestamp(int(time.time()))).consensus_db.coin_mature_time_for_staking
    min_time_between_blocks = chain.get_vm(timestamp=Timestamp(int(time.time()))).min_time_between_blocks

    chain = MainnetChain(testdb, GENESIS_PRIVATE_KEY.public_key.to_canonical_address(), GENESIS_PRIVATE_KEY)

    # Build one signed NodeStakingScore per dev key, with scores decaying by
    # a factor of 5 so the proofs have distinct weights.
    node_staking_scores = []

    score = 1000000
    for private_key in private_keys:
        node_staking_score = NodeStakingScore(
            recipient_node_wallet_address=GENESIS_PRIVATE_KEY.public_key.to_canonical_address(),
            score=int(score),
            since_block_number=0,
            timestamp=int(time.time()),
            head_hash_of_sender_chain=chain.chaindb.get_canonical_head_hash(private_key.public_key.to_canonical_address()),
            v=0,
            r=0,
            s=0,
        )
        signed_node_staking_score = node_staking_score.get_signed(private_key, MAINNET_NETWORK_ID)
        node_staking_scores.append(signed_node_staking_score)
        score = score / 5

    chain = MainnetChain(testdb, GENESIS_PRIVATE_KEY.public_key.to_canonical_address(), GENESIS_PRIVATE_KEY)

    # Sort highest score first, then dump each proof's validity and the
    # sender's mature stake for inspection.
    node_staking_scores.sort(key=lambda x: -1 * x.score)
    for node_staking_score in node_staking_scores:
        node_staking_score.validate()
        print(node_staking_score.is_signature_valid)
        print(node_staking_score.sender)
        print(node_staking_score.score, chain.get_mature_stake(node_staking_score.sender, node_staking_score.timestamp))

    reward_bundle = chain.get_consensus_db().create_reward_bundle_for_block(
        GENESIS_PRIVATE_KEY.public_key.to_canonical_address(), node_staking_scores, at_timestamp=int(time.time()))

    chain.get_consensus_db().validate_reward_bundle(
        reward_bundle, GENESIS_PRIVATE_KEY.public_key.to_canonical_address(), int(time.time()))

    print('AAAAAAAAAAA')
    print(reward_bundle.reward_type_1.amount)
    print(reward_bundle.reward_type_2.amount)
    print(reward_bundle.reward_type_2.proof[0].score)

    initial_balance = chain.get_vm().state.account_db.get_balance(GENESIS_PRIVATE_KEY.public_key.to_canonical_address())
    print("balance before reward = ", initial_balance)

    chain.import_current_queue_block_with_reward(reward_bundle.reward_type_2.proof)

    final_balance = chain.get_vm().state.account_db.get_balance(GENESIS_PRIVATE_KEY.public_key.to_canonical_address())
    print("balance after reward = ", final_balance)

    # Both reward components must land in the recipient's balance.
    assert ((reward_bundle.reward_type_1.amount + reward_bundle.reward_type_2.amount) ==
            (final_balance - initial_balance))

    print("waiting {} seconds before importing the next block".format(min_time_between_blocks))
    time.sleep(min_time_between_blocks)

    # Second reward block: a single proof from private_keys[1], its score
    # overwritten to 1532 and re-signed after the copy.
    proof_chain = MainnetChain(testdb, private_keys[1].public_key.to_canonical_address(), private_keys[1])
    mature_stake = proof_chain.get_mature_stake()
    print("proof chain mature stake")
    print(mature_stake)

    staking_score = proof_chain.get_signed_peer_score_string_private_key(private_keys[1].to_bytes(), private_keys[0].public_key.to_canonical_address())
    staking_score = staking_score.copy(score=1532)
    staking_score = staking_score.get_signed(private_keys[1], proof_chain.network_id)
    print('staking score')
    print(staking_score.score)

    reward_chain = MainnetChain(testdb, private_keys[0].public_key.to_canonical_address(), private_keys[0])
    reward_bundle = reward_chain.get_consensus_db().create_reward_bundle_for_block(
        private_keys[0].public_key.to_canonical_address(), [staking_score], at_timestamp=Timestamp(int(time.time())))

    print("reward type 2 amount")
    print(reward_bundle.reward_type_2.amount)
    print("reward type 2 proof")
    print(reward_bundle.reward_type_2.proof)

    reward_chain.import_current_queue_block_with_reward([staking_score])

    # todo: this will fail if the reward block time is too long. Need to
    # manually set it to a small number for the test... or manually make the
    # blocks older?
    print("waiting {} seconds before importing the next block".format(min_time_between_blocks))
    time.sleep(min_time_between_blocks)

    # Third reward block: same flow as above, but with the maximum score.
    proof_chain = MainnetChain(testdb, private_keys[1].public_key.to_canonical_address(), private_keys[1])
    mature_stake = proof_chain.get_mature_stake()
    print("proof chain mature stake")
    print(mature_stake)

    staking_score = proof_chain.get_signed_peer_score_string_private_key(private_keys[1].to_bytes(), private_keys[
        0].public_key.to_canonical_address())
    staking_score = staking_score.copy(score=1000000)
    staking_score = staking_score.get_signed(private_keys[1], proof_chain.network_id)
    print('staking score')
    print(staking_score.score)

    reward_chain = MainnetChain(testdb, private_keys[0].public_key.to_canonical_address(), private_keys[0])
    reward_bundle = reward_chain.get_consensus_db().create_reward_bundle_for_block(
        private_keys[0].public_key.to_canonical_address(), [staking_score], at_timestamp=Timestamp(int(time.time())))

    print("reward type 2 amount")
    print(reward_bundle.reward_type_2.amount)
    print("reward type 2 proof")
    print(reward_bundle.reward_type_2.proof)

    reward_chain.import_current_queue_block_with_reward([staking_score])