Example #1
def test_invalid_proofs_no_proofs():

    testdb = create_reward_test_blockchain_database()

    chain = TestnetChain(testdb,
                         private_keys[0].public_key.to_canonical_address(),
                         private_keys[0])
    min_time_between_blocks = chain.get_vm(
        timestamp=Timestamp(int(time.time()))).min_time_between_blocks
    tx_list = [[
        private_keys[1], private_keys[0],
        to_wei(1, 'ether'),
        int(int(time.time()) - min_time_between_blocks * 10)
    ]]

    add_transactions_to_blockchain_db(testdb, tx_list)

    chain = TestnetChain(testdb,
                         private_keys[0].public_key.to_canonical_address(),
                         private_keys[0])

    required_number_of_proofs_for_reward_type_2_proof = chain.get_consensus_db(
        timestamp=Timestamp(int(
            time.time()))).required_number_of_proofs_for_reward_type_2_proof

    # Now we try to import the reward block with instance 0
    reward_chain = TestnetChain(
        testdb, private_keys[0].public_key.to_canonical_address(),
        private_keys[0])

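    # With no proofs supplied, the reward amount rounds to zero, so this import is expected to raise RewardAmountRoundsToZero.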
    with pytest.raises(RewardAmountRoundsToZero):
        reward_chain.import_current_queue_block_with_reward([])
Example #2
async def test_import_block_with_high_gas(request, event_loop):

    simulate_importing_from_rpc = False
    # Use a transaction timestamp well within the additive sync window (newer than time.time() - ADDITIVE_SYNC_MODE_CUTOFF).
    new_tx_time = int(time.time() - ADDITIVE_SYNC_MODE_CUTOFF / 2)

    tx_list = [[GENESIS_PRIVATE_KEY, RECEIVER, 100, new_tx_time, 101]]
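    # The fifth element (101) appears to be the transaction's gas price, just above the node_min_gas_price of 100 set below, so the block is expected to import.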
    new_blocks_db = get_fresh_db()
    add_transactions_to_blockchain_db(new_blocks_db, tx_list)

    expect_blocks_to_import = True

    node_new_blocks = MainnetChain(
        new_blocks_db, GENESIS_PRIVATE_KEY.public_key.to_canonical_address())

    new_blocks = node_new_blocks.get_all_chronological_blocks_for_window(
        int((new_tx_time) / TIME_BETWEEN_HEAD_HASH_SAVE) *
        TIME_BETWEEN_HEAD_HASH_SAVE)

    await _setup_test_import_blocks(
        request,
        event_loop,
        new_blocks_db,
        new_blocks,
        simulate_importing_from_rpc,
        expect_blocks_to_import=expect_blocks_to_import,
        node_min_gas_price=100)
Example #3
def test_invalid_proofs_timestamp_in_past():

    testdb = create_reward_test_blockchain_database()

    chain = MainnetChain(testdb, private_keys[0].public_key.to_canonical_address(), private_keys[0])
    min_time_between_blocks = chain.get_vm(timestamp=Timestamp(int(time.time()))).min_time_between_blocks
    tx_list = [[private_keys[1], private_keys[0], to_wei(1, 'ether'), int(int(time.time())-min_time_between_blocks*10)]]

    add_transactions_to_blockchain_db(testdb, tx_list)

    chain = MainnetChain(testdb, private_keys[0].public_key.to_canonical_address(), private_keys[0])

    required_number_of_proofs_for_reward_type_2_proof = chain.get_consensus_db(timestamp=Timestamp(int(time.time()))).required_number_of_proofs_for_reward_type_2_proof

    node_staking_scores = []

    # First score/proof with a timestamp in the past (10 minutes ago)
    current_private_key = private_keys[1]
    node_staking_score = NodeStakingScore(
        recipient_node_wallet_address=private_keys[0].public_key.to_canonical_address(),
        score=int(1000000),
        since_block_number=0,
        timestamp=int(time.time())-60*10,
        head_hash_of_sender_chain=chain.chaindb.get_canonical_head_hash(
            current_private_key.public_key.to_canonical_address()),
        v=0,
        r=0,
        s=0,
    )
    signed_node_staking_score = node_staking_score.get_signed(current_private_key, MAINNET_NETWORK_ID)
    node_staking_scores.append(signed_node_staking_score)

    score = 100000
    for i in range(2, 10):
        # Remaining scores/proofs come from instances 2 through 9 with current timestamps
        current_private_key = private_keys[i]
        node_staking_score = NodeStakingScore(
            recipient_node_wallet_address=private_keys[0].public_key.to_canonical_address(),
            score=int(score-i),
            since_block_number=1,
            timestamp=int(time.time()),
            head_hash_of_sender_chain=chain.chaindb.get_canonical_head_hash(
                current_private_key.public_key.to_canonical_address()),
            v=0,
            r=0,
            s=0,
            )
        signed_node_staking_score = node_staking_score.get_signed(current_private_key, MAINNET_NETWORK_ID)
        node_staking_scores.append(signed_node_staking_score)

    # Now we try to import the reward block with instance 0
    reward_chain = MainnetChain(testdb, private_keys[0].public_key.to_canonical_address(), private_keys[0])

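    # The stale first proof should cause the reward block import to fail validation.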
    with pytest.raises(ValidationError):
        reward_chain.import_current_queue_block_with_reward(node_staking_scores)
Example #4
async def test_sparse_sync_2(request, event_loop):
    '''
    The blockchain databases of the client and server match up to a point within the consensus match stage.
    After that point the server gains a window of random blocks, while the client gains sparse blocks spread
    across several chronological windows. Syncing should leave both databases identical.
    :param request:
    :param event_loop:
    :return:
    '''

    genesis_time = int(time.time() / 1000) * 1000 - 1000 * 900
    equal_to_time = int(time.time() / 1000) * 1000 - 1000 * 890

    server_db = get_random_blockchain_to_time(genesis_time, equal_to_time)
    client_db = MemoryDB(kv_store=server_db.kv_store.copy())

    add_random_transactions_to_db_for_time_window(server_db, equal_to_time,
                                                  equal_to_time + 1000 * 5)

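    # Sparse blocks that will be added only to the client's database, spread across several chronological windows.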
    tx_list = [[
        TESTNET_GENESIS_PRIVATE_KEY, RECEIVER, 100,
        int(time.time() / 1000) * 1000 - 1000 * 800
    ],
               [
                   TESTNET_GENESIS_PRIVATE_KEY, RECEIVER, 100,
                   int(time.time() / 1000) * 1000 - 1000 * 700
               ],
               [
                   TESTNET_GENESIS_PRIVATE_KEY, RECEIVER, 100,
                   int(time.time() / 1000) * 1000 - 1000 * 100
               ],
               [
                   TESTNET_GENESIS_PRIVATE_KEY, RECEIVER, 100,
                   int(time.time() / 1000) * 1000 - 1000 * 5
               ],
               [
                   TESTNET_GENESIS_PRIVATE_KEY, RECEIVER, 100,
                   int(time.time() / 1000) * 1000 - 1000 * 1
               ]]

    add_transactions_to_blockchain_db(client_db, tx_list)

    client_node = TestnetChain(
        client_db,
        TESTNET_GENESIS_PRIVATE_KEY.public_key.to_canonical_address())
    client_node.min_gas_db.initialize_historical_minimum_gas_price_at_genesis(
        min_gas_price=1, net_tpc_cap=100)

    await _test_sync_with_variable_sync_parameters(
        request, event_loop, client_db, server_db,
        ensure_blockchain_databases_identical)
Example #5
async def test_additive_sync_4(request, event_loop):
    '''
    The blockchain databases of the client and server match up to a recent point within the additive sync stage,
    and the server's database contains additional blocks after that time.
    :param request:
    :param event_loop:
    :return:
    '''

    genesis_time = int(time.time() / 1000) * 1000 - 1000 * 25
    equal_to_time = int(time.time() / 1000) * 1000 - 1000 * 2

    server_db = get_random_blockchain_to_time(genesis_time, equal_to_time)
    client_db = MemoryDB(kv_store=server_db.kv_store.copy())

    tx_list = [[GENESIS_PRIVATE_KEY, RECEIVER, 100,
                int(time.time() - 2000)],
               [GENESIS_PRIVATE_KEY, RECEIVER, 100,
                int(time.time() - 1500)],
               [GENESIS_PRIVATE_KEY, RECEIVER, 100,
                int(time.time() - 1000)]]

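    # These additional blocks are added only to the server's database.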
    add_transactions_to_blockchain_db(server_db, tx_list)

    client_node = MainnetChain(
        client_db, GENESIS_PRIVATE_KEY.public_key.to_canonical_address())
    client_node.chaindb.initialize_historical_minimum_gas_price_at_genesis(
        min_gas_price=1, net_tpc_cap=100, tpc=1)
    server_node = MainnetChain(
        server_db, GENESIS_PRIVATE_KEY.public_key.to_canonical_address())
    server_node.chaindb.initialize_historical_minimum_gas_price_at_genesis(
        min_gas_price=1, net_tpc_cap=100, tpc=1)

    await _test_sync_with_variable_sync_parameters(
        request, event_loop, client_db, server_db,
        ensure_blockchain_databases_identical)
Example #6
def test_boson_vm_calculate_reward_based_on_fractional_interest():
    testdb = MemoryDB()

    masternode_level_3_required_balance = MASTERNODE_LEVEL_3_REQUIRED_BALANCE
    masternode_level_3_multiplier = MASTERNODE_LEVEL_3_REWARD_TYPE_2_MULTIPLIER
    masternode_level_2_required_balance = MASTERNODE_LEVEL_2_REQUIRED_BALANCE
    masternode_level_2_multiplier = MASTERNODE_LEVEL_2_REWARD_TYPE_2_MULTIPLIER
    masternode_level_1_required_balance = MASTERNODE_LEVEL_1_REQUIRED_BALANCE
    masternode_level_1_multiplier = MASTERNODE_LEVEL_1_REWARD_TYPE_2_MULTIPLIER

    genesis_block_time = int(time.time()) - 10000000
    genesis_params, genesis_state = create_new_genesis_params_and_state(
        GENESIS_PRIVATE_KEY, masternode_level_3_required_balance * 2,
        genesis_block_time)

    time_between_blocks = max(MIN_TIME_BETWEEN_BLOCKS, 1)
    # import genesis block
    MainnetChain.from_genesis(
        testdb, GENESIS_PRIVATE_KEY.public_key.to_canonical_address(),
        genesis_params, genesis_state)

    stake_start = genesis_block_time + time_between_blocks
    tx_list = [[
        GENESIS_PRIVATE_KEY, RECEIVER, masternode_level_3_required_balance,
        stake_start
    ],
               [
                   RECEIVER, RECEIVER2,
                   (masternode_level_3_required_balance -
                    masternode_level_2_required_balance - GAS_TX),
                   stake_start + 100000
               ],
               [
                   RECEIVER, RECEIVER2,
                   (masternode_level_2_required_balance -
                    masternode_level_1_required_balance - GAS_TX),
                   stake_start + 200000
               ],
               [
                   RECEIVER, RECEIVER2,
                   (masternode_level_1_required_balance - 1000000 - GAS_TX),
                   stake_start + 300000
               ]]

    add_transactions_to_blockchain_db(testdb, tx_list)

    receiver_chain = MainnetChain(testdb,
                                  RECEIVER.public_key.to_canonical_address(),
                                  RECEIVER)

    fractional_interest = REWARD_TYPE_2_AMOUNT_FACTOR

    boson_fork_timestamp = 0
    for timestamp_vm_config in MainnetChain.vm_configuration:
        if timestamp_vm_config[1].fork == 'boson':
            boson_fork_timestamp = timestamp_vm_config[0]

    boson_vm = receiver_chain.get_vm(timestamp=boson_fork_timestamp)

    consensus_db = boson_vm.consensus_db

    calculate_at_timestamp = int(time.time())
    reward = consensus_db.calculate_reward_based_on_fractional_interest(
        RECEIVER.public_key.to_canonical_address(), fractional_interest,
        calculate_at_timestamp)

    if calculate_at_timestamp < EARLY_BIRD_BONUS_CUTOFF_TIMESTAMP:
        early_bird_bonus = EARLY_BIRD_BONUS_FACTOR
    else:
        early_bird_bonus = 1
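    # Expected reward: one term per holding period, each equal to fractional_interest * early_bird_bonus *
    # balance held * holding time * the masternode multiplier for that balance level (1 for the final, sub-level-1 balance).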
    expected_reward_part_1 = fractional_interest * early_bird_bonus * (
        masternode_level_3_required_balance * 100000 *
        masternode_level_3_multiplier)
    expected_reward_part_2 = fractional_interest * early_bird_bonus * (
        masternode_level_2_required_balance * 100000 *
        masternode_level_2_multiplier)
    expected_reward_part_3 = fractional_interest * early_bird_bonus * (
        masternode_level_1_required_balance * 100000 *
        masternode_level_1_multiplier)
    expected_reward_part_4 = fractional_interest * early_bird_bonus * (
        1000000) * (calculate_at_timestamp - (stake_start + 300000) -
                    consensus_db.coin_mature_time_for_staking)

    # print("Expected calculation = {} * {} * {} * {}".format((calculate_at_timestamp-(stake_start+300000)-COIN_MATURE_TIME_FOR_STAKING), 1000000, fractional_interest, 1))
    # print("Expected calculation = {} * {} * {} * {}".format(100000, masternode_level_1_required_balance, fractional_interest, masternode_level_1_multiplier))
    # print("Expected calculation = {} * {} * {} * {}".format(100000, masternode_level_2_required_balance, fractional_interest, masternode_level_2_multiplier))
    # print("Expected calculation = {} * {} * {} * {}".format(100000, masternode_level_3_required_balance, fractional_interest, masternode_level_3_multiplier))
    #
    # print("Expected reward {}".format(int(expected_reward_part_4)))
    # print("Expected reward {}".format(int(expected_reward_part_4)+int(expected_reward_part_3)))
    # print("Expected reward {}".format(int(expected_reward_part_4)+int(expected_reward_part_3)+int(expected_reward_part_2)))
    # print("Expected reward {}".format(int(expected_reward_part_4)+int(expected_reward_part_3)+int(expected_reward_part_2)+int(expected_reward_part_1)))

    expected_reward = int(expected_reward_part_1) + int(
        expected_reward_part_2) + int(expected_reward_part_3) + int(
            expected_reward_part_4)
    assert (reward == expected_reward)
Example #7
async def _build_test_consensus(
        request,
        event_loop,
        genesis_block_timestamp=int(time.time() / 1000) * 1000 - 1000 * 1000 +
    1000,
        gap_between_genesis_block_and_first_transaction=1000,
        diverging_transactions_timestamp=None):
    '''
    This one creates a swarm of 4 nodes with one database, and 4 nodes with another database, then asks
    consensus which one to choose. It checks to make sure they choose the correct one.
    The bootnode and the first half of the peers have the same blockchain database.
    The second half of the peers have a conflicting database.
    Finally, the client has only the genesis block and is asked to choose which database is in consensus.
    The first half of the peers have much more stake than the second half, so the client should choose the
    blockchain database from the first half of the nodes, which is also the one the bootnode has.
    :param request:
    :param event_loop:
    :return:
    '''

    num_peers_in_swarm = 8

    # If this is less than TIME_BETWEEN_HEAD_HASH_SAVE, it will result in FAST SYNC MODE because even the first
    # chronological block hash will be different.
    #gap_between_genesis_block_and_first_transaction = 1000

    base_db = MemoryDB()

    #genesis_block_timestamp = int(time.time()/1000)*1000 - 1000*1000 + 1000
    #genesis_block_timestamp = 1547288000

    private_keys = []
    for i in range(len(random_private_keys)):
        private_keys.append(keys.PrivateKey(random_private_keys[i]))

    if gap_between_genesis_block_and_first_transaction < MIN_TIME_BETWEEN_BLOCKS:
        gap_between_genesis_block_and_first_transaction = MIN_TIME_BETWEEN_BLOCKS

    tx_list = [
        *[[
            TESTNET_GENESIS_PRIVATE_KEY, private_keys[i],
            ((1000000 - 1000 * i) * 10**18), genesis_block_timestamp +
            gap_between_genesis_block_and_first_transaction +
            MIN_TIME_BETWEEN_BLOCKS * i
        ] for i in range(len(random_private_keys))]
    ]

    total_required_gas = sum([
        (to_wei(tx_key[4], 'gwei') if len(tx_key) > 4 else to_wei(1, 'gwei')) *
        GAS_TX for tx_key in tx_list
    ])

    genesis_chain_stake = 100

    required_total_supply = sum([
        x[2] for x in tx_list if x[0] == TESTNET_GENESIS_PRIVATE_KEY
    ]) + genesis_chain_stake + total_required_gas

    genesis_params, genesis_state = create_new_genesis_params_and_state(
        TESTNET_GENESIS_PRIVATE_KEY, required_total_supply,
        genesis_block_timestamp)

    # import genesis block
    TestnetChain.from_genesis(
        base_db, TESTNET_GENESIS_PRIVATE_KEY.public_key.to_canonical_address(),
        genesis_params, genesis_state)

    # Client db has only the genesis block
    client_db = MemoryDB(base_db.kv_store.copy())

    add_transactions_to_blockchain_db(base_db, tx_list)

    # chain = TestnetChain(base_db, TESTNET_GENESIS_PRIVATE_KEY.public_key.to_canonical_address(), TESTNET_GENESIS_PRIVATE_KEY)
    # print('AAAAAAAAAAA')
    # print('genesis')
    # print(chain.get_vm().state.account_db.get_balance(TESTNET_GENESIS_PRIVATE_KEY.public_key.to_canonical_address()))
    # for i in range(len(random_private_keys)):
    #     print(i)
    #     print(chain.get_vm().state.account_db.get_balance(private_keys[i].public_key.to_canonical_address()))
    # exit()
    # stake for the first half of chains should be from node 1 to node n:
    # 100
    # 1000000
    # 999000
    # 998000
    # 997000
    # 996000
    # 995000
    # 994000
    # 993000
    # 992000

    peer_dbs = []
    for i in range(int(num_peers_in_swarm / 2)):
        peer_dbs.append(MemoryDB(base_db.kv_store.copy()))

    #last_block_timestamp = tx_list[-1][-1]
    # additional_tx_list_for_competing_db = [
    #     [private_keys[4], private_keys[1], 100, last_block_timestamp + TIME_BETWEEN_HEAD_HASH_SAVE + MIN_TIME_BETWEEN_BLOCKS * 1],
    #     [private_keys[4], private_keys[2], 100, last_block_timestamp + TIME_BETWEEN_HEAD_HASH_SAVE + MIN_TIME_BETWEEN_BLOCKS * 2],
    #     [private_keys[4], private_keys[3], 100, last_block_timestamp + TIME_BETWEEN_HEAD_HASH_SAVE + MIN_TIME_BETWEEN_BLOCKS * 3],
    # ]

    if diverging_transactions_timestamp is None:
        diverging_transactions_timestamp = tx_list[-1][
            -1] + TIME_BETWEEN_HEAD_HASH_SAVE

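    # The competing database starts as a copy of base_db and then diverges with these extra transactions.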
    additional_tx_list_for_competing_db = [
        [
            private_keys[4], private_keys[1], 100,
            diverging_transactions_timestamp + MIN_TIME_BETWEEN_BLOCKS * 0
        ],
        [
            private_keys[4], private_keys[2], 100,
            diverging_transactions_timestamp + MIN_TIME_BETWEEN_BLOCKS * 1
        ],
        [
            private_keys[4], private_keys[3], 100,
            diverging_transactions_timestamp + MIN_TIME_BETWEEN_BLOCKS * 2
        ],
    ]
    competing_base_db = MemoryDB(base_db.kv_store.copy())
    add_transactions_to_blockchain_db(competing_base_db,
                                      additional_tx_list_for_competing_db)

    # stake for the second half of chains should be from node 1 to node n:
    # 100
    # 1000000
    # 999100
    # 998100
    # 997100
    # 932700
    # 995000
    # 994000
    # 993000
    # 992000

    # for peer node 7 for root hash 1
    # 100 + 997100 + 996100 + 995100 + 930700

    for i in range(int(num_peers_in_swarm / 2), num_peers_in_swarm):
        peer_dbs.append(MemoryDB(competing_base_db.kv_store.copy()))

    bootstrap_node = TestnetChain(
        base_db, TESTNET_GENESIS_PRIVATE_KEY.public_key.to_canonical_address())
    bootstrap_node.chaindb.initialize_historical_minimum_gas_price_at_genesis(
        min_gas_price=1, net_tpc_cap=100, tpc=1)
    consensus_root_hash_timestamps = bootstrap_node.chain_head_db.get_historical_root_hashes(
    )

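    # Validation: every node should agree with the bootstrap node's historical root hashes, and each node's
    # sync parameters should reflect whether or not it is in consensus.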
    async def validation(consensus_services):
        for i in range(len(consensus_services)):
            client_consensus = consensus_services[i]
            for timestamp, root_hash in consensus_root_hash_timestamps:
                client_consensus_choice = await client_consensus.coro_get_root_hash_consensus(
                    timestamp)
                assert (client_consensus_choice == root_hash)

            #consensus_service 0 is bootnode, it is in consensus
            #consensus_service 1 is the client. It only has genesis block and is not in consensus
            #consensus_services 2 to 2+int(num_peers_in_swarm/2) are in consensus
            #the rest of the peers are not in consensus
            await client_consensus.get_blockchain_sync_parameters()
            if i in [0, *[j + 2 for j in range(int(num_peers_in_swarm / 2))]]:
                sync_parameters = await client_consensus.get_blockchain_sync_parameters(
                )
                assert sync_parameters is None
            if i == 1:
                sync_parameters = await client_consensus.get_blockchain_sync_parameters(
                    debug=True)
                if (
                        genesis_block_timestamp +
                        gap_between_genesis_block_and_first_transaction
                ) < int(
                        time.time() / 1000
                ) * 1000 - 1000 * 1000 + 4 * 1000 or gap_between_genesis_block_and_first_transaction < TIME_BETWEEN_HEAD_HASH_SAVE:
                    assert sync_parameters.timestamp_for_root_hash == int(
                        (time.time() - TIME_OFFSET_TO_FAST_SYNC_TO) /
                        1000) * 1000
                else:
                    assert sync_parameters.timestamp_for_root_hash == int(
                        (genesis_block_timestamp +
                         gap_between_genesis_block_and_first_transaction) /
                        1000) * 1000 + 1000
            if i in [
                    j + 2 for j in range(int(num_peers_in_swarm /
                                             2), num_peers_in_swarm)
            ]:
                timestamp = int(
                    diverging_transactions_timestamp / 1000) * 1000 + 1000
                sync_parameters = await client_consensus.get_blockchain_sync_parameters(
                    debug=True)
                if timestamp > int(time.time()) - 1000 * 1000 + 1000 * 4:
                    assert sync_parameters.timestamp_for_root_hash == timestamp
                else:
                    assert sync_parameters.timestamp_for_root_hash == int(
                        (time.time() - TIME_OFFSET_TO_FAST_SYNC_TO) /
                        1000) * 1000

    await _test_consensus_swarm(request, event_loop, base_db, client_db,
                                peer_dbs, validation)
Example #8
async def test_consensus_avg_network_min_gas(request, event_loop):

    num_peers_in_swarm = 6

    base_db = MemoryDB()
    create_predefined_blockchain_database(base_db)

    tx_list = [[
        TESTNET_GENESIS_PRIVATE_KEY, private_keys[1], 1,
        int(time.time())
    ]]
    add_transactions_to_blockchain_db(base_db, tx_list)

    client_db = MemoryDB(kv_store=base_db.kv_store.copy())

    peer_dbs = []
    for i in range(num_peers_in_swarm):
        peer_dbs.append(MemoryDB(kv_store=base_db.kv_store.copy()))

    # Set their minimum gas prices
    bootstrap_chain = TestnetChain(
        base_db, TESTNET_GENESIS_PRIVATE_KEY.public_key.to_canonical_address())
    bootstrap_chain.min_gas_db.initialize_historical_minimum_gas_price_at_genesis(
        min_gas_price=100, net_tpc_cap=1)

    client_chain = TestnetChain(
        client_db,
        TESTNET_GENESIS_PRIVATE_KEY.public_key.to_canonical_address())
    client_chain.min_gas_db.initialize_historical_minimum_gas_price_at_genesis(
        min_gas_price=100, net_tpc_cap=1)

    for peer_db in peer_dbs:
        peer_chain = TestnetChain(
            peer_db,
            TESTNET_GENESIS_PRIVATE_KEY.public_key.to_canonical_address())
        peer_chain.min_gas_db.initialize_historical_minimum_gas_price_at_genesis(
            min_gas_price=random.randint(1, 1000), net_tpc_cap=1)

    bootstrap_historical_min_gas_price = bootstrap_chain.min_gas_db.load_historical_minimum_gas_price(
    )
    bootstrap_historical_network_tpc_capability = bootstrap_chain.min_gas_db.load_historical_network_tpc_capability(
    )

    async def validation(consensus_services: List[Consensus]):
        # avg_min_gas_limits = [await client_consensus.calculate_average_network_min_gas_limit() for client_consensus in consensus_services]
        # print(avg_min_gas_limits)
        # all_equal = all(x == avg_min_gas_limits[0] for x in avg_min_gas_limits)
        # assert(all_equal)

        # We also want to make sure that the nodes correctly initialized to the bootstrap node
        for consensus in consensus_services:
            chain = consensus.node.get_chain()
            node_historical_min_gas_price = chain.min_gas_db.load_historical_minimum_gas_price(
            )
            node_historical_network_tpc_capability = chain.min_gas_db.load_historical_network_tpc_capability(
            )

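            # All but the most recent entry of the bootstrap node's history should appear unchanged at the start of each node's history.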
            assert (bootstrap_historical_min_gas_price[:-1] ==
                    node_historical_min_gas_price[:len(
                        bootstrap_historical_min_gas_price) - 1])
            assert (bootstrap_historical_network_tpc_capability[:-1] ==
                    node_historical_network_tpc_capability[:len(
                        bootstrap_historical_network_tpc_capability) - 1])

    async def wait_for_time(consensus_services):
        while True:
            # They should have the same parameters once they have received stats from all other nodes.
            length_of_stats = [
                len(consensus._network_min_gas_limit_statistics)
                for consensus in consensus_services
            ]
            print('Number of collected min gas limit statistics per node: {}'.format(length_of_stats))
            if all([x >= (num_peers_in_swarm + 1) for x in length_of_stats]):
                return
            await asyncio.sleep(1)

    await _test_consensus_swarm(request,
                                event_loop,
                                base_db,
                                client_db,
                                peer_dbs,
                                validation,
                                waiting_function=wait_for_time)