def deploy_contract(source_account):
    """Sign and submit a deploy-contract transaction for `source_account`.

    Reads the wasm blob from WASM_FILENAME and uses the account's next
    nonce anchored at the latest block hash.
    """
    block_hash = get_latest_block_hash()
    next_nonce = get_nonce_for_pk(source_account.account_id,
                                  source_account.pk) + 1
    contract_code = utils.load_binary_file(WASM_FILENAME)
    signed_tx = sign_deploy_contract_tx(source_account, contract_code,
                                        next_nonce, block_hash)
    send_tx(signed_tx)
def test_tx_status():
    """Submit a payment, a contract deployment and a function call in turn,
    checking the transaction status of each via submit_tx_and_check.
    """
    nodes = start_cluster(
        4, 0, 1, None,
        [["epoch_length", 1000], ["block_producer_kickout_threshold", 80],
         ["transaction_validity_period", 10000]], {})
    signer_key = nodes[0].signer_key
    status = nodes[0].get_status()
    block_hash = status['sync_info']['latest_block_hash']
    encoded_block_hash = base58.b58decode(block_hash.encode('ascii'))

    payment_tx = transaction.sign_payment_tx(signer_key, 'test1', 100, 1,
                                             encoded_block_hash)
    submit_tx_and_check(nodes, 0, payment_tx)

    deploy_contract_tx = transaction.sign_deploy_contract_tx(
        signer_key, load_test_contract(), 2, encoded_block_hash)
    submit_tx_and_check(nodes, 0, deploy_contract_tx)

    function_call_tx = transaction.sign_function_call_tx(
        signer_key, signer_key.account_id, 'write_key_value',
        struct.pack('<QQ', 42, 24), 300000000000000, 0, 3, encoded_block_hash)
    # FIX: previously this re-submitted deploy_contract_tx, so the function
    # call was built but never actually exercised by the test.
    submit_tx_and_check(nodes, 0, function_call_tx)
def deploy_contract(node):
    """Deploy the shared test contract through `node`, then let a few
    blocks be produced so the deployment settles."""
    latest_hash = node.get_status()['sync_info']['latest_block_hash']
    decoded_hash = base58.b58decode(latest_hash.encode('utf8'))
    deploy_tx = sign_deploy_contract_tx(node.signer_key, load_test_contract(),
                                        10, decoded_hash)
    node.send_tx_and_wait(deploy_tx, timeout=15)
    wait_for_blocks_or_timeout(node, 3, 100)
def deploy_contract(node):
    """Deploy the Rust test contract binary through `node`, then let a few
    blocks be produced so the deployment settles."""
    latest_hash = node.get_status()['sync_info']['latest_block_hash']
    decoded_hash = base58.b58decode(latest_hash.encode('utf8'))
    contract_code = load_binary_file(
        '../runtime/near-test-contracts/res/test_contract_rs.wasm')
    deploy_tx = sign_deploy_contract_tx(node.signer_key, contract_code, 10,
                                        decoded_hash)
    node.send_tx_and_wait(deploy_tx, timeout=15)
    wait_for_blocks_or_timeout(node, 3, 100)
def test_max_gas_burnt_view():
    """Check that a node's `max_gas_burnt_view` limit aborts expensive view
    calls while cheap ones still succeed.

    Node 0 has no view gas limit; node 1 is configured with a 5e10 limit.
    `fibonacci(25)` must succeed on node 0 and fail with GasLimitExceeded on
    node 1, while `fibonacci(5)` succeeds on both.
    """
    nodes = start_cluster(
        2, 0, 1,
        config=None,
        genesis_config_changes=[],
        client_config_changes={1: {
            'max_gas_burnt_view': int(5e10)
        }})
    contract_key = nodes[0].signer_key
    contract = load_binary_file(
        '../runtime/near-test-contracts/res/test_contract_rs.wasm')

    # Deploy the fib smart contract
    status = nodes[0].get_status()
    latest_block_hash = status['sync_info']['latest_block_hash']
    deploy_contract_tx = transaction.sign_deploy_contract_tx(
        contract_key, contract, 10,
        base58.b58decode(latest_block_hash.encode('utf8')))
    deploy_contract_response = nodes[0].send_tx_and_wait(
        deploy_contract_tx, 10)
    # FIX: the deploy response was previously ignored; fail fast here so a
    # broken deployment does not surface as confusing view-call failures.
    assert 'error' not in deploy_contract_response, deploy_contract_response

    def call_fib(node, n):
        # View-call `fibonacci` with a single-byte argument `n`.
        args = base64.b64encode(bytes([n])).decode('ascii')
        return node.call_function(contract_key.account_id,
                                  'fibonacci',
                                  args,
                                  timeout=10).get('result')

    # Call view function of the smart contract via the first node. This should
    # succeed.
    result = call_fib(nodes[0], 25)
    assert 'result' in result and 'error' not in result, (
        'Expected "result" and no "error" in response, got: {}'.format(result))
    n = int.from_bytes(bytes(result['result']), 'little')
    assert n == 75025, 'Expected result to be 75025 but got: {}'.format(n)

    # Same but against the second node. This should fail because of gas limit.
    result = call_fib(nodes[1], 25)
    assert 'result' not in result and 'error' in result, (
        'Expected "error" and no "result" in response, got: {}'.format(result))
    error = result['error']
    assert 'HostError(GasLimitExceeded)' in error, (
        'Expected error due to GasLimitExceeded but got: {}'.format(error))

    # It should still succeed for small arguments.
    result = call_fib(nodes[1], 5)
    assert 'result' in result and 'error' not in result, (
        'Expected "result" and no "error" in response, got: {}'.format(result))
    n = int.from_bytes(bytes(result['result']), 'little')
    assert n == 5, 'Expected result to be 5 but got: {}'.format(n)
def test_outcome_proof():
    """Deploy the test contract, then verify transaction outcome proofs in
    both the succeeding and the failing case."""
    nodes = start_cluster(
        2, 0, 1, None,
        [["epoch_length", 1000], ["block_producer_kickout_threshold", 80]],
        {})

    tip_hash = nodes[0].get_status()['sync_info']['latest_block_hash']
    deploy_tx = transaction.sign_deploy_contract_tx(
        nodes[0].signer_key, load_test_contract(), 10,
        base58.b58decode(tip_hash.encode('utf8')))
    deploy_response = nodes[0].send_tx_and_wait(deploy_tx, 15)
    assert 'error' not in deploy_response, deploy_response

    # Outcome proof for a successful call, then for a failing one.
    check_transaction_outcome_proof(nodes, True, 20)
    check_transaction_outcome_proof(nodes, False, 30)
def test_tx_status(nodes, *, nonce_offset: int = 0):
    """Submit a payment, a contract deployment and a function call against
    `nodes[0]`, checking each transaction's status.

    `nonce_offset` lets the caller shift nonces so the helper can be run
    more than once against the same account.
    """
    signer_key = nodes[0].signer_key
    encoded_block_hash = nodes[0].get_latest_block().hash_bytes

    payment_tx = transaction.sign_payment_tx(signer_key, 'test1', 100,
                                             nonce_offset + 1,
                                             encoded_block_hash)
    submit_tx_and_check(nodes, 0, payment_tx)

    deploy_contract_tx = transaction.sign_deploy_contract_tx(
        signer_key, load_test_contract(), nonce_offset + 2,
        encoded_block_hash)
    submit_tx_and_check(nodes, 0, deploy_contract_tx)

    function_call_tx = transaction.sign_function_call_tx(
        signer_key, signer_key.account_id, 'write_key_value',
        struct.pack('<QQ', 42, 24), 300000000000000, 0, nonce_offset + 3,
        encoded_block_hash)
    # FIX: previously this re-submitted deploy_contract_tx, so the function
    # call was built but never actually exercised by the test.
    submit_tx_and_check(nodes, 0, function_call_tx)
def test_deploy_contract():
    """Deploy the test contract via node 0, then call `log_something`
    through node 1 and check the emitted log line."""
    nodes = start_cluster(
        2, 0, 1, None,
        [["epoch_length", 10], ["block_producer_kickout_threshold", 80]], {})

    tip = nodes[0].get_latest_block().hash_bytes
    deploy_tx = sign_deploy_contract_tx(nodes[0].signer_key,
                                        load_test_contract(), 10, tip)
    nodes[0].send_tx(deploy_tx)

    # Give the network a moment to include the deployment.
    time.sleep(3)

    tip = nodes[1].get_latest_block().hash_bytes
    call_tx = sign_function_call_tx(nodes[0].signer_key,
                                    nodes[0].signer_key.account_id,
                                    'log_something', [], 100000000000,
                                    100000000000, 20, tip)
    res = nodes[1].send_tx_and_wait(call_tx, 20)
    import json
    print(json.dumps(res, indent=2))
    assert res['result']['receipts_outcome'][0]['outcome']['logs'][0] == 'hello'
def test_key_value_changes(): """ Plan: 1. Deploy a contract. 2. Observe the code changes in the block where the transaction outcome "lands". 3. Send two transactions to be included into the same block setting and overriding the value of the same key (`my_key`). 4. Observe the changes in the block where the transaction outcome "lands". """ contract_key = nodes[0].signer_key hello_smart_contract = load_binary_file('../tests/hello.wasm') # Step 1 status = nodes[0].get_status() latest_block_hash = status['sync_info']['latest_block_hash'] deploy_contract_tx = transaction.sign_deploy_contract_tx( contract_key, hello_smart_contract, 10, base58.b58decode(latest_block_hash.encode('utf8'))) deploy_contract_response = nodes[0].send_tx_and_wait( deploy_contract_tx, 10) # Step 2 block_hash = deploy_contract_response['result']['transaction_outcome'][ 'block_hash'] assert_changes_in_block_response(request={"block_id": block_hash}, expected_response={ "block_hash": block_hash, "changes": [{ "type": "account_touched", "account_id": contract_key.account_id, }, { "type": "contract_code_touched", "account_id": contract_key.account_id, }, { "type": "access_key_touched", "account_id": contract_key.account_id, }] }) base_request = { "block_id": block_hash, "changes_type": "contract_code_changes", } for request in [ # Test empty account_ids { **base_request, "account_ids": [] }, # Test an account_id that is a prefix of the original account_id { **base_request, "account_ids": [contract_key.account_id[:-1]] }, # Test an account_id that has the original account_id as a prefix { **base_request, "account_ids": [contract_key.account_id + '_extra'] }, ]: assert_changes_response(request=request, expected_response={ "block_hash": block_hash, "changes": [] }) # Test happy-path expected_response = { "block_hash": block_hash, "changes": [ { "cause": { "type": "receipt_processing", "receipt_hash": deploy_contract_response["result"]["receipts_outcome"][0] ["id"], }, "type": "contract_code_update", 
"change": { "account_id": contract_key.account_id, "code_base64": base64.b64encode(hello_smart_contract).decode('utf-8'), } }, ] } base_request = { "block_id": block_hash, "changes_type": "contract_code_changes", } for request in [ { **base_request, "account_ids": [contract_key.account_id] }, { **base_request, "account_ids": [ contract_key.account_id + '_non_existing1', contract_key.account_id, contract_key.account_id + '_non_existing2' ] }, ]: assert_changes_response(request=request, expected_response=expected_response) # Step 3 status = nodes[1].get_status() latest_block_hash = status['sync_info']['latest_block_hash'] function_caller_key = nodes[0].signer_key def set_value_1(): function_call_1_tx = transaction.sign_function_call_tx( function_caller_key, contract_key.account_id, 'setKeyValue', json.dumps({ "key": "my_key", "value": "my_value_1" }).encode('utf-8'), 300000000000000, 100000000000, 20, base58.b58decode(latest_block_hash.encode('utf8'))) nodes[1].send_tx_and_wait(function_call_1_tx, 10) function_call_1_thread = threading.Thread(target=set_value_1) function_call_1_thread.start() function_call_2_tx = transaction.sign_function_call_tx( function_caller_key, contract_key.account_id, 'setKeyValue', json.dumps({ "key": "my_key", "value": "my_value_2" }).encode('utf-8'), 300000000000000, 100000000000, 30, base58.b58decode(latest_block_hash.encode('utf8'))) function_call_2_response = nodes[1].send_tx_and_wait( function_call_2_tx, 10) assert function_call_2_response['result']['receipts_outcome'][0]['outcome']['status'] == {'SuccessValue': ''}, \ "Expected successful execution, but the output was: %s" % function_call_2_response function_call_1_thread.join() tx_block_hash = function_call_2_response['result']['transaction_outcome'][ 'block_hash'] # Step 4 assert_changes_in_block_response(request={"block_id": tx_block_hash}, expected_response={ "block_hash": tx_block_hash, "changes": [ { "type": "account_touched", "account_id": contract_key.account_id, }, { "type": 
"access_key_touched", "account_id": contract_key.account_id, }, { "type": "data_touched", "account_id": contract_key.account_id, }, ] }) base_request = { "block_id": block_hash, "changes_type": "data_changes", "key_prefix_base64": base64.b64encode(b"my_key").decode('utf-8'), } for request in [ # Test empty account_ids { **base_request, "account_ids": [] }, # Test an account_id that is a prefix of the original account_id { **base_request, "account_ids": [contract_key.account_id[:-1]] }, # Test an account_id that has the original account_id as a prefix { **base_request, "account_ids": [contract_key.account_id + '_extra'] }, # Test non-existing key prefix { **base_request, "account_ids": [contract_key.account_id], "key_prefix_base64": base64.b64encode(b"my_key_with_extra").decode('utf-8'), }, ]: assert_changes_response(request=request, expected_response={ "block_hash": block_hash, "changes": [] }) # Test happy-path expected_response = { "block_hash": tx_block_hash, "changes": [{ "cause": { "type": "receipt_processing", }, "type": "data_update", "change": { "account_id": contract_key.account_id, "key_base64": base64.b64encode(b"my_key").decode('utf-8'), "value_base64": base64.b64encode(b"my_value_1").decode('utf-8'), } }, { "cause": { "type": "receipt_processing", "receipt_hash": function_call_2_response["result"]["receipts_outcome"][0] ["id"], }, "type": "data_update", "change": { "account_id": contract_key.account_id, "key_base64": base64.b64encode(b"my_key").decode('utf-8'), "value_base64": base64.b64encode(b"my_value_2").decode('utf-8'), } }] } base_request = { "block_id": tx_block_hash, "changes_type": "data_changes", "key_prefix_base64": base64.b64encode(b"my_key").decode('utf-8'), } for request in [ { **base_request, "account_ids": [contract_key.account_id] }, { **base_request, "account_ids": [ contract_key.account_id + '_non_existing1', contract_key.account_id, contract_key.account_id + '_non_existing2' ] }, { **base_request, "account_ids": 
[contract_key.account_id], "key_prefix_base64": base64.b64encode(b"").decode('utf-8'), }, { **base_request, "account_ids": [contract_key.account_id], "key_prefix_base64": base64.b64encode(b"my_ke").decode('utf-8'), }, ]: assert_changes_response( request=request, expected_response=expected_response, exclude_paths={"root['changes'][0]['cause']['receipt_hash']"}, )
def main():
    """Upgrade test: run 3 stable-binary nodes plus 1 current-binary node,
    exercise transactions, then restart the stable nodes on the current
    binary and verify the protocol version converges.
    """
    node_root = "/tmp/near/upgradable"
    if os.path.exists(node_root):
        shutil.rmtree(node_root)
    subprocess.check_output('mkdir -p /tmp/near', shell=True)

    # TODO(#3285): use proper branch
    near_root, (stable_branch,
                current_branch) = branches.prepare_ab_test("1.13.0")

    # Setup local network.
    print([
        "%snear-%s" % (near_root, stable_branch),
        "--home=%s" % node_root, "testnet", "--v", "4", "--prefix", "test"
    ])
    subprocess.call([
        "%snear-%s" % (near_root, stable_branch),
        "--home=%s" % node_root, "testnet", "--v", "4", "--prefix", "test"
    ])
    genesis_config_changes = [("epoch_length", 20),
                              ("num_block_producer_seats", 10),
                              ("num_block_producer_seats_per_shard", [10]),
                              ("block_producer_kickout_threshold", 80),
                              ("chunk_producer_kickout_threshold", 80),
                              ("chain_id", "testnet")]
    node_dirs = [os.path.join(node_root, 'test%d' % i) for i in range(4)]
    for i, node_dir in enumerate(node_dirs):
        cluster.apply_genesis_changes(node_dir, genesis_config_changes)

    # Start 3 stable nodes and one current node.
    config = {
        "local": True,
        'near_root': near_root,
        'binary_name': "near-%s" % stable_branch
    }
    nodes = [
        cluster.spin_up_node(config, near_root, node_dirs[0], 0, None, None)
    ]
    for i in range(1, 3):
        nodes.append(
            cluster.spin_up_node(config, near_root, node_dirs[i], i,
                                 nodes[0].node_key.pk, nodes[0].addr()))
    config["binary_name"] = "near-%s" % current_branch
    nodes.append(
        cluster.spin_up_node(config, near_root, node_dirs[3], 3,
                             nodes[0].node_key.pk, nodes[0].addr()))
    time.sleep(2)

    # deploy a contract
    status = nodes[0].get_status()
    hash = status['sync_info']['latest_block_hash']
    tx = sign_deploy_contract_tx(
        nodes[0].signer_key,
        load_binary_file(
            '../runtime/near-vm-runner/tests/res/test_contract_rs.wasm'), 1,
        base58.b58decode(hash.encode('utf8')))
    res = nodes[0].send_tx_and_wait(tx, timeout=20)
    assert 'error' not in res, res

    # write some random value
    tx = sign_function_call_tx(nodes[0].signer_key,
                               nodes[0].signer_key.account_id,
                               'write_random_value', [], 10**13, 0, 2,
                               base58.b58decode(hash.encode('utf8')))
    res = nodes[0].send_tx_and_wait(tx, timeout=20)
    assert 'error' not in res, res
    assert 'Failure' not in res['result']['status'], res

    # hex_account_id = (b"I'm hex!" * 4).hex()
    hex_account_id = '49276d206865782149276d206865782149276d206865782149276d2068657821'
    tx = sign_payment_tx(key=nodes[0].signer_key,
                         to=hex_account_id,
                         amount=10 ** 25,
                         nonce=3,
                         blockHash=base58.b58decode(hash.encode('utf8')))
    res = nodes[0].send_tx_and_wait(tx, timeout=20)
    # Account doesn't exist — before the upgrade, transfers to implicit hex
    # accounts are expected to fail.
    assert 'error' not in res, res
    assert 'Failure' in res['result']['status'], res
    # No account
    res = nodes[0].get_account(hex_account_id)
    assert 'error' in res, res

    wait_for_blocks_or_timeout(nodes[0], 20, 120)

    # Restart stable nodes into new version.
    for i in range(3):
        nodes[i].kill()
        nodes[i].binary_name = config['binary_name']
        nodes[i].start(nodes[0].node_key.pk, nodes[0].addr())

    wait_for_blocks_or_timeout(nodes[3], 60, 120)
    status0 = nodes[0].get_status()
    status3 = nodes[3].get_status()
    protocol_version = status0['protocol_version']
    latest_protocol_version = status3["latest_protocol_version"]
    assert protocol_version == latest_protocol_version, \
        "Latest protocol version %d should match active protocol version %d" % (latest_protocol_version, protocol_version)

    hash = status0['sync_info']['latest_block_hash']

    # write some random value again
    tx = sign_function_call_tx(nodes[0].signer_key,
                               nodes[0].signer_key.account_id,
                               'write_random_value', [], 10**13, 0, 4,
                               base58.b58decode(hash.encode('utf8')))
    res = nodes[0].send_tx_and_wait(tx, timeout=20)
    assert 'error' not in res, res
    assert 'Failure' not in res['result']['status'], res

    tx = sign_payment_tx(key=nodes[0].signer_key,
                         to=hex_account_id,
                         amount=10 ** 25,
                         nonce=5,
                         blockHash=base58.b58decode(hash.encode('utf8')))
    res = nodes[0].send_tx_and_wait(tx, timeout=20)
    # Successfully created a new account on transfer to hex — post-upgrade
    # behavior differs from the pre-upgrade failure above.
    assert 'error' not in res, res
    assert 'Failure' not in res['result']['status'], res
    hex_account_balance = int(
        nodes[0].get_account(hex_account_id)['result']['amount'])
    assert hex_account_balance == 10 ** 25
# Test script: deploy the test contract on a 4-shard cluster with one
# tracking observer, then issue write_key_value calls in a loop.
sys.path.append(str(pathlib.Path(__file__).resolve().parents[2] / 'lib'))

from cluster import start_cluster
from configured_logger import logger
from transaction import sign_deploy_contract_tx, sign_function_call_tx
from utils import load_test_contract

# 4 validators, 1 observer, 4 shards; node 4 tracks every shard.
nodes = start_cluster(
    4, 1, 4, None,
    [["epoch_length", 10], ["block_producer_kickout_threshold", 80]],
    {4: {
        "tracked_shards": [0, 1, 2, 3]
    }})

# Deploy contract
hash_ = nodes[0].get_latest_block().hash_bytes
tx = sign_deploy_contract_tx(nodes[0].signer_key, load_test_contract(), 10,
                             hash_)
nodes[0].send_tx(tx)
time.sleep(3)

# Write 10 values to storage
for i in range(10):
    hash_ = nodes[1].get_latest_block().hash_bytes
    # 16-byte payload: first 8 bytes are the key, last 8 the value.
    keyvalue = bytearray(16)
    keyvalue[0] = i
    keyvalue[8] = i
    # NOTE(review): tx2 is built but not sent within this chunk — presumably
    # the send happens just below this excerpt; confirm against the full file.
    tx2 = sign_function_call_tx(nodes[0].signer_key,
                                nodes[0].signer_key.account_id,
                                'write_key_value', bytes(keyvalue),
                                10000000000000, 100000000000, 20 + i * 10,
                                hash_)
def test_upgrade() -> None:
    """Test that upgrade from ‘stable’ to ‘current’ binary is possible.

    1. Start a network with 3 `stable` nodes and 1 `new` node.
    2. Start switching `stable` nodes one by one with `new` nodes.
    3. Run for three epochs and observe that current protocol version of the
       network matches `new` nodes.
    """
    executables = get_executables()
    node_root = utils.get_near_tempdir('upgradable', clean=True)

    # Setup local network.
    # TODO(#4372): testnet subcommand deprecated since 1.24.  Replace with
    # localnet after a couple of releases in 2022.
    cmd = (executables.stable.neard, "--home=%s" % node_root, "testnet",
           "--v", "4", "--prefix", "test")
    logger.info(' '.join(str(arg) for arg in cmd))
    subprocess.check_call(cmd)
    genesis_config_changes = [("epoch_length", 20),
                              ("num_block_producer_seats", 10),
                              ("num_block_producer_seats_per_shard", [10]),
                              ("block_producer_kickout_threshold", 80),
                              ("chunk_producer_kickout_threshold", 80),
                              ("chain_id", "testnet")]
    node_dirs = [os.path.join(node_root, 'test%d' % i) for i in range(4)]
    for i, node_dir in enumerate(node_dirs):
        cluster.apply_genesis_changes(node_dir, genesis_config_changes)
        cluster.apply_config_changes(node_dir, {'tracked_shards': [0]})

    # Start 3 stable nodes and one current node.
    config = executables.stable.node_config()
    nodes = [
        cluster.spin_up_node(config, executables.stable.root, node_dirs[0], 0)
    ]
    for i in range(1, 3):
        nodes.append(
            cluster.spin_up_node(config,
                                 executables.stable.root,
                                 node_dirs[i],
                                 i,
                                 boot_node=nodes[0]))
    config = executables.current.node_config()
    nodes.append(
        cluster.spin_up_node(config,
                             executables.current.root,
                             node_dirs[3],
                             3,
                             boot_node=nodes[0]))
    time.sleep(2)

    # deploy a contract
    hash = nodes[0].get_latest_block().hash_bytes
    tx = sign_deploy_contract_tx(nodes[0].signer_key,
                                 utils.load_test_contract(), 1, hash)
    res = nodes[0].send_tx_and_wait(tx, timeout=20)
    assert 'error' not in res, res

    # write some random value
    tx = sign_function_call_tx(nodes[0].signer_key,
                               nodes[0].signer_key.account_id,
                               'write_random_value', [], 10**13, 0, 2, hash)
    res = nodes[0].send_tx_and_wait(tx, timeout=20)
    assert 'error' not in res, res
    assert 'Failure' not in res['result']['status'], res

    utils.wait_for_blocks(nodes[0], count=20)

    # Restart stable nodes into new version.
    for i in range(3):
        nodes[i].kill()
        nodes[i].binary_name = config['binary_name']
        nodes[i].start(boot_node=nodes[0])

    utils.wait_for_blocks(nodes[3], count=60)
    status0 = nodes[0].get_status()
    status3 = nodes[3].get_status()
    protocol_version = status0['protocol_version']
    latest_protocol_version = status3["latest_protocol_version"]
    assert protocol_version == latest_protocol_version, \
        "Latest protocol version %d should match active protocol version %d" % (
            latest_protocol_version, protocol_version)

    hash = base58.b58decode(
        status0['sync_info']['latest_block_hash'].encode('ascii'))

    # write some random value again
    tx = sign_function_call_tx(nodes[0].signer_key,
                               nodes[0].signer_key.account_id,
                               'write_random_value', [], 10**13, 0, 4, hash)
    res = nodes[0].send_tx_and_wait(tx, timeout=20)
    assert 'error' not in res, res
    assert 'Failure' not in res['result']['status'], res

    # hex_account_id = (b"I'm hex!" * 4).hex()
    hex_account_id = '49276d206865782149276d206865782149276d206865782149276d2068657821'
    tx = sign_payment_tx(key=nodes[0].signer_key,
                         to=hex_account_id,
                         amount=10**25,
                         nonce=5,
                         blockHash=hash)
    res = nodes[0].send_tx_and_wait(tx, timeout=20)
    # Successfully created a new account on transfer to hex
    assert 'error' not in res, res
    assert 'Failure' not in res['result']['status'], res
    hex_account_balance = int(
        nodes[0].get_account(hex_account_id)['result']['amount'])
    assert hex_account_balance == 10**25
# Stress script: deploy random garbage wasm blobs and byte-perturbed variants
# of the real test contract to check the node survives invalid contracts.
nodes = start_cluster(
    3, 0, 4, None,
    [["epoch_length", 1000], ["block_producer_kickout_threshold", 80]], {})

wasm_blob_1 = load_binary_file(
    '../runtime/near-vm-runner/tests/res/test_contract_rs.wasm')

status = nodes[0].get_status()
hash_ = status['sync_info']['latest_block_hash']
hash_ = base58.b58decode(hash_.encode('utf8'))

# Phase 1: ten deployments of pure random bytes (definitely invalid wasm).
for iter_ in range(10):
    print("Deploying garbage contract #%s" % iter_)
    wasm_blob = bytes(
        [random.randint(0, 255) for _ in range(random.randint(200, 500))])
    tx = sign_deploy_contract_tx(nodes[0].signer_key, wasm_blob, 10 + iter_,
                                 hash_)
    nodes[0].send_tx_and_wait(tx, 5)

# Phase 2: same-length string substitutions keep the blob size identical so
# only the content (not the structure offsets) changes.
for iter_ in range(10):
    print("Deploying perturbed contract #%s" % iter_)
    new_name = '%s_mething' % iter_
    new_output = '%s_llo' % iter_
    wasm_blob = wasm_blob_1.replace(bytes('something', 'utf8'),
                                    bytes(new_name, 'utf8')).replace(
                                        bytes('hello', 'utf8'),
                                        bytes(new_output, 'utf8'))
    assert len(wasm_blob) == len(wasm_blob_1)
    # NOTE(review): `pos` is unused within this excerpt — presumably the byte
    # flip at `pos` happens just below; confirm against the full file.
    pos = random.randint(0, len(wasm_blob_1) - 1)
print() # Test balance transfers initial_balances = [int(nodes[0].get_account(account.account_id)['result']['amount']) for account in accounts] nonces = [nodes[0].get_nonce_for_pk(account.account_id, account.pk) for account in accounts] print("INITIAL BALANCES", initial_balances) print("NONCES", nonces) last_block_hash = nodes[0].get_status()['sync_info']['latest_block_hash'] last_block_hash_decoded = base58.b58decode(last_block_hash.encode('utf8')) tx = sign_payment_tx(accounts[0], accounts[1].account_id, 100, nonces[0] + 1, last_block_hash_decoded) nodes[0].send_tx_and_wait(tx, timeout=10) new_balances = [int(nodes[0].get_account(account.account_id)['result']['amount']) for account in accounts] print("NEW BALANCES", new_balances) assert (new_balances[0] + 100) % 1000 == initial_balances[0] % 1000 assert (initial_balances[1] + 100) % 1000 == new_balances[1] % 1000 # Test contract deployment tx = sign_deploy_contract_tx(accounts[2], load_binary_file('../runtime/near-vm-runner/tests/res/test_contract_rs.wasm'), nonces[2] + 1, last_block_hash_decoded) nodes[0].send_tx_and_wait(tx, timeout=20) tx2 = sign_function_call_tx(accounts[2], accounts[2].account_id, 'log_something', [], 100000000000, 100000000000, nonces[2] + 2, last_block_hash_decoded) res = nodes[1].send_tx_and_wait(tx2, 10) assert res['result']['receipts_outcome'][0]['outcome']['logs'][0] == 'hello'
# Test script: deploy the Rust test contract on a 4-node/4-shard cluster and
# write values into storage in a loop.
sys.path.append('lib')
from cluster import start_cluster
from transaction import sign_deploy_contract_tx, sign_function_call_tx
from utils import load_binary_file, compile_rust_contract

nodes = start_cluster(
    4, 0, 4, None,
    [["epoch_length", 10], ["block_producer_kickout_threshold", 80]], {})

# Deploy contract
status = nodes[0].get_status()
hash_ = status['sync_info']['latest_block_hash']
hash_ = base58.b58decode(hash_.encode('utf8'))
tx = sign_deploy_contract_tx(
    nodes[0].signer_key,
    load_binary_file(
        '../runtime/near-vm-runner/tests/res/test_contract_rs.wasm'), 10,
    hash_)
nodes[0].send_tx(tx)
time.sleep(3)

# Write 10 values to storage
for i in range(10):
    status2 = nodes[1].get_status()
    hash_2 = status2['sync_info']['latest_block_hash']
    hash_2 = base58.b58decode(hash_2.encode('utf8'))
    # 16-byte payload: first 8 bytes key, last 8 bytes value.
    keyvalue = bytearray(16)
    keyvalue[0] = i
    keyvalue[8] = i
    # NOTE(review): this chunk is truncated mid-call — the remaining
    # arguments and the send follow below this excerpt.
    tx2 = sign_function_call_tx(nodes[0].signer_key,
# NOTE(review): this chunk begins with the tail of a serialization helper
# whose definition starts above this excerpt; `borsh_res` is built out of
# view.
    return borsh_res

nodes = start_cluster(
    2, 0, 1, None,
    [["epoch_length", 1000], ["block_producer_kickout_threshold", 80]], {})

# deploy a smart contract for testing
contract_key = nodes[0].signer_key
hello_smart_contract = load_binary_file('../tests/hello.wasm')

status = nodes[0].get_status()
latest_block_hash = status['sync_info']['latest_block_hash']
deploy_contract_tx = transaction.sign_deploy_contract_tx(
    contract_key, hello_smart_contract, 10,
    base58.b58decode(latest_block_hash.encode('utf8')))
deploy_contract_response = nodes[0].send_tx_and_wait(deploy_contract_tx, 15)
assert 'error' not in deploy_contract_response, deploy_contract_response


def check_transaction_outcome_proof(should_succeed, nonce):
    # Issue a setKeyValue call with either ample gas (success path) or far
    # too little gas (failure path) and — presumably — verify the outcome
    # proof below; the rest of this function is beyond this excerpt.
    status = nodes[1].get_status()
    latest_block_hash = status['sync_info']['latest_block_hash']
    function_caller_key = nodes[0].signer_key
    gas = 10000000000000000 if should_succeed else 1000

    # NOTE(review): truncated mid-expression — the json payload and remaining
    # arguments continue below this excerpt.
    function_call_1_tx = transaction.sign_function_call_tx(
        function_caller_key, contract_key.account_id, 'setKeyValue',
        json.dumps({
            "key": "my_key",
# NOTE(review): this chunk starts mid-script; `nodes`, `accounts`,
# `initial_balances` and `nonces` are defined above this excerpt.
logger.info(f"NONCES {nonces}")

last_block_hash = nodes[0].get_status()['sync_info']['latest_block_hash']
last_block_hash_decoded = base58.b58decode(last_block_hash.encode('utf8'))

# Test balance transfer: move 100 yocto from account 0 to account 1.
tx = sign_payment_tx(accounts[0], accounts[1].account_id, 100, nonces[0] + 1,
                     last_block_hash_decoded)
nodes[0].send_tx_and_wait(tx, timeout=10)

new_balances = [
    int(nodes[0].get_account(account.account_id)['result']['amount'])
    for account in accounts
]
# FIX: this literal was missing the f-prefix, so the verbatim text
# "NEW BALANCES {new_balances}" was logged instead of the actual balances.
logger.info(f"NEW BALANCES {new_balances}")

# Compare modulo 1000 — presumably so gas / reward noise in the high digits
# does not affect the transfer check; confirm against the original intent.
assert (new_balances[0] + 100) % 1000 == initial_balances[0] % 1000
assert (initial_balances[1] + 100) % 1000 == new_balances[1] % 1000

# Test contract deployment, then a function call whose log we verify.
tx = sign_deploy_contract_tx(accounts[2], load_test_contract(),
                             nonces[2] + 1, last_block_hash_decoded)
nodes[0].send_tx_and_wait(tx, timeout=20)
tx2 = sign_function_call_tx(accounts[2], accounts[2].account_id,
                            'log_something', [], 100000000000, 100000000000,
                            nonces[2] + 2, last_block_hash_decoded)
res = nodes[1].send_tx_and_wait(tx2, 10)
assert res['result']['receipts_outcome'][0]['outcome']['logs'][0] == 'hello'
def deploy_contract(node):
    """Deploy the shared test contract through `node`, then wait for three
    more blocks so the deployment settles."""
    tip_hash = node.get_latest_block().hash_bytes
    deploy_tx = sign_deploy_contract_tx(node.signer_key,
                                        utils.load_test_contract(), 10,
                                        tip_hash)
    node.send_tx_and_wait(deploy_tx, timeout=15)
    utils.wait_for_blocks(node, count=3)
# Test script: deploy the test contract, verify a `log_something` call, then
# compile a custom Rust contract on the fly.
import sys, time
import base58

sys.path.append('lib')
from cluster import start_cluster
from transaction import sign_deploy_contract_tx, sign_function_call_tx
from utils import load_binary_file, load_test_contract, compile_rust_contract

nodes = start_cluster(
    4, 0, 4, None,
    [["epoch_length", 10], ["block_producer_kickout_threshold", 80]], {})

status = nodes[0].get_status()
hash_ = status['sync_info']['latest_block_hash']
hash_ = base58.b58decode(hash_.encode('utf8'))
tx = sign_deploy_contract_tx(nodes[0].signer_key, load_test_contract(), 10,
                             hash_)
nodes[0].send_tx(tx)
time.sleep(3)

status2 = nodes[1].get_status()
hash_2 = status2['sync_info']['latest_block_hash']
hash_2 = base58.b58decode(hash_2.encode('utf8'))
tx2 = sign_function_call_tx(nodes[0].signer_key,
                            nodes[0].signer_key.account_id, 'log_something',
                            [], 100000000000, 100000000000, 20, hash_2)
res = nodes[1].send_tx_and_wait(tx2, 20)
assert res['result']['receipts_outcome'][0]['outcome']['logs'][0] == 'hello'

# NOTE(review): the triple-quoted Rust source continues past this excerpt —
# the string (and the call) are closed below.
wasm_file = compile_rust_contract('''
metadata! {
# Test script: submit a payment, a contract deployment and a function call,
# checking each transaction's status in turn.
nodes = start_cluster(
    4, 0, 1, None,
    [["epoch_length", 1000], ["block_producer_kickout_threshold", 80],
     ["transaction_validity_period", 10000]], {})

status = nodes[0].get_status()
block_hash = status['sync_info']['latest_block_hash']

print("1")
payment_tx = transaction.sign_payment_tx(
    nodes[0].signer_key, 'test1', 100, 1,
    base58.b58decode(block_hash.encode('utf8')))
submit_tx_and_check(nodes[0], payment_tx)

print("2")
deploy_contract_tx = transaction.sign_deploy_contract_tx(
    nodes[0].signer_key, load_binary_file('../tests/hello.wasm'), 2,
    base58.b58decode(block_hash.encode('utf8')))
submit_tx_and_check(nodes[0], deploy_contract_tx)

print("3")
function_call_tx = transaction.sign_function_call_tx(
    nodes[0].signer_key, nodes[0].signer_key.account_id, 'setKeyValue',
    json.dumps({
        "key": "my_key",
        "value": "my_value_1"
    }).encode('utf-8'), 300000000000000, 0, 3,
    base58.b58decode(block_hash.encode('utf8')))
# FIX: previously this re-submitted deploy_contract_tx, so the function call
# was built but never actually exercised by the test.
submit_tx_and_check(nodes[0], function_call_tx)
# Test script: deploy the Rust test contract, verify a `log_something` call,
# then compile a custom Rust contract on the fly.
# NOTE(review): `sys` and `time` are used below but imported above this
# excerpt — confirm against the full file.
import base58

sys.path.append('lib')
from cluster import start_cluster
from transaction import sign_deploy_contract_tx, sign_function_call_tx
from utils import load_binary_file, compile_rust_contract

nodes = start_cluster(
    4, 0, 4, None,
    [["epoch_length", 10], ["block_producer_kickout_threshold", 80]], {})

status = nodes[0].get_status()
hash_ = status['sync_info']['latest_block_hash']
hash_ = base58.b58decode(hash_.encode('utf8'))
tx = sign_deploy_contract_tx(
    nodes[0].signer_key,
    load_binary_file(
        '../runtime/near-test-contracts/res/test_contract_rs.wasm'), 10,
    hash_)
nodes[0].send_tx(tx)
time.sleep(3)

status2 = nodes[1].get_status()
hash_2 = status2['sync_info']['latest_block_hash']
hash_2 = base58.b58decode(hash_2.encode('utf8'))
tx2 = sign_function_call_tx(nodes[0].signer_key,
                            nodes[0].signer_key.account_id, 'log_something',
                            [], 100000000000, 100000000000, 20, hash_2)
res = nodes[1].send_tx_and_wait(tx2, 20)
assert res['result']['receipts_outcome'][0]['outcome']['logs'][0] == 'hello'

# NOTE(review): the triple-quoted Rust source continues past this excerpt —
# the string (and the call) are closed below.
wasm_file = compile_rust_contract('''
def send_deploy_contract_tx(self, wasm_filename, base_block_hash=None):
    """Sign and submit a deploy-contract transaction for this account.

    Loads the wasm blob from `wasm_filename`; anchors the transaction at
    `base_block_hash` when given, otherwise at the cached base block hash.
    """
    contract_code = load_binary_file(wasm_filename)
    self.prep_tx()
    anchor_hash = base_block_hash or self.base_block_hash
    signed_tx = sign_deploy_contract_tx(self.key, contract_code, self.nonce,
                                        anchor_hash)
    return self.send_tx(signed_tx)
def test_key_value_changes(): """ Plan: 1. Deploy a contract. 2. Observe the code changes in the block where the transaction outcome "lands". 3. Send two transactions to be included into the same block setting and overriding the value of the same key. 4. Observe the changes in the block where the transaction outcome "lands". """ contract_key = nodes[0].signer_key contract_blob = load_test_contract() # Step 1 status = nodes[0].get_status() latest_block_hash = status['sync_info']['latest_block_hash'] deploy_contract_tx = transaction.sign_deploy_contract_tx( contract_key, contract_blob, 10, base58.b58decode(latest_block_hash.encode('utf8'))) deploy_contract_response = nodes[0].send_tx_and_wait( deploy_contract_tx, 10) # Step 2 block_hash = deploy_contract_response['result']['transaction_outcome'][ 'block_hash'] assert_changes_in_block_response(request={"block_id": block_hash}, expected_response={ "block_hash": block_hash, "changes": [{ "type": "account_touched", "account_id": contract_key.account_id, }, { "type": "contract_code_touched", "account_id": contract_key.account_id, }, { "type": "access_key_touched", "account_id": contract_key.account_id, }] }) base_request = { "block_id": block_hash, "changes_type": "contract_code_changes", } for request in [ # Test empty account_ids { **base_request, "account_ids": [] }, # Test an account_id that is a prefix of the original account_id { **base_request, "account_ids": [contract_key.account_id[:-1]] }, # Test an account_id that has the original account_id as a prefix { **base_request, "account_ids": [contract_key.account_id + '_extra'] }, ]: assert_changes_response(request=request, expected_response={ "block_hash": block_hash, "changes": [] }) # Test happy-path expected_response = { "block_hash": block_hash, "changes": [ { "cause": { "type": "receipt_processing", "receipt_hash": deploy_contract_response["result"]["receipts_outcome"][0] ["id"], }, "type": "contract_code_update", "change": { "account_id": 
contract_key.account_id, "code_base64": base64.b64encode(contract_blob).decode('utf-8'), } }, ] } base_request = { "block_id": block_hash, "changes_type": "contract_code_changes", } for request in [ { **base_request, "account_ids": [contract_key.account_id] }, { **base_request, "account_ids": [ contract_key.account_id + '_non_existing1', contract_key.account_id, contract_key.account_id + '_non_existing2' ] }, ]: assert_changes_response(request=request, expected_response=expected_response) # Step 3 status = nodes[1].get_status() latest_block_hash = status['sync_info']['latest_block_hash'] function_caller_key = nodes[0].signer_key key = struct.pack('<Q', 42) key_base64 = base64.b64encode(key).decode('ascii') def set_value(value, *, nounce): args = key + struct.pack('<Q', value) tx = transaction.sign_function_call_tx( function_caller_key, contract_key.account_id, 'write_key_value', args, 300000000000000, 100000000000, nounce, base58.b58decode(latest_block_hash.encode('utf8'))) response = nodes[1].send_tx_and_wait(tx, 10) try: status = response['result']['receipts_outcome'][0]['outcome'][ 'status'] except (KeyError, IndexError): status = () assert 'SuccessValue' in status, ( "Expected successful execution, but the output was: %s" % response) return response thread = threading.Thread(target=lambda: set_value(10, nounce=20)) thread.start() response = set_value(20, nounce=30) thread.join() tx_block_hash = response['result']['transaction_outcome']['block_hash'] # Step 4 assert_changes_in_block_response(request={"block_id": tx_block_hash}, expected_response={ "block_hash": tx_block_hash, "changes": [ { "type": "account_touched", "account_id": contract_key.account_id, }, { "type": "access_key_touched", "account_id": contract_key.account_id, }, { "type": "data_touched", "account_id": contract_key.account_id, }, ] }) base_request = { "block_id": block_hash, "changes_type": "data_changes", "key_prefix_base64": key_base64, } for request in [ # Test empty account_ids { 
**base_request, "account_ids": [] }, # Test an account_id that is a prefix of the original account_id { **base_request, "account_ids": [contract_key.account_id[:-1]] }, # Test an account_id that has the original account_id as a prefix { **base_request, "account_ids": [contract_key.account_id + '_extra'] }, # Test non-existing key prefix { **base_request, "account_ids": [contract_key.account_id], "key_prefix_base64": base64.b64encode(struct.pack('<Q', 24)).decode('ascii'), }, ]: assert_changes_response(request=request, expected_response={ "block_hash": block_hash, "changes": [] }) # Test happy-path expected_response = { "block_hash": tx_block_hash, "changes": [{ "cause": { "type": "receipt_processing", }, "type": "data_update", "change": { "account_id": contract_key.account_id, "key_base64": key_base64, "value_base64": base64.b64encode(struct.pack('<Q', 10)).decode('ascii'), } }, { "cause": { "type": "receipt_processing", "receipt_hash": response["result"]["receipts_outcome"][0]["id"], }, "type": "data_update", "change": { "account_id": contract_key.account_id, "key_base64": key_base64, "value_base64": base64.b64encode(struct.pack('<Q', 20)).decode('ascii'), } }] } base_request = { "block_id": tx_block_hash, "changes_type": "data_changes", "key_prefix_base64": key_base64, } for request in [ { **base_request, "account_ids": [contract_key.account_id] }, { **base_request, "account_ids": [ contract_key.account_id + '_non_existing1', contract_key.account_id, contract_key.account_id + '_non_existing2' ] }, { **base_request, "account_ids": [contract_key.account_id], "key_prefix_base64": '', }, { **base_request, "account_ids": [contract_key.account_id], "key_prefix_base64": base64.b64encode(key[:3]).decode('ascii'), }, ]: assert_changes_response( request=request, expected_response=expected_response, exclude_paths={"root['changes'][0]['cause']['receipt_hash']"}, )
def main():
    """Smoke-test backward compatibility between stable and current neard.

    Spins up one node on the stable binary and one on the current binary,
    then sends a series of transactions through the stable node (account
    creation, token transfer, contract deployments, function calls and a
    chained cross-contract promise) and finally waits for the current node
    to reach ``BLOCKS`` height.
    """
    node_root = utils.get_near_tempdir('backward', clean=True)
    executables = branches.prepare_ab_test()

    # Setup local network.
    subprocess.check_call([
        executables.stable.neard,
        "--home=%s" % node_root,
        # TODO(#4372): testnet subcommand deprecated since 1.24. Replace with
        # localnet after a couple of releases in 2022.
        "testnet",
        "--v",
        "2",
        "--prefix",
        "test",
    ])

    # Run both binaries at the same time.
    config = executables.stable.node_config()
    stable_node = cluster.spin_up_node(config, executables.stable.root,
                                       str(node_root / 'test0'), 0)
    config = executables.current.node_config()
    current_node = cluster.spin_up_node(config, executables.current.root,
                                        str(node_root / 'test1'), 1,
                                        boot_node=stable_node)

    # Check it all works.
    BLOCKS = 100  # target height the current node must reach at the end

    # Create account, transfer tokens, deploy contract, invoke function call
    block_hash = stable_node.get_latest_block().hash_bytes
    new_account_id = 'test_account.test0'
    new_signer_key = cluster.Key(new_account_id, stable_node.signer_key.pk,
                                 stable_node.signer_key.sk)
    create_account_tx = sign_create_account_with_full_access_key_and_balance_tx(
        stable_node.signer_key, new_account_id, new_signer_key, 10**24, 1,
        block_hash)
    res = stable_node.send_tx_and_wait(create_account_tx, timeout=20)
    assert 'error' not in res, res
    assert 'Failure' not in res['result']['status'], res

    transfer_tx = sign_payment_tx(stable_node.signer_key, new_account_id,
                                  10**25, 2, block_hash)
    res = stable_node.send_tx_and_wait(transfer_tx, timeout=20)
    assert 'error' not in res, res

    # Height-derived nonce: strictly larger than any nonce previously used
    # for the new account's key.
    block_height = stable_node.get_latest_block().height
    nonce = block_height * 1_000_000 - 1

    tx = sign_deploy_contract_tx(new_signer_key, utils.load_test_contract(),
                                 nonce, block_hash)
    res = stable_node.send_tx_and_wait(tx, timeout=20)
    assert 'error' not in res, res

    tx = sign_deploy_contract_tx(stable_node.signer_key,
                                 utils.load_test_contract(), 3, block_hash)
    res = stable_node.send_tx_and_wait(tx, timeout=20)
    assert 'error' not in res, res

    tx = sign_function_call_tx(new_signer_key, new_account_id,
                               'write_random_value', [], 10**13, 0, nonce + 1,
                               block_hash)
    res = stable_node.send_tx_and_wait(tx, timeout=20)
    assert 'error' not in res, res
    assert 'Failure' not in res['result']['status'], res

    # Chain two cross-contract promises: test_account.test0 -> test0.
    data = json.dumps([{
        "create": {
            "account_id": "test_account.test0",
            "method_name": "call_promise",
            "arguments": [],
            "amount": "0",
            "gas": 30000000000000,
        },
        "id": 0
    }, {
        "then": {
            "promise_index": 0,
            "account_id": "test0",
            "method_name": "call_promise",
            "arguments": [],
            "amount": "0",
            "gas": 30000000000000,
        },
        "id": 1
    }])
    tx = sign_function_call_tx(stable_node.signer_key, new_account_id,
                               'call_promise', bytes(data, 'utf-8'),
                               90000000000000, 0, nonce + 2, block_hash)
    res = stable_node.send_tx_and_wait(tx, timeout=20)
    assert 'error' not in res, res
    assert 'Failure' not in res['result']['status'], res

    utils.wait_for_blocks(current_node, target=BLOCKS)
def main():
    """Backward-compatibility test against the latest release-candidate branch.

    Starts one node on the latest-rc binary and one on the current binary,
    funnels transactions through the stable node, then polls until the
    current node catches up to ``BLOCKS`` height (or ``TIMEOUT`` elapses).
    """
    node_root = "/tmp/near/backward"
    if os.path.exists(node_root):
        shutil.rmtree(node_root)
    # `mkdir -p` equivalent without spawning a shell.
    os.makedirs('/tmp/near', exist_ok=True)

    branch = branches.latest_rc_branch()
    near_root, (stable_branch,
                current_branch) = branches.prepare_ab_test(branch)

    # Setup local network.  check_call so a failed network setup aborts the
    # test immediately instead of being silently ignored.
    subprocess.check_call([
        "%snear-%s" % (near_root, stable_branch),
        "--home=%s" % node_root, "testnet", "--v", "2", "--prefix", "test"
    ])

    # Run both binaries at the same time.
    config = {
        "local": True,
        'near_root': near_root,
        'binary_name': "near-%s" % stable_branch
    }
    stable_node = cluster.spin_up_node(config, near_root,
                                       os.path.join(node_root, "test0"), 0,
                                       None, None)
    config["binary_name"] = "near-%s" % current_branch
    current_node = cluster.spin_up_node(config, near_root,
                                        os.path.join(node_root, "test1"), 1,
                                        stable_node.node_key.pk,
                                        stable_node.addr())

    # Check it all works.
    BLOCKS = 100   # target height for the current node
    TIMEOUT = 150  # seconds allowed to reach that height
    max_height = -1
    started = time.time()

    # Create account, transfer tokens, deploy contract, invoke function call
    status = stable_node.get_status()
    block_hash = base58.b58decode(
        status['sync_info']['latest_block_hash'].encode('utf-8'))
    new_account_id = 'test_account'
    new_signer_key = cluster.Key(new_account_id, stable_node.signer_key.pk,
                                 stable_node.signer_key.sk)
    create_account_tx = sign_create_account_with_full_access_key_and_balance_tx(
        stable_node.signer_key, new_account_id, new_signer_key, 10**24, 1,
        block_hash)
    res = stable_node.send_tx_and_wait(create_account_tx, timeout=20)
    assert 'error' not in res, res
    assert 'Failure' not in res['result']['status'], res

    transfer_tx = sign_payment_tx(stable_node.signer_key, new_account_id,
                                  10**25, 2, block_hash)
    res = stable_node.send_tx_and_wait(transfer_tx, timeout=20)
    assert 'error' not in res, res

    # Height-derived nonce: strictly larger than any nonce previously used
    # for the new account's key.
    status = stable_node.get_status()
    block_height = status['sync_info']['latest_block_height']
    nonce = block_height * 1_000_000 - 1

    tx = sign_deploy_contract_tx(new_signer_key, load_test_contract(), nonce,
                                 block_hash)
    res = stable_node.send_tx_and_wait(tx, timeout=20)
    assert 'error' not in res, res

    tx = sign_function_call_tx(new_signer_key, new_account_id,
                               'write_random_value', [], 10**13, 0, nonce + 1,
                               block_hash)
    res = stable_node.send_tx_and_wait(tx, timeout=20)
    assert 'error' not in res, res
    assert 'Failure' not in res['result']['status'], res

    # Chain two cross-contract promises.
    data = json.dumps([{
        "create": {
            "account_id": "near_2",
            "method_name": "call_promise",
            "arguments": [],
            "amount": "0",
            "gas": 30000000000000,
        },
        "id": 0
    }, {
        "then": {
            "promise_index": 0,
            "account_id": "near_3",
            "method_name": "call_promise",
            "arguments": [],
            "amount": "0",
            "gas": 30000000000000,
        },
        "id": 1
    }])
    tx = sign_function_call_tx(new_signer_key, new_account_id, 'call_promise',
                               bytes(data, 'utf-8'), 90000000000000, 0,
                               nonce + 2, block_hash)
    res = stable_node.send_tx_and_wait(tx, timeout=20)
    assert 'error' not in res, res
    assert 'Failure' not in res['result']['status'], res

    # Poll until the current node reaches the target height or we time out.
    while max_height < BLOCKS:
        assert time.time() - started < TIMEOUT
        status = current_node.get_status()
        cur_height = status['sync_info']['latest_block_height']
        if cur_height > max_height:
            max_height = cur_height
        time.sleep(1)
# Standalone script: starts a 4-node cluster, deploys tests/hello.wasm via
# node 0 and then builds a function-call transaction against it.
import base58
import json
import deepdiff

# NOTE(review): `sys` (used on the next line) and `time` (used below) are not
# in this import block — confirm they are imported elsewhere or add
# `import sys` / `import time`.
sys.path.append('lib')
from cluster import start_cluster
from transaction import sign_deploy_contract_tx, sign_function_call_tx
from utils import load_binary_file

# 4 validating nodes, no observers, 1 shard; short epochs so kickout logic
# can trigger quickly.
nodes = start_cluster(
    4, 0, 1, None,
    [["epoch_length", 10], ["block_producer_kickout_threshold", 80]], {})

# Anchor the deploy transaction at node 0's current head block.
status = nodes[0].get_status()
hash_ = status['sync_info']['latest_block_hash']
hash_ = base58.b58decode(hash_.encode('utf8'))
tx = sign_deploy_contract_tx(nodes[0].signer_key,
                             load_binary_file('../tests/hello.wasm'), 10,
                             hash_)
nodes[0].send_tx(tx)

# Give the deploy a few blocks to land before calling into the contract.
time.sleep(3)

# Anchor the function call at node 1's (possibly newer) head block.
status2 = nodes[1].get_status()
hash_2 = status2['sync_info']['latest_block_hash']
hash_2 = base58.b58decode(hash_2.encode('utf8'))
# NOTE(review): other sign_function_call_tx call sites in this file pass a
# contract account id between the signer key and the method name; this call
# appears to be missing that argument — verify against
# transaction.sign_function_call_tx's signature.  The statement is also cut
# off at this chunk boundary (the call is never closed here).
tx2 = sign_function_call_tx(
    nodes[0].signer_key, 'setKeyValue',
    json.dumps({
        "key": "mykey",
        "value": "myvalue"
    }).encode('utf-8'), 100000000000, 100000000000, 20, hash_2
def main():
    """Test a protocol upgrade: 3 stable nodes + 1 current node, then restart.

    Deploys a contract on the stable network, restarts the stable nodes on
    the current binary, asserts the active protocol version catches up with
    the latest one, and exercises transactions across the upgrade (function
    calls, transfer to an implicit hex account, account create and delete).
    """
    node_root = "/tmp/near/upgradable"
    if os.path.exists(node_root):
        shutil.rmtree(node_root)
    # `mkdir -p` equivalent without spawning a shell.
    os.makedirs('/tmp/near', exist_ok=True)

    branch = branches.latest_rc_branch()
    logger.info(f"Latest rc release branch is {branch}")
    near_root, (stable_branch,
                current_branch) = branches.prepare_ab_test(branch)

    # Setup local network.  check_call so a failed setup aborts the test
    # instead of being silently ignored.
    cmd = [
        "%snear-%s" % (near_root, stable_branch),
        "--home=%s" % node_root, "testnet", "--v", "4", "--prefix", "test"
    ]
    logger.info(cmd)
    subprocess.check_call(cmd)

    genesis_config_changes = [("epoch_length", 20),
                              ("num_block_producer_seats", 10),
                              ("num_block_producer_seats_per_shard", [10]),
                              ("block_producer_kickout_threshold", 80),
                              ("chunk_producer_kickout_threshold", 80),
                              ("chain_id", "testnet")]
    node_dirs = [os.path.join(node_root, 'test%d' % i) for i in range(4)]
    for node_dir in node_dirs:
        cluster.apply_genesis_changes(node_dir, genesis_config_changes)

    # Start 3 stable nodes and one current node.
    config = {
        "local": True,
        'near_root': near_root,
        'binary_name': "near-%s" % stable_branch
    }
    nodes = [
        cluster.spin_up_node(config, near_root, node_dirs[0], 0, None, None)
    ]
    for i in range(1, 3):
        nodes.append(
            cluster.spin_up_node(config, near_root, node_dirs[i], i,
                                 nodes[0].node_key.pk, nodes[0].addr()))
    # On NayDuck CI the current binary is named plain `near`.
    if os.getenv('NAYDUCK'):
        config["binary_name"] = "near"
    else:
        config["binary_name"] = "near-%s" % current_branch
    nodes.append(
        cluster.spin_up_node(config, near_root, node_dirs[3], 3,
                             nodes[0].node_key.pk, nodes[0].addr()))
    time.sleep(2)

    # Deploy a contract; decode the head hash once and reuse it.
    status = nodes[0].get_status()
    block_hash = base58.b58decode(
        status['sync_info']['latest_block_hash'].encode('utf8'))
    tx = sign_deploy_contract_tx(nodes[0].signer_key, load_test_contract(), 1,
                                 block_hash)
    res = nodes[0].send_tx_and_wait(tx, timeout=20)
    assert 'error' not in res, res

    # Write some random value.
    tx = sign_function_call_tx(nodes[0].signer_key,
                               nodes[0].signer_key.account_id,
                               'write_random_value', [], 10**13, 0, 2,
                               block_hash)
    res = nodes[0].send_tx_and_wait(tx, timeout=20)
    assert 'error' not in res, res
    assert 'Failure' not in res['result']['status'], res
    wait_for_blocks_or_timeout(nodes[0], 20, 120)

    # Restart stable nodes into new version.
    for i in range(3):
        nodes[i].kill()
        nodes[i].binary_name = config['binary_name']
        nodes[i].start(nodes[0].node_key.pk, nodes[0].addr())
    wait_for_blocks_or_timeout(nodes[3], 60, 120)

    status0 = nodes[0].get_status()
    status3 = nodes[3].get_status()
    protocol_version = status0['protocol_version']
    latest_protocol_version = status3["latest_protocol_version"]
    assert protocol_version == latest_protocol_version, \
        "Latest protocol version %d should match active protocol version %d" % (
        latest_protocol_version, protocol_version)

    # All post-upgrade transactions below anchor at the same head block, so
    # decode it once.
    block_hash = base58.b58decode(
        status0['sync_info']['latest_block_hash'].encode('utf8'))

    # Write some random value again.
    tx = sign_function_call_tx(nodes[0].signer_key,
                               nodes[0].signer_key.account_id,
                               'write_random_value', [], 10**13, 0, 4,
                               block_hash)
    res = nodes[0].send_tx_and_wait(tx, timeout=20)
    assert 'error' not in res, res
    assert 'Failure' not in res['result']['status'], res

    # hex_account_id = (b"I'm hex!" * 4).hex()
    hex_account_id = '49276d206865782149276d206865782149276d206865782149276d2068657821'
    tx = sign_payment_tx(key=nodes[0].signer_key,
                         to=hex_account_id,
                         amount=10**25,
                         nonce=5,
                         blockHash=block_hash)
    res = nodes[0].send_tx_and_wait(tx, timeout=20)
    # Successfully created a new account on transfer to hex
    assert 'error' not in res, res
    assert 'Failure' not in res['result']['status'], res
    hex_account_balance = int(
        nodes[0].get_account(hex_account_id)['result']['amount'])
    assert hex_account_balance == 10**25

    new_account_id = f'new.{nodes[0].signer_key.account_id}'
    new_signer_key = cluster.Key(new_account_id, nodes[0].signer_key.pk,
                                 nodes[0].signer_key.sk)
    create_account_tx = sign_create_account_with_full_access_key_and_balance_tx(
        nodes[0].signer_key, new_account_id, new_signer_key, 10**24, 6,
        block_hash)
    res = nodes[0].send_tx_and_wait(create_account_tx, timeout=20)
    # Successfully created a new account
    assert 'error' not in res, res
    assert 'Failure' not in res['result']['status'], res

    status = nodes[0].get_status()
    block_height = status['sync_info']['latest_block_height']
    beneficiary_account_id = '1982374698376abd09265034ef35034756298375462323456294875193563756'
    tx = sign_delete_account_tx(key=new_signer_key,
                                to=new_account_id,
                                beneficiary=beneficiary_account_id,
                                # Height-derived nonce: strictly larger than
                                # any nonce previously used for this key.
                                nonce=block_height * 1_000_000 - 1,
                                block_hash=block_hash)
    res = nodes[0].send_tx_and_wait(tx, timeout=20)
    # Successfully deleted an account
    assert 'error' not in res, res
    assert 'Failure' not in res['result']['status'], res