def __init__(self, instance_name, node_dir):
    """Wrap a running node instance and derive its initial state.

    Loads the validator, node, and signer keys from *node_dir*, then
    queries the node's status to classify it as VALIDATING / SYNCING /
    NONVALIDATING and to fetch the current access-key nonce.  If the
    status RPC fails for any reason, the node is assumed dead: it is
    restarted and marked SYNCING with an unknown nonce.

    Parameters:
        instance_name: identifier passed through to the base class.
        node_dir: directory containing validator_key.json,
            node_key.json and signer0_key.json for this node.
    """
    super().__init__(instance_name)
    self.validator_key = Key.from_json_file(
        os.path.join(node_dir, "validator_key.json"))
    self.node_key = Key.from_json_file(
        os.path.join(node_dir, "node_key.json"))
    self.signer_key = Key.from_json_file(
        os.path.join(node_dir, "signer0_key.json"))
    self.last_synced_height = 0
    try:
        status = super().get_status()
        # Current validator set, by account id.
        validators = set(
            map(lambda x: x['account_id'], status['validators']))
        if self.signer_key.account_id in validators:
            self.state = NodeState.VALIDATING
        elif status['sync_info']['syncing']:
            self.state = NodeState.SYNCING
        else:
            self.state = NodeState.NONVALIDATING
        self.account_key_nonce = super().get_nonce_for_pk(
            self.signer_key.account_id, self.signer_key.pk)
    except Exception:
        # Status RPC failed — treat the node as down and restart it.
        # NOTE(review): self.machine is presumably set by
        # super().__init__ — confirm in the base class.
        start_node(self.machine)
        # Give the freshly started node time to come up before anyone
        # talks to it.
        time.sleep(20)
        self.state = NodeState.SYNCING
        # Nonce unknown until the node responds; callers must refresh.
        self.account_key_nonce = None
def send_create_account_tx(self, new_account_id):
    """Submit a transaction creating *new_account_id*.

    The new account is funded with a balance of 100 and receives a
    full-access key that re-uses this account's own key pair.  Returns
    whatever ``send_tx`` returns for the signed transaction.
    """
    self.prep_tx()
    # The fresh account shares our pk/sk as its full-access key.
    account_key = Key(new_account_id, self.key.pk, self.key.sk)
    signed_tx = sign_create_account_with_full_access_key_and_balance_tx(
        self.key,
        new_account_id,
        account_key,
        100,
        self.nonce,
        self.base_block_hash,
    )
    return self.send_tx(signed_tx)
def create_account(source_account, base_block_hash):
    """Create a randomly named account funded by *source_account*.

    Picks a fresh 10-letter lowercase account id, gives it a
    full-access key re-using the source account's key pair and a
    balance of 100, then fires the signed transaction.
    """
    current_nonce = get_nonce_for_pk(source_account.account_id,
                                     source_account.pk)
    # Fresh 10-character lowercase account id.
    fresh_id = ''.join(
        [random.choice(string.ascii_lowercase) for _ in range(10)])
    fresh_key = Key(fresh_id, source_account.pk, source_account.sk)
    signed_tx = sign_create_account_with_full_access_key_and_balance_tx(
        source_account, fresh_id, fresh_key, 100, current_nonce + 1,
        base_block_hash)
    send_tx(signed_tx)
def get_test_accounts_from_args():
    """Build this worker's (Account, index) pairs from sys.argv.

    Expects argv to be: <node_index> <pk> <sk>.  Each worker owns the
    contiguous slice of NUM_ACCOUNTS test accounts starting at
    node_index * NUM_ACCOUNTS; every account shares the same key pair.
    """
    node_index = int(sys.argv[1])
    pk, sk = sys.argv[2], sys.argv[3]
    first = node_index * NUM_ACCOUNTS
    keys_with_index = [(Key(load_testing_account_id(idx), pk, sk), idx)
                       for idx in range(first, first + NUM_ACCOUNTS)]
    base_block_hash = get_latest_block_hash()
    rpc_info = (LOCAL_ADDR, RPC_PORT)
    accounts = []
    for key, idx in keys_with_index:
        nonce = get_nonce_for_pk(key.account_id, key.pk)
        accounts.append((Account(key, nonce, base_block_hash,
                                 rpc_info), idx))
    return accounts
def get_validator_account(node):
    """Download a node's validator key file and load it as a Key.

    Copies ``validator_key.json`` from the remote machine's
    ``/home/ubuntu/.near`` directory into a freshly created local
    target directory and parses it.
    """
    m = node.machine
    target_dir = create_target_dir(m)
    # Fixed remote path for these test VMs; plain string (the original
    # used a pointless f-string with no placeholders).
    m.download('/home/ubuntu/.near/validator_key.json', target_dir)
    return Key.from_json_file(f'{target_dir}/validator_key.json')
# Scenario setup: start a 2-validator cluster plus one extra node, then
# re-key node 2 with node 1's account id and stake for it from node 1.
sys.path.append('lib')
from cluster import start_cluster, Key
from transaction import sign_staking_tx

EPOCH_LENGTH = 10
TIMEOUT = 60
# Expire account-id routing info almost immediately (0.1s) so routing
# tables refresh quickly during the test.
client_config = {
    "network": {
        "ttl_account_id_router": {
            "secs": 0,
            "nanos": 100000000
        }
    }
}
# 2 validating nodes, 1 extra node, 1 shard; aggressive kickout
# thresholds so a non-producing validator is removed fast.
nodes = start_cluster(2, 1, 1, None,
                      [["epoch_length", EPOCH_LENGTH],
                       ["block_producer_kickout_threshold", 10],
                       ["chunk_producer_kickout_threshold", 10]],
                      {1: client_config, 2: client_config})
time.sleep(2)
nodes[2].kill()
# Hybrid key: node 1's validator account id paired with node 2's
# signer key pair — node 2 will validate under node 1's account.
validator_key = Key(nodes[1].validator_key.account_id,
                    nodes[2].signer_key.pk, nodes[2].signer_key.sk)
nodes[2].reset_validator_key(validator_key)
nodes[2].reset_data()
# Restart node 2 from scratch, bootstrapping from node 0.
nodes[2].start(nodes[0].node_key.pk, nodes[0].addr())
time.sleep(3)
status = nodes[0].get_status()
block_hash = status['sync_info']['latest_block_hash']
block_height = status['sync_info']['latest_block_height']
# Node 1 stakes a large amount on the hybrid key (nonce 1).
tx = sign_staking_tx(nodes[1].signer_key, validator_key,
                     50000000000000000000000000000000, 1,
                     base58.b58decode(block_hash.encode('utf8')))
res = nodes[0].send_tx_and_wait(tx, timeout=15)
assert 'error' not in res
start_time = time.time()
start = time.time() while True: assert time.time() - start < TIMEOUT time.sleep(1) status1 = nodes[1].get_status() height1 = status1['sync_info']['latest_block_height'] cur_block = nodes[1].get_block(height1) if cur_block['result']['header']['epoch_id'] != epoch_id: break nodes[1].kill() seed = bytes([1] * 32) public_key, secret_key = nacl.bindings.crypto_sign_seed_keypair(seed) node_key = Key("", base58.b58encode(public_key).decode('utf-8'), base58.b58encode(secret_key).decode('utf-8')) nodes[1].reset_node_key(node_key) nodes[1].start(nodes[0].node_key.pk, nodes[0].addr()) time.sleep(2) start = time.time() while height1 < EPOCH_LENGTH * 2 + 5: assert time.time() - start < TIMEOUT * 2 time.sleep(1) status1 = nodes[1].get_status() height1 = status1['sync_info']['latest_block_height'] validators = nodes[1].get_validators() assert len( validators['result']['next_validators']
excess_transactions = total_tx_sent - (max_tps * elapsed_time) if excess_transactions > 0: delay = excess_transactions / max_tps elapsed_time += delay time.sleep(delay) return (total_tx_sent, elapsed_time) if __name__ == '__main__': node_index = int(sys.argv[1]) pk = sys.argv[2] sk = sys.argv[3] test_account_keys = [ (Key(load_testing_account_id(i), pk, sk), i) for i in range(node_index * NUM_ACCOUNTS, (node_index + 1) * NUM_ACCOUNTS) ] base_block_hash = get_latest_block_hash() rpc_info = (LOCAL_ADDR, RPC_PORT) test_accounts = [(Account(key, get_nonce_for_pk(key.account_id, key.pk), base_block_hash, rpc_info), i) for (key, i) in test_account_keys] i0 = test_accounts[0][1] start_time = time.time() # begin with only transfers for TPS measurement
def test_changes_with_new_account_with_access_key():
    """
    Plan:
    1. Create a new account with an access key.
    2. Observe the changes in the block where the receipt lands.
    3. Remove the access key.
    4. Observe the changes in the block where the receipt lands.

    Exercises both the `all_access_key_changes` and
    `single_access_key_changes` change-query types, including empty and
    non-matching filters, against the blocks where each receipt landed.
    """
    # re-use the key as a new account access key
    new_key = Key(
        account_id='rpc_key_value_changes_full_access',
        pk=nodes[1].signer_key.pk,
        sk=nodes[1].signer_key.sk,
    )

    # Step 1: create the account with a full-access key and a balance.
    status = nodes[0].get_status()
    latest_block_hash = status['sync_info']['latest_block_hash']
    # NOTE(review): nonce=7 is assumed to be the next unused nonce for
    # the creator's key at this point in the test suite — confirm
    # against the preceding tests in this file.
    create_account_tx = transaction.sign_create_account_with_full_access_key_and_balance_tx(
        creator_key=nodes[0].signer_key,
        new_account_id=new_key.account_id,
        new_key=new_key,
        balance=10**24,
        nonce=7,
        block_hash=base58.b58decode(latest_block_hash.encode('utf8')))
    new_account_response = nodes[0].send_tx_and_wait(create_account_tx, 10)

    # Step 2: the block containing the receipt must report both the
    # account and its access key as touched.
    block_hash = new_account_response['result']['receipts_outcome'][0][
        'block_hash']
    assert_changes_in_block_response(request={"block_id": block_hash},
                                     expected_response={
                                         "block_hash": block_hash,
                                         "changes": [{
                                             "type": "account_touched",
                                             "account_id": new_key.account_id,
                                         }, {
                                             "type": "access_key_touched",
                                             "account_id": new_key.account_id,
                                         }]
                                     })

    base_request = {
        "block_id": block_hash,
        "changes_type": "all_access_key_changes",
    }
    # Filters that must match nothing: exact-match semantics only, no
    # prefix matching in either direction.
    for request in [
            # Test empty account_ids
            {
                **base_request, "account_ids": []
            },
            # Test an account_id that is a prefix of the original account_id.
            {
                **base_request, "account_ids": [new_key.account_id[:-1]]
            },
            # Test an account_id that has the original account_id as a prefix.
            {
                **base_request, "account_ids": [new_key.account_id + '_extra']
            },
    ]:
        assert_changes_response(request=request,
                                expected_response={
                                    "block_hash": block_hash,
                                    "changes": []
                                })

    # Test happy-path: the new key appears as an access_key_update
    # caused by processing the creation receipt.
    expected_response = {
        "block_hash": block_hash,
        "changes": [{
            "cause": {
                "type": "receipt_processing",
                "receipt_hash":
                    new_account_response["result"]["receipts_outcome"][0]
                    ["id"],
            },
            "type": "access_key_update",
            "change": {
                "account_id": new_key.account_id,
                "public_key": new_key.pk,
                "access_key": {
                    "nonce": 0,
                    "permission": "FullAccess"
                },
            }
        }]
    }
    # Non-existing account ids mixed into the filter must not affect
    # the result for the existing one.
    for request in [
            {
                "block_id": block_hash,
                "changes_type": "all_access_key_changes",
                "account_ids": [new_key.account_id],
            },
            {
                "block_id": block_hash,
                "changes_type": "all_access_key_changes",
                "account_ids": [
                    new_key.account_id + '_non_existing1',
                    new_key.account_id,
                    new_key.account_id + '_non_existing2'
                ],
            },
    ]:
        assert_changes_response(request=request,
                                expected_response=expected_response)

    # Step 3: the new account deletes its own access key.
    status = nodes[0].get_status()
    latest_block_hash = status['sync_info']['latest_block_hash']
    delete_access_key_tx = transaction.sign_delete_access_key_tx(
        signer_key=new_key,
        target_account_id=new_key.account_id,
        key_for_deletion=new_key,
        nonce=8,
        block_hash=base58.b58decode(latest_block_hash.encode('utf8')))
    delete_access_key_response = nodes[1].send_tx_and_wait(
        delete_access_key_tx, 10)

    # Step 4: the deletion block also touches the account and key.
    block_hash = delete_access_key_response['result']['receipts_outcome'][0][
        'block_hash']
    assert_changes_in_block_response(request={"block_id": block_hash},
                                     expected_response={
                                         "block_hash": block_hash,
                                         "changes": [{
                                             "type": "account_touched",
                                             "account_id": new_key.account_id,
                                         }, {
                                             "type": "access_key_touched",
                                             "account_id": new_key.account_id,
                                         }]
                                     })

    base_request = {
        "block_id": block_hash,
        "changes_type": "all_access_key_changes",
    }
    # Filters that must match nothing, this time also covering the
    # single_access_key_changes query type.
    for request in [
            # Test empty account_ids
            {
                **base_request, "account_ids": []
            },
            # Test an account_id that is a prefix of the original account_id
            {
                **base_request, "account_ids": [new_key.account_id[:-1]]
            },
            # Test an account_id that has the original account_id as a prefix
            {
                **base_request, "account_ids": [new_key.account_id + '_extra']
            },
            # Test empty keys in single_access_key_changes request
            {
                "block_id": block_hash,
                "changes_type": "single_access_key_changes",
                "keys": []
            },
            # Test non-existing account_id
            {
                "block_id": block_hash,
                "changes_type": "single_access_key_changes",
                "keys": [
                    {
                        "account_id": new_key.account_id + '_non_existing1',
                        "public_key": new_key.pk
                    },
                ],
            },
            # Test non-existing public_key for an existing account_id
            {
                "block_id": block_hash,
                "changes_type": "single_access_key_changes",
                "keys": [
                    {
                        "account_id": new_key.account_id,
                        "public_key": new_key.pk[:-3] + 'aaa'
                    },
                ],
            },
    ]:
        assert_changes_response(request=request,
                                expected_response={
                                    "block_hash": block_hash,
                                    "changes": []
                                })

    # Test happy-path: two changes — the nonce bump from processing the
    # transaction itself, then the key deletion from the receipt.
    expected_response = {
        "block_hash": block_hash,
        "changes": [{
            "cause": {
                'type': 'transaction_processing',
                'tx_hash':
                    delete_access_key_response['result']['transaction']
                    ['hash'],
            },
            "type": "access_key_update",
            "change": {
                "account_id": new_key.account_id,
                "public_key": new_key.pk,
                "access_key": {
                    "nonce": 8,
                    "permission": "FullAccess"
                },
            }
        }, {
            "cause": {
                "type": "receipt_processing",
                "receipt_hash":
                    delete_access_key_response["result"]["receipts_outcome"]
                    [0]["id"]
            },
            "type": "access_key_deletion",
            "change": {
                "account_id": new_key.account_id,
                "public_key": new_key.pk,
            }
        }]
    }
    # Both query types must return the same changes for the existing
    # key, regardless of extra non-existing entries in the filter.
    for request in [
            {
                "block_id": block_hash,
                "changes_type": "all_access_key_changes",
                "account_ids": [new_key.account_id],
            },
            {
                "block_id": block_hash,
                "changes_type": "all_access_key_changes",
                "account_ids": [
                    new_key.account_id + '_non_existing1',
                    new_key.account_id,
                    new_key.account_id + '_non_existing2'
                ],
            },
            {
                "block_id": block_hash,
                "changes_type": "single_access_key_changes",
                "keys": [{
                    "account_id": new_key.account_id,
                    "public_key": new_key.pk
                }],
            },
            {
                "block_id": block_hash,
                "changes_type": "single_access_key_changes",
                "keys": [
                    {
                        "account_id": new_key.account_id + '_non_existing1',
                        "public_key": new_key.pk
                    },
                    {
                        "account_id": new_key.account_id,
                        "public_key": new_key.pk
                    },
                ],
            },
    ]:
        assert_changes_response(request=request,
                                expected_response=expected_response)
1, 1, 1, None, [["epoch_length", EPOCH_LENGTH], ["block_producer_kickout_threshold", 10], ["chunk_producer_kickout_threshold", 10]], {1: node1_config}) time.sleep(2) nodes[1].kill() print('node1 is killed') status = nodes[0].get_status() block_hash = status['sync_info']['latest_block_hash'] cur_height = status['sync_info']['latest_block_height'] num_new_accounts = 10 balance = 50000000000000000000000000000000 account_keys = [] for i in range(num_new_accounts): signer_key = Key(f'test_account{i}', nodes[0].signer_key.pk, nodes[0].signer_key.sk) create_account_tx = sign_create_account_with_full_access_key_and_balance_tx( nodes[0].signer_key, f'test_account{i}', signer_key, balance // num_new_accounts, i + 1, base58.b58decode(block_hash.encode('utf8'))) account_keys.append(signer_key) res = nodes[0].send_tx_and_wait(create_account_tx, timeout=15) assert 'error' not in res, res target_height = 50 while cur_height < target_height: status = nodes[0].get_status() cur_height = status['sync_info']['latest_block_height'] time.sleep(1) status = nodes[0].get_status()