def run_test(self):
    """Exercise wallet signing with each configured sighash type.

    For every entry in TESTCASES: take some wallet UTXOs, build a
    transaction with unique P2SH outputs, have the wallet sign each input
    with the requested sighash type, then recompute the expected sighash
    locally with SignatureHashLotus and check that the produced ECDSA
    signature verifies and carries the expected sighash-type byte.
    """
    node = self.nodes[0]
    # Mature some coinbases so the wallet has spendable funds.
    node.generate(30)
    node.generate(100)
    fee = Decimal('10000') / COIN
    wallet_unspent = node.listunspent()
    for test_case in TESTCASES:
        # Consume one wallet UTXO per requested input signature.
        num_inputs = len(test_case['sig_hash_types'])
        spent_outputs = wallet_unspent[:num_inputs]
        del wallet_unspent[:num_inputs]
        assert len(spent_outputs) == num_inputs
        total_input_amount = sum(output['amount'] for output in spent_outputs)
        max_output_amount = (total_input_amount - fee) / test_case['outputs']
        tx = CTransaction()
        for i in range(test_case['outputs']):
            # Make sure each UTXO is unique
            output_amount = max_output_amount - i * Decimal('0.000047')
            output_script = CScript(
                [OP_HASH160, i.to_bytes(20, 'big'), OP_EQUAL])
            tx.vout.append(CTxOut(int(output_amount * COIN), output_script))
        # Build deserialized outputs so we can compute the sighash below
        spent_outputs_deser = []
        for spent_output in spent_outputs:
            tx.vin.append(
                CTxIn(
                    COutPoint(int(spent_output['txid'], 16),
                              spent_output['vout']),
                    CScript()))
            spent_outputs_deser.append(
                CTxOut(
                    int(spent_output['amount'] * COIN),
                    CScript(bytes.fromhex(spent_output['scriptPubKey']))))
        unsigned_tx = tx.serialize().hex()
        for i, sig_hash_type in enumerate(test_case['sig_hash_types']):
            # Sign transaction using wallet
            raw_signed_tx = node.signrawtransactionwithwallet(
                unsigned_tx, None, sig_hash_type)['hex']
            # Extract signature and pubkey from scriptSig
            signed_tx = CTransaction()
            signed_tx.deserialize(io.BytesIO(bytes.fromhex(raw_signed_tx)))
            stack_items = list(CScript(signed_tx.vin[i].scriptSig))
            sig = stack_items[0]
            pubkey = ECPubKey()
            pubkey.set(stack_items[1])
            sig_hash_type_int = self.parse_sig_hash_type(sig_hash_type)
            # Build expected sighash
            sighash = SignatureHashLotus(
                tx_to=tx,
                spent_utxos=spent_outputs_deser,
                sig_hash_type=sig_hash_type_int,
                input_index=i,
                executed_script_hash=hash256(
                    spent_outputs_deser[i].scriptPubKey),
            )
            # Verify sig signs the above sighash and has the expected
            # sighash type byte appended.
            assert pubkey.verify_ecdsa(sig[:-1], sighash)
            assert sig[-1] == sig_hash_type_int
def run_test(self):
    """Test createmultisig/addmultisigaddress RPC behavior.

    Covers: multisig creation for all nkeys/nsigs/address-type
    combinations, rejection of mixed compressed/uncompressed pubkeys
    (segwit types fall back to legacy addresses), BIP 67 sortedmulti
    descriptor test vectors, and rejection of bech32m multisigs.
    """
    node0, node1, node2 = self.nodes
    self.check_addmultisigaddress_errors()
    self.log.info('Generating blocks ...')
    node0.generate(149)
    self.sync_all()
    self.moved = 0
    # Exercise every combination of key count, required sigs and
    # address type via the helper methods.
    for self.nkeys in [3, 5]:
        for self.nsigs in [2, 3]:
            for self.output_type in ["bech32", "p2sh-segwit", "legacy"]:
                self.get_keys()
                self.do_multisig()
    self.checkbalances()

    # Test mixed compressed and uncompressed pubkeys
    self.log.info('Mixed compressed and uncompressed multisigs are not allowed')
    pk0 = node0.getaddressinfo(node0.getnewaddress())['pubkey']
    pk1 = node1.getaddressinfo(node1.getnewaddress())['pubkey']
    pk2 = node2.getaddressinfo(node2.getnewaddress())['pubkey']

    # decompress pk2
    pk_obj = ECPubKey()
    pk_obj.set(bytes.fromhex(pk2))
    pk_obj.compressed = False
    pk2 = pk_obj.get_bytes().hex()

    node0.createwallet(wallet_name='wmulti0', disable_private_keys=True)
    wmulti0 = node0.get_wallet_rpc('wmulti0')

    # Check all permutations of keys because order matters apparently
    for keys in itertools.permutations([pk0, pk1, pk2]):
        # Results should be the same as this legacy one
        legacy_addr = node0.createmultisig(2, keys, 'legacy')['address']
        assert_equal(
            legacy_addr,
            wmulti0.addmultisigaddress(2, keys, '', 'legacy')['address'])

        # Generate addresses with the segwit types. These should all make
        # legacy addresses
        assert_equal(legacy_addr,
                     wmulti0.createmultisig(2, keys, 'bech32')['address'])
        assert_equal(legacy_addr,
                     wmulti0.createmultisig(2, keys, 'p2sh-segwit')['address'])
        assert_equal(
            legacy_addr,
            wmulti0.addmultisigaddress(2, keys, '', 'bech32')['address'])
        assert_equal(
            legacy_addr,
            wmulti0.addmultisigaddress(2, keys, '', 'p2sh-segwit')['address'])

    self.log.info('Testing sortedmulti descriptors with BIP 67 test vectors')
    with open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
                           'data/rpc_bip67.json'), encoding='utf-8') as f:
        vectors = json.load(f)

    for t in vectors:
        # sortedmulti over the unsorted keys must equal plain multi over
        # the pre-sorted keys (BIP 67 ordering).
        key_str = ','.join(t['keys'])
        desc = descsum_create('sh(sortedmulti(2,{}))'.format(key_str))
        assert_equal(self.nodes[0].deriveaddresses(desc)[0], t['address'])
        sorted_key_str = ','.join(t['sorted_keys'])
        sorted_key_desc = descsum_create('sh(multi(2,{}))'.format(sorted_key_str))
        assert_equal(self.nodes[0].deriveaddresses(sorted_key_desc)[0], t['address'])

    # Check that bech32m is currently not allowed
    assert_raises_rpc_error(
        -5, "createmultisig cannot create bech32m multisig addresses",
        self.nodes[0].createmultisig, 2, self.pub, "bech32m")
def run_test(self):
    """Test multisig RPCs (legacy-only variant, no address-type argument).

    Covers: multisig creation for all nkeys/nsigs combinations, the
    mixed compressed/uncompressed pubkey case (all results must match the
    legacy address), and the BIP 67 sortedmulti descriptor vectors.
    """
    node0, node1, node2 = self.nodes
    self.check_addmultisigaddress_errors()
    self.log.info('Generating blocks ...')
    node0.generate(149)
    self.sync_all()
    self.moved = 0
    # Run the multisig helper for each key-count / sig-count combination.
    for self.nkeys in [3, 5]:
        for self.nsigs in [2, 3]:
            self.get_keys()
            self.do_multisig()
    self.checkbalances()

    # Test mixed compressed and uncompressed pubkeys
    self.log.info(
        'Mixed compressed and uncompressed multisigs are not allowed')
    pk0 = node0.getaddressinfo(node0.getnewaddress())['pubkey']
    pk1 = node1.getaddressinfo(node1.getnewaddress())['pubkey']
    pk2 = node2.getaddressinfo(node2.getnewaddress())['pubkey']

    # decompress pk2
    pk_obj = ECPubKey()
    pk_obj.set(binascii.unhexlify(pk2))
    pk_obj.compressed = False
    pk2 = binascii.hexlify(pk_obj.get_bytes()).decode()

    # Check all permutations of keys because order matters apparently
    for keys in itertools.permutations([pk0, pk1, pk2]):
        # Results should be the same as this legacy one
        legacy_addr = node0.createmultisig(2, keys)['address']
        assert_equal(legacy_addr,
                     node0.addmultisigaddress(2, keys, '')['address'])

        # Generate addresses with the segwit types. These should all make
        # legacy addresses
        assert_equal(legacy_addr, node0.createmultisig(2, keys)['address'])

    self.log.info(
        'Testing sortedmulti descriptors with BIP 67 test vectors')
    with open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
                           'data/rpc_bip67.json'), encoding='utf-8') as f:
        vectors = json.load(f)

    for t in vectors:
        # sortedmulti over unsorted keys must match plain multi over the
        # BIP 67 pre-sorted keys.
        key_str = ','.join(t['keys'])
        desc = descsum_create('sh(sortedmulti(2,{}))'.format(key_str))
        assert_equal(self.nodes[0].deriveaddresses(desc)[0], t['address'])
        sorted_key_str = ','.join(t['sorted_keys'])
        sorted_key_desc = descsum_create(
            'sh(multi(2,{}))'.format(sorted_key_str))
        assert_equal(self.nodes[0].deriveaddresses(sorted_key_desc)[0], t['address'])
def check_avahello(args):
    """Restart node 0 with ``args`` appended to its normal arguments and
    verify that the avahello message sent to a fresh avalanche peer is
    Schnorr-signed by the node's avalanche key.
    """
    # Bring the node back up with the extra arguments under test.
    self.restart_node(0, self.extra_args[0] + args)
    ava_peer = get_ava_p2p_interface(node)
    hello = ava_peer.wait_for_avahello().hello
    # Fetch the node's avalanche pubkey and check the hello signature.
    node_pubkey = ECPubKey()
    node_pubkey.set(bytes.fromhex(node.getavalanchekey()))
    assert node_pubkey.verify_schnorr(hello.sig, hello.get_sighash(ava_peer))
def run_test(self):
    """Test multisig RPCs including address-type arguments (no descriptor
    wallet / bech32m sections in this variant).

    Covers: multisig creation for all nkeys/nsigs/address-type
    combinations, and the mixed compressed/uncompressed pubkey case where
    all segwit address types must fall back to the legacy address.
    """
    node0, node1, node2 = self.nodes
    self.check_addmultisigaddress_errors()
    self.log.info('Generating blocks ...')
    node0.generate(149)
    self.sync_all()
    self.moved = 0
    # Exercise every key-count / sig-count / address-type combination.
    for self.nkeys in [3, 5]:
        for self.nsigs in [2, 3]:
            for self.output_type in ["bech32", "p2sh-segwit", "legacy"]:
                self.get_keys()
                self.do_multisig()
    self.checkbalances()

    # Test mixed compressed and uncompressed pubkeys
    self.log.info(
        'Mixed compressed and uncompressed multisigs are not allowed')
    pk0 = node0.getaddressinfo(node0.getnewaddress())['pubkey']
    pk1 = node1.getaddressinfo(node1.getnewaddress())['pubkey']
    pk2 = node2.getaddressinfo(node2.getnewaddress())['pubkey']

    # decompress pk2
    pk_obj = ECPubKey()
    pk_obj.set(binascii.unhexlify(pk2))
    pk_obj.compressed = False
    pk2 = binascii.hexlify(pk_obj.get_bytes()).decode()

    # Check all permutations of keys because order matters apparently
    for keys in itertools.permutations([pk0, pk1, pk2]):
        # Results should be the same as this legacy one
        legacy_addr = node0.createmultisig(2, keys, 'legacy')['address']
        assert_equal(
            legacy_addr,
            node0.addmultisigaddress(2, keys, '', 'legacy')['address'])

        # Generate addresses with the segwit types. These should all make
        # legacy addresses
        assert_equal(legacy_addr,
                     node0.createmultisig(2, keys, 'bech32')['address'])
        assert_equal(
            legacy_addr,
            node0.createmultisig(2, keys, 'p2sh-segwit')['address'])
        assert_equal(
            legacy_addr,
            node0.addmultisigaddress(2, keys, '', 'bech32')['address'])
        assert_equal(
            legacy_addr,
            node0.addmultisigaddress(2, keys, '', 'p2sh-segwit')['address'])
def run_test(self):
    """End-to-end avalanche voting test.

    Builds a fake 16-node quorum of P2P stubs, polls the node for block
    acceptance, checks signed responses for accepted / forked / unknown
    blocks, then drives the node's own polling: answering yes finalizes a
    reorg to a fork tip, answering "parked" keeps a new tip parked.
    Finally restarts the node with a proof/masterkey and re-checks the
    avahello signature.
    """
    node = self.nodes[0]

    # Build a fake quorum of nodes.
    def get_quorum():
        def get_node():
            n = TestNode()
            node.add_p2p_connection(
                n, services=NODE_NETWORK | NODE_AVALANCHE)
            n.wait_for_verack()

            # Get our own node id so we can use it later.
            n.nodeid = node.getpeerinfo()[-1]['id']

            return n

        return [get_node() for _ in range(0, 16)]

    # Pick one node from the quorum for polling.
    quorum = get_quorum()
    poll_node = quorum[0]

    # Generate many blocks and poll for them.
    address = node.get_deterministic_priv_key().address
    blocks = node.generatetoaddress(100, address)

    def get_coinbase(h):
        # Summarize the coinbase of block h (used to build the proof below).
        b = node.getblock(h, 2)
        return {
            'height': b['height'],
            'txid': b['tx'][0]['txid'],
            'n': 0,
            'value': b['tx'][0]['vout'][0]['value'],
        }

    coinbases = [get_coinbase(h) for h in blocks]

    fork_node = self.nodes[1]
    # Make sure the fork node has synced the blocks
    self.sync_blocks([node, fork_node])

    # Get the key so we can verify signatures.
    avakey = ECPubKey()
    avakey.set(bytes.fromhex(node.getavalanchekey()))

    self.log.info("Poll for the chain tip...")
    best_block_hash = int(node.getbestblockhash(), 16)
    poll_node.send_poll([best_block_hash])

    def assert_response(expected):
        # Wait for the node's signed response and compare the votes
        # one-by-one against `expected`.
        response = poll_node.wait_for_avaresponse()
        r = response.response
        assert_equal(r.cooldown, 0)

        # Verify signature.
        assert avakey.verify_schnorr(response.sig, r.get_hash())

        votes = r.votes
        assert_equal(len(votes), len(expected))
        for i in range(0, len(votes)):
            assert_equal(repr(votes[i]), repr(expected[i]))

    assert_response([AvalancheVote(BLOCK_ACCEPTED, best_block_hash)])

    self.log.info("Poll for a selection of blocks...")
    various_block_hashes = [
        int(node.getblockhash(0), 16),
        int(node.getblockhash(1), 16),
        int(node.getblockhash(10), 16),
        int(node.getblockhash(25), 16),
        int(node.getblockhash(42), 16),
        int(node.getblockhash(96), 16),
        int(node.getblockhash(99), 16),
        int(node.getblockhash(100), 16),
    ]

    poll_node.send_poll(various_block_hashes)
    assert_response([AvalancheVote(BLOCK_ACCEPTED, h)
                     for h in various_block_hashes])

    self.log.info(
        "Poll for a selection of blocks, but some are now invalid...")
    invalidated_block = node.getblockhash(76)
    node.invalidateblock(invalidated_block)
    # We need to send the coin to a new address in order to make sure we do
    # not regenerate the same block.
    node.generatetoaddress(
        26, 'bchreg:pqv2r67sgz3qumufap3h2uuj0zfmnzuv8v7ej0fffv')
    node.reconsiderblock(invalidated_block)

    poll_node.send_poll(various_block_hashes)
    assert_response([AvalancheVote(BLOCK_ACCEPTED, h)
                     for h in various_block_hashes[:5]] +
                    [AvalancheVote(BLOCK_FORK, h)
                     for h in various_block_hashes[-3:]])

    self.log.info("Poll for unknown blocks...")
    various_block_hashes = [
        int(node.getblockhash(0), 16),
        int(node.getblockhash(25), 16),
        int(node.getblockhash(42), 16),
        various_block_hashes[5],
        various_block_hashes[6],
        various_block_hashes[7],
        random.randrange(1 << 255, (1 << 256) - 1),
        random.randrange(1 << 255, (1 << 256) - 1),
        random.randrange(1 << 255, (1 << 256) - 1),
    ]
    poll_node.send_poll(various_block_hashes)
    assert_response([AvalancheVote(BLOCK_ACCEPTED, h)
                     for h in various_block_hashes[:3]] +
                    [AvalancheVote(BLOCK_FORK, h)
                     for h in various_block_hashes[3:6]] +
                    [AvalancheVote(BLOCK_UNKNOWN, h)
                     for h in various_block_hashes[-3:]])

    self.log.info("Trigger polling from the node...")
    # duplicate the deterministic sig test from src/test/key_tests.cpp
    privkey = ECKey()
    privkey.set(bytes.fromhex(
        "12b004fff7f4b69ef8650e767f18f11ede158148b425660723b9f9a66e61f747"), True)
    pubkey = privkey.get_pubkey()

    privatekey = node.get_deterministic_priv_key().key
    proof = node.buildavalancheproof(11, 12, pubkey.get_bytes().hex(), [{
        'txid': coinbases[0]['txid'],
        'vout': coinbases[0]['n'],
        'amount': coinbases[0]['value'],
        'height': coinbases[0]['height'],
        'iscoinbase': True,
        'privatekey': privatekey,
    }])

    # Activate the quorum.
    for n in quorum:
        success = node.addavalanchenode(
            n.nodeid, pubkey.get_bytes().hex(), proof)
        assert success is True

    # NOTE(review): parameter name shadows the builtin `hash`.
    def can_find_block_in_poll(hash, resp=BLOCK_ACCEPTED):
        # Answer every pending poll on every quorum node, voting `resp`
        # for `hash` and "accepted" for everything else; report whether
        # `hash` was seen in any poll.
        found_hash = False
        for n in quorum:
            poll = n.get_avapoll_if_available()

            # That node has not received a poll
            if poll is None:
                continue

            # We got a poll, check for the hash and respond
            votes = []
            for inv in poll.invs:
                # Vote yes to everything
                r = BLOCK_ACCEPTED

                # Look for what we expect
                if inv.hash == hash:
                    r = resp
                    found_hash = True

                votes.append(AvalancheVote(r, inv.hash))

            n.send_avaresponse(poll.round, votes, privkey)

        return found_hash

    # Now that we have a peer, we should start polling for the tip.
    hash_tip = int(node.getbestblockhash(), 16)
    wait_until(lambda: can_find_block_in_poll(hash_tip), timeout=5)

    # Make sure the fork node has synced the blocks
    self.sync_blocks([node, fork_node])

    # Create a fork 2 blocks deep. This should trigger polling.
    fork_node.invalidateblock(fork_node.getblockhash(100))
    fork_address = fork_node.get_deterministic_priv_key().address
    fork_node.generatetoaddress(2, fork_address)

    # Because the new tip is a deep reorg, the node will not accept it
    # right away, but poll for it.
    def parked_block(blockhash):
        for tip in node.getchaintips():
            if tip["hash"] == blockhash:
                assert tip["status"] != "active"
                return tip["status"] == "parked"
        return False

    fork_tip = fork_node.getbestblockhash()
    wait_until(lambda: parked_block(fork_tip))

    self.log.info("Answer all polls to finalize...")
    hash_to_find = int(fork_tip, 16)

    def has_accepted_new_tip():
        can_find_block_in_poll(hash_to_find)
        return node.getbestblockhash() == fork_tip

    # Because everybody answers yes, the node will accept that block.
    wait_until(has_accepted_new_tip, timeout=15)
    assert_equal(node.getbestblockhash(), fork_tip)

    self.log.info("Answer all polls to park...")
    node.generate(1)
    tip_to_park = node.getbestblockhash()
    hash_to_find = int(tip_to_park, 16)
    assert(tip_to_park != fork_tip)

    def has_parked_new_tip():
        can_find_block_in_poll(hash_to_find, BLOCK_PARKED)
        return node.getbestblockhash() == fork_tip

    # Because everybody answers no, the node will park that block.
    wait_until(has_parked_new_tip, timeout=15)
    assert_equal(node.getbestblockhash(), fork_tip)

    # Restart the node and rebuild the quorum
    self.restart_node(0, self.extra_args[0] + [
        "-avaproof={}".format(proof),
        "-avamasterkey=cND2ZvtabDbJ1gucx9GWH6XT9kgTAqfb6cotPt5Q5CyxVDhid2EN",
    ])
    quorum = get_quorum()
    poll_node = quorum[0]

    # Check the avahello is consistent
    avahello = poll_node.wait_for_avahello().hello

    avakey.set(bytes.fromhex(node.getavalanchekey()))
    assert avakey.verify_schnorr(
        avahello.sig, avahello.get_sighash(poll_node))
def run_test(self):
    """Anonymous (RingCT) transaction integration test.

    Sends plain/blind/anon transactions between three nodes, stakes
    blocks, then exercises: filtertransactions type filters, address
    filtering of unspent anon outputs, permanent lockunspent across a
    restart, wallet recovery from mnemonic (plain and encrypted),
    coin-control input selection, hand-crafted anon outputs with
    commitment/rangeproof verification and ECDH nonce rewind, signing
    for an unowned anon input, a subfee edge case, key-image checks and
    rollbackrctindex coverage.
    """
    nodes = self.nodes
    self.import_genesis_coins_a(nodes[0])
    txnHashes = []

    nodes[1].extkeyimportmaster(
        'drip fog service village program equip minute dentist series hawk crop sphere olympic lazy garbage segment fox library good alley steak jazz force inmate'
    )
    sxAddrTo1_1 = nodes[1].getnewstealthaddress('lblsx11')
    # The imported mnemonic is deterministic, so the derived stealth
    # address is a fixed value.
    assert (sxAddrTo1_1 == 'TetbYTGv5LiqyFiUD3a5HHbpSinQ9KiRYDGAMvRzPfz4RnHMbKGAwDr1fjLGJ5Eqg1XDwpeGyqWMiwdK3qM3zKWjzHNpaatdoHVzzA')

    nodes[2].extkeyimportmaster(nodes[2].mnemonic('new')['master'])

    sxAddrTo0_1 = nodes[0].getnewstealthaddress('lblsx01')

    # Fan out a mix of plain->anon, plain->blind and blind->anon sends.
    txnHashes.append(nodes[0].sendghosttoanon(sxAddrTo1_1, 1, '', '', False, 'node0 -> node1 p->a'))
    txnHashes.append(nodes[0].sendghosttoblind(sxAddrTo0_1, 1000, '', '', False, 'node0 -> node0 p->b'))
    txnHashes.append(nodes[0].sendblindtoanon(sxAddrTo1_1, 100, '', '', False, 'node0 -> node1 b->a 1'))
    txnHashes.append(nodes[0].sendblindtoanon(sxAddrTo1_1, 100, '', '', False, 'node0 -> node1 b->a 2'))
    txnHashes.append(nodes[0].sendblindtoanon(sxAddrTo1_1, 100, '', '', False, 'node0 -> node1 b->a 3'))
    txnHashes.append(nodes[0].sendblindtoanon(sxAddrTo1_1, 10, '', '', False, 'node0 -> node1 b->a 4'))

    for k in range(5):
        txnHash = nodes[0].sendghosttoanon(sxAddrTo1_1, 10, '', '', False, 'node0 -> node1 p->a')
        txnHashes.append(txnHash)
    for k in range(10):
        txnHash = nodes[0].sendblindtoanon(sxAddrTo1_1, 10, '', '', False, 'node0 -> node1 b->a')
        txnHashes.append(txnHash)

    for h in txnHashes:
        assert (self.wait_for_mempool(nodes[1], h))

    assert ('node0 -> node1 b->a 4' in self.dumpj(nodes[1].listtransactions('*', 100)))
    assert ('node0 -> node1 b->a 4' in self.dumpj(nodes[0].listtransactions('*', 100)))

    self.stakeBlocks(2)
    block1_hash = nodes[1].getblockhash(1)
    ro = nodes[1].getblock(block1_hash)
    for txnHash in txnHashes:
        assert (txnHash in ro['tx'])

    txnHash = nodes[1].sendanontoanon(sxAddrTo0_1, 1, '', '', False, 'node1 -> node0 a->a')
    txnHashes = [txnHash, ]
    assert (self.wait_for_mempool(nodes[0], txnHash))

    self.stakeBlocks(1)
    ro = nodes[1].getblock(nodes[1].getblockhash(3))
    for txnHash in txnHashes:
        assert (txnHash in ro['tx'])

    assert (nodes[1].anonoutput()['lastindex'] == 28)

    txnHashes.clear()
    txnHashes.append(nodes[1].sendanontoanon(sxAddrTo0_1, 101, '', '', False, 'node1 -> node0 a->a', 5, 1))
    txnHashes.append(nodes[1].sendanontoanon(sxAddrTo0_1, 0.1, '', '', False, '', 5, 2))

    assert (nodes[1].getwalletinfo()['anon_balance'] > 10)

    outputs = [{'address': sxAddrTo0_1, 'amount': 10, 'subfee': True}, ]
    # Dry run (fee estimate only) before the real sends.
    ro = nodes[1].sendtypeto('anon', 'part', outputs, 'comment_to', 'comment_from', 4, 32, True)
    assert (ro['bytes'] > 0)

    txnHashes.append(nodes[1].sendtypeto('anon', 'part', outputs))
    txnHashes.append(nodes[1].sendtypeto('anon', 'anon', [{'address': sxAddrTo1_1, 'amount': 1}, ]))

    for txhash in txnHashes:
        assert (self.wait_for_mempool(nodes[0], txhash))

    self.log.info('Test filtertransactions with type filter')
    ro = nodes[1].filtertransactions({'type': 'anon', 'count': 20, 'show_anon_spends': True, 'show_change': True})
    assert (len(ro) > 2)
    foundTx = 0
    for t in ro:
        if t['txid'] == txnHashes[-1]:
            foundTx += 1
            assert (t['amount'] == t['fee'])
        elif t['txid'] == txnHashes[-2]:
            foundTx += 1
            assert ('anon_inputs' in t)
            assert (t['amount'] < -9.9 and t['amount'] > -10.0)
            n_standard = 0
            n_anon = 0
            for to in t['outputs']:
                if to['type'] == 'standard':
                    n_standard += 1
                elif to['type'] == 'anon':
                    n_anon += 1
                    assert (to['is_change'] == 'true')
            assert (n_standard == 1)
            assert (n_anon > 0)
            assert (t['type_in'] == 'anon')
        if t['txid'] == txnHashes[-3]:
            foundTx += 1
            assert (t['outputs'][0]['type'] == 'anon')
        if foundTx > 2:
            break
    assert (foundTx > 2)

    self.log.info('Test unspent with address filter')
    unspent_filtered = nodes[1].listunspentanon(1, 9999, [sxAddrTo1_1])
    assert (unspent_filtered[0]['label'] == 'lblsx11')

    self.log.info('Test permanent lockunspent')
    unspent = nodes[1].listunspentanon()
    assert (nodes[1].lockunspent(False, [unspent[0]], True) == True)
    assert (nodes[1].lockunspent(False, [unspent[1]], True) == True)
    assert (len(nodes[1].listlockunspent()) == 2)
    locked_balances = nodes[1].getlockedbalances()
    assert (locked_balances['trusted_anon'] > 0.0)
    assert (locked_balances['num_locked'] == 2)
    # Restart node — permanent locks must survive the restart.
    self.sync_all()
    self.stop_node(1)
    self.start_node(1, self.extra_args[1] + ['-wallet=default_wallet', ])
    self.connect_nodes_bi(0, 1)
    assert (len(nodes[1].listlockunspent()) == 2)
    assert (len(nodes[1].listunspentanon()) < len(unspent))
    assert (nodes[1].lockunspent(True, [unspent[0]]) == True)
    assert_raises_rpc_error(-8, 'Invalid parameter, expected locked output', nodes[1].lockunspent, True, [unspent[0]])
    assert (len(nodes[1].listunspentanon()) == len(unspent) - 1)
    assert (nodes[1].lockunspent(True) == True)
    assert (len(nodes[1].listunspentanon()) == len(unspent))
    assert (nodes[1].lockunspent(True) == True)

    ro = nodes[2].getblockstats(nodes[2].getblockchaininfo()['blocks'])
    assert (ro['height'] == 3)

    self.log.info('Test recover from mnemonic')
    # Txns currently in the mempool will be reprocessed in the next block
    self.stakeBlocks(1)
    wi_1 = nodes[1].getwalletinfo()
    nodes[1].createwallet('test_import')
    w1_2 = nodes[1].get_wallet_rpc('test_import')
    w1_2.extkeyimportmaster(
        'drip fog service village program equip minute dentist series hawk crop sphere olympic lazy garbage segment fox library good alley steak jazz force inmate'
    )
    w1_2.getnewstealthaddress('lblsx11')
    w1_2.rescanblockchain(0)
    wi_1_2 = w1_2.getwalletinfo()
    # Recovered wallet must see the same anon balance as the original.
    assert (wi_1_2['anon_balance'] == wi_1['anon_balance'])

    nodes[1].createwallet('test_import_locked')
    w1_3 = nodes[1].get_wallet_rpc('test_import_locked')
    w1_3.encryptwallet('test')
    assert_raises_rpc_error(
        -13,
        'Error: Wallet locked, please enter the wallet passphrase with walletpassphrase first.',
        w1_3.filtertransactions, {'show_blinding_factors': True})
    assert_raises_rpc_error(
        -13,
        'Error: Wallet locked, please enter the wallet passphrase with walletpassphrase first.',
        w1_3.filtertransactions, {'show_anon_spends': True})
    w1_3.walletpassphrase('test', 30)
    # Skip initial rescan by passing -1 as scan_chain_from
    w1_3.extkeyimportmaster(
        'drip fog service village program equip minute dentist series hawk crop sphere olympic lazy garbage segment fox library good alley steak jazz force inmate',
        '', False, 'imported key', 'imported acc', -1)
    w1_3.getnewstealthaddress('lblsx11')
    w1_3.walletsettings('other', {'onlyinstance': False})
    w1_3.walletlock()
    assert (w1_3.getwalletinfo()['encryptionstatus'] == 'Locked')
    w1_3.rescanblockchain(0)
    w1_3.walletpassphrase('test', 30)
    wi_1_3 = w1_3.getwalletinfo()
    assert (wi_1_3['anon_balance'] == wi_1['anon_balance'])

    # Coverage
    w1_3.sendanontoblind(sxAddrTo0_1, 1.0)
    w1_3.sendanontoghost(sxAddrTo0_1, 1.0)

    self.log.info('Test sendtypeto coincontrol')
    w1_inputs = w1_2.listunspentanon()
    assert (len(w1_inputs) > 1)
    use_input = w1_inputs[random.randint(0, len(w1_inputs) - 1)]
    coincontrol = {'inputs': [{'tx': use_input['txid'], 'n': use_input['vout']}]}
    txid = w1_2.sendtypeto('anon', 'anon', [{'address': sxAddrTo0_1, 'amount': 0.01}, ], '', '', 7, 1, False, coincontrol)

    # The selected input must no longer appear as unspent.
    w1_inputs_after = w1_2.listunspentanon()
    for txin in w1_inputs_after:
        if txin['txid'] == use_input['txid'] and txin['vout'] == use_input['vout']:
            raise ValueError('Output should be spent')

    assert (self.wait_for_mempool(nodes[1], txid))
    raw_tx = w1_2.getrawtransaction(txid, True)
    possible_inputs = raw_tx['vin'][0]['ring_row_0'].split(', ')
    possible_inputs_txids = []
    for pi in possible_inputs:
        anonoutput = w1_2.anonoutput(pi)
        possible_inputs_txids.append(anonoutput['txnhash'] + '.' + str(anonoutput['n']))
    assert (use_input['txid'] + '.' + str(use_input['vout']) in possible_inputs_txids)

    # Stake until node0's anon balance matures.
    num_tries = 20
    for i in range(num_tries):
        if nodes[0].getbalances()['mine']['anon_immature'] == 0.0:
            break
        self.stakeBlocks(1)
        if i >= num_tries - 1:
            raise ValueError('anon balance immature')
    assert (nodes[0].getbalances()['mine']['anon_trusted'] > 100.0)

    self.log.info('Test crafting anon transactions.')
    sxAddr2_1 = nodes[2].getnewstealthaddress('lblsx01')
    ephem = nodes[0].derivefromstealthaddress(sxAddr2_1)
    blind = bytes(random.getrandbits(8) for i in range(32)).hex()
    outputs = [{
        'address': sxAddr2_1,
        'type': 'anon',
        'amount': 10.0,
        'blindingfactor': blind,
        'ephemeral_key': ephem['ephemeral_privatekey'],
    }, ]
    tx = nodes[0].createrawparttransaction([], outputs)
    options = {'sign_tx': True}
    tx_signed = nodes[0].fundrawtransactionfrom('anon', tx['hex'], {}, tx['amounts'], options)
    txid = nodes[0].sendrawtransaction(tx_signed['hex'])
    self.stakeBlocks(1)

    sx_privkey = nodes[2].dumpprivkey(sxAddr2_1)
    assert ('scan_secret' in sx_privkey)
    assert ('spend_secret' in sx_privkey)
    sx_pubkey = nodes[2].getaddressinfo(sxAddr2_1)
    assert ('scan_public_key' in sx_pubkey)
    assert ('spend_public_key' in sx_pubkey)

    stealth_key = nodes[2].derivefromstealthaddress(sxAddr2_1, ephem['ephemeral_pubkey'])
    prevtx = nodes[2].decoderawtransaction(tx_signed['hex'])
    # Locate the anon output matching our commitment by probing each one;
    # mismatches raise 'Mismatched commitment' and are ignored.
    found_output = -1
    for vout in prevtx['vout']:
        if vout['type'] != 'anon':
            continue
        try:
            ro = nodes[2].verifycommitment(vout['valueCommitment'], blind, 10.0)
            assert (ro['result'] is True)
            ro = nodes[2].rewindrangeproof(vout['rangeproof'], vout['valueCommitment'], stealth_key['privatekey'], ephem['ephemeral_pubkey'])
            assert (ro['amount'] == 10.0)
            found_output = vout['n']
        except Exception as e:
            if not str(e).startswith('Mismatched commitment'):
                print(e)
    assert (found_output > -1)

    key_bytes = base58_to_byte(stealth_key['privatekey'])[0][0:32]
    epk = ECPubKey()
    epk.set(bytes.fromhex(ephem['ephemeral_pubkey']))

    self.log.info('Test rewindrangeproof with final nonce')
    # ECDH: shared point = ephemeral pubkey * stealth privkey, then
    # double-SHA256 of the compressed encoding gives the rewind nonce.
    P = SECP256K1.affine(epk.p)
    M = SECP256K1.affine(SECP256K1.mul([((P[0], P[1], P[2]), int.from_bytes(key_bytes, 'big'))]))
    eM = bytes([0x02 + (M[1] & 1)]) + M[0].to_bytes(32, 'big')
    hM = sha256(eM)
    hhM = sha256(hM)
    # Reverse, SetHex is LE
    hhM = hhM[::-1]
    vout = prevtx['vout'][found_output]
    ro = nodes[2].rewindrangeproof(vout['rangeproof'], vout['valueCommitment'], hhM.hex())
    assert (ro['amount'] == 10.0)

    self.log.info('Test signing for unowned anon input')
    # Input not in wallet, must be in chain for pubkey index
    prev_tx_signed = nodes[0].decoderawtransaction(tx_signed['hex'])
    prev_commitment = prev_tx_signed['vout'][found_output]['valueCommitment']
    prev_public_key = prev_tx_signed['vout'][found_output]['pubkey']
    assert (prev_public_key == stealth_key['pubkey'])
    outputs = [{'address': sxAddr2_1, 'type': 'anon', 'amount': 10.0, }, ]
    tx = nodes[0].createrawparttransaction([], outputs)
    options = {
        'subtractFeeFromOutputs': [0, ],
        'inputs': [{
            'tx': txid,
            'n': found_output,
            'type': 'anon',
            'value': 10.0,
            'commitment': prev_commitment,
            'pubkey': prev_public_key,
            'privkey': stealth_key['privatekey'],
            'blind': blind,
        }],
        'feeRate': 0.001,
        'sign_tx': True,
    }
    input_amounts = {}
    used_input = (txid, found_output)
    tx_signed = nodes[0].fundrawtransactionfrom('anon', tx['hex'], input_amounts, tx['amounts'], options)

    # Retry submission, staking a block between attempts, until the
    # spent output is deep enough in the chain.
    num_tries = 20
    for i in range(num_tries):
        try:
            spending_txid = nodes[0].sendrawtransaction(tx_signed['hex'])
            break
        except Exception:
            self.stakeBlocks(1)
        if i >= num_tries - 1:
            raise ValueError('Can\'t submit txn')

    assert (self.wait_for_mempool(nodes[2], spending_txid))
    self.stakeBlocks(1)
    w2b = nodes[2].getbalances()
    assert (w2b['mine']['anon_immature'] < 10 and w2b['mine']['anon_immature'] > 9)

    self.log.info('Test subfee edge case')
    unspents = nodes[0].listunspent()
    total_input = int(unspents[0]['amount'] * COIN) + int(unspents[1]['amount'] * COIN)
    # One satoshi less than the inputs, so the fee must come out of the
    # output (subfee) for the tx to balance.
    total_output = total_input - 1
    coincontrol = {
        'test_mempool_accept': True,
        'show_hex': True,
        'show_fee': True,
        'inputs': [{'tx': unspents[0]['txid'], 'n': unspents[0]['vout']},
                   {'tx': unspents[1]['txid'], 'n': unspents[1]['vout']}]
    }
    outputs = [{'address': sxAddrTo0_1, 'amount': '%i.%08i' % (total_output // COIN, total_output % COIN), 'narr': '', 'subfee': True}, ]
    tx = nodes[0].sendtypeto('part', 'anon', outputs, 'comment', 'comment-to', 5, 1, False, coincontrol)
    assert (total_input == int(tx['fee'] * COIN) + int(tx['outputs_fee'][sxAddrTo0_1]))
    assert (tx['mempool-allowed'] == True)

    self.log.info('Test checkkeyimage')
    unspents = nodes[0].listunspentanon(0, 999999, [], True, {'show_pubkeys': True})
    anon_pubkey = unspents[0]['pubkey']
    keyimage = nodes[0].getkeyimage(anon_pubkey)['keyimage']
    spent = nodes[0].checkkeyimage(keyimage)
    assert (spent['spent'] is False)
    raw_tx = nodes[0].decoderawtransaction(nodes[0].gettransaction(used_input[0])['hex'])
    used_pubkey = raw_tx['vout'][used_input[1]]['pubkey']
    used_keyimage = nodes[2].getkeyimage(used_pubkey)['keyimage']
    spent = nodes[0].checkkeyimage(used_keyimage)
    assert (spent['spent'] is True)
    assert (spent['txid'] == spending_txid)

    self.log.info('Test rollbackrctindex')
    nodes[0].rollbackrctindex()
def run_test(self):
    """Run every taproot spend case in TX_CASES against the node.

    Phase 1 builds one fan-out transaction that creates a taproot
    (OP_SCRIPTTYPE OP_1 <commitment>) UTXO for every input of every test
    case, committing to each case's script tree via the TapTweak tweak.
    Phase 2 spends those UTXOs case-by-case, signing each input with the
    requested sighash types (Lotus or BIP143 fork-id), assembling the
    scriptSig with the control block and merkle path, and asserting the
    node accepts the tx — or rejects it with the expected error.

    The sentinel string 'ENABLE_REPLAY_PROTECTION' in TX_CASES advances
    mocktime past ACTIVATION_TIME and mines blocks instead of being a
    test case.
    """
    node = self.nodes[0]
    node.add_p2p_connection(P2PDataStore())
    # Allocate as many UTXOs as are needed
    num_utxos = sum(len(tx_case['inputs'])
                    for tx_case in TX_CASES
                    if isinstance(tx_case, dict))
    value = int(SUBSIDY * 1_000_000)
    fee = 10_000
    pubkey_bytes = bytes.fromhex(
        '020000000000000000000000000000000000000000000000000000000000000001')
    pubkey = ECPubKey()
    pubkey.set(pubkey_bytes)
    max_utxo_value = (value - fee) // num_utxos
    spendable_outputs = []
    utxo_idx = 0
    # Prepare UTXOs for the tests below
    for tx_case in TX_CASES:
        if tx_case == 'ENABLE_REPLAY_PROTECTION':
            continue
        for tree, leaf_idx, _ in tx_case['inputs']:
            # Deduct 100 * utxo_idx sats so every UTXO amount is unique.
            utxo_value = max_utxo_value - utxo_idx * 100
            tree_result = taproot_tree_helper(tree)
            merkle_root = tree_result['hash']
            # Tweak the internal pubkey with the script-tree merkle root.
            tweak_hash = TaggedHash("TapTweak", pubkey_bytes + merkle_root)
            commitment = pubkey.add(tweak_hash)
            ops = [OP_SCRIPTTYPE, OP_1, commitment.get_bytes()]
            script_case = tree_result['items'][leaf_idx]['leaf']['script_case']
            if script_case.get('state', False):
                ops.append(script_case['state'])
            utxo_script = CScript(ops)
            spendable_outputs.append(CTxOut(utxo_value, utxo_script))
            utxo_idx += 1
    anyonecanspend_address = node.decodescript('51')['p2sh']
    burn_address = node.decodescript('00')['p2sh']
    p2sh_script = CScript([OP_HASH160, bytes(20), OP_EQUAL])
    node.generatetoaddress(1, anyonecanspend_address)
    node.generatetoaddress(100, burn_address)
    # Build and send fan-out transaction creating all the UTXOs
    block_hash = node.getblockhash(1)
    coin = int(node.getblock(block_hash)['tx'][0], 16)
    tx_fan_out = CTransaction()
    tx_fan_out.vin.append(CTxIn(COutPoint(coin, 1), CScript([b'\x51'])))
    tx_fan_out.vout = spendable_outputs
    tx_fan_out.rehash()
    node.p2p.send_txs_and_test([tx_fan_out], node)
    utxo_idx = 0
    for tx_case in TX_CASES:
        if tx_case == 'ENABLE_REPLAY_PROTECTION':
            # Sentinel: move past activation and bury it under 11 blocks.
            node.setmocktime(ACTIVATION_TIME)
            node.generatetoaddress(11, burn_address)
            continue
        num_inputs = len(tx_case['inputs'])
        num_outputs = tx_case['outputs']
        # Build tx for this test, will broadcast later
        tx = CTransaction()
        spent_outputs = spendable_outputs[:num_inputs]
        del spendable_outputs[:num_inputs]
        assert len(spent_outputs) == num_inputs
        total_input_amount = sum(output.nValue for output in spent_outputs)
        max_output_amount = (total_input_amount - fee) // num_outputs
        for i in range(num_outputs):
            # Offset each amount by 77*i so outputs are unique.
            output_amount = max_output_amount - i * 77
            output_script = CScript(
                [OP_HASH160, i.to_bytes(20, 'big'), OP_EQUAL])
            tx.vout.append(CTxOut(output_amount, output_script))
        for _ in range(num_inputs):
            tx.vin.append(
                CTxIn(COutPoint(tx_fan_out.txid, utxo_idx), CScript()))
            utxo_idx += 1
        for input_idx, input_case in enumerate(tx_case['inputs']):
            tree, leaf_idx, sig_hash_types = input_case
            tree_result = taproot_tree_helper(tree)
            result_item = tree_result['items'][leaf_idx]
            leaf = result_item['leaf']
            script_case = leaf['script_case']
            exec_script = CScript(script_case['script'])
            keys = script_case.get('keys', [])
            assert len(sig_hash_types) == len(keys)
            sigs = []
            for sig_hash_type, key in zip(sig_hash_types, keys):
                if sig_hash_type & SIGHASH_LOTUS == SIGHASH_LOTUS:
                    sighash = SignatureHashLotus(
                        tx_to=tx,
                        spent_utxos=spent_outputs,
                        sig_hash_type=sig_hash_type,
                        input_index=input_idx,
                        executed_script_hash=leaf['tapleaf_hash'],
                        codeseparator_pos=script_case.get(
                            'codesep', 0xffff_ffff),
                    )
                elif sig_hash_type & SIGHASH_FORKID:
                    sighash = SignatureHashForkId(
                        exec_script,
                        tx,
                        input_idx,
                        sig_hash_type,
                        spent_outputs[input_idx].nValue,
                    )
                else:
                    # Fixed: `raise NotImplemented` raised a TypeError
                    # (NotImplemented is not an exception class); raise
                    # the proper exception type instead.
                    raise NotImplementedError(
                        'unsupported sig_hash_type: {}'.format(sig_hash_type))
                private_key = ECKey()
                private_key.set(key, True)
                if script_case.get('schnorr', False):
                    signature = private_key.sign_schnorr(sighash)
                else:
                    signature = private_key.sign_ecdsa(sighash)
                # Append sighash-type byte (or the case's override suffix).
                signature += bytes(
                    [tx_case.get('suffix', sig_hash_type & 0xff)])
                sigs.append(signature)
            # Control block: parity-adjusted internal pubkey + merkle path.
            control_block = bytearray(pubkey_bytes)
            control_block[0] = 0xc0
            control_block[0] |= int(pubkey_bytes[0] == 0x03)
            control_block += result_item['path']
            tx.vin[input_idx].scriptSig = CScript(
                script_case['script_inputs'] + sigs +
                [exec_script, control_block])
        # Broadcast transaction and check success/failure
        tx.rehash()
        if 'error' not in tx_case:
            node.p2p.send_txs_and_test([tx], node)
        else:
            node.p2p.send_txs_and_test([tx], node, success=False,
                                       reject_reason=tx_case['error'])
def run_test(self):
    """Exercise avalanche polling: poll for tips, forks and unknown blocks,
    register a quorum via an avalanche proof, then drive block finalization
    and parking by answering polls, and finally check misbehavior handling.
    """
    node = self.nodes[0]

    # Build a fake quorum of nodes.
    def get_quorum():
        return [get_ava_p2p_interface(node)
                for _ in range(0, QUORUM_NODE_COUNT)]

    # Pick one node from the quorum for polling.
    quorum = get_quorum()
    poll_node = quorum[0]

    # Generate many blocks and poll for them.
    addrkey0 = node.get_deterministic_priv_key()
    blockhashes = node.generatetoaddress(100, addrkey0.address)

    # Use the first coinbase to create a stake
    stakes = create_coinbase_stakes(node, [blockhashes[0]], addrkey0.key)

    fork_node = self.nodes[1]
    # Make sure the fork node has synced the blocks
    self.sync_blocks([node, fork_node])

    # Get the key so we can verify signatures.
    avakey = ECPubKey()
    avakey.set(bytes.fromhex(node.getavalanchekey()))

    self.log.info("Poll for the chain tip...")
    best_block_hash = int(node.getbestblockhash(), 16)
    poll_node.send_poll([best_block_hash])

    def assert_response(expected):
        # Wait for the node's avaresponse and check it vote-for-vote
        # against the expected list.
        response = poll_node.wait_for_avaresponse()
        r = response.response
        assert_equal(r.cooldown, 0)

        # Verify signature.
        assert avakey.verify_schnorr(response.sig, r.get_hash())

        votes = r.votes
        assert_equal(len(votes), len(expected))
        for i in range(0, len(votes)):
            assert_equal(repr(votes[i]), repr(expected[i]))

    assert_response([AvalancheVote(BLOCK_ACCEPTED, best_block_hash)])

    self.log.info("Poll for a selection of blocks...")
    various_block_hashes = [
        int(node.getblockhash(0), 16),
        int(node.getblockhash(1), 16),
        int(node.getblockhash(10), 16),
        int(node.getblockhash(25), 16),
        int(node.getblockhash(42), 16),
        int(node.getblockhash(96), 16),
        int(node.getblockhash(99), 16),
        int(node.getblockhash(100), 16),
    ]

    poll_node.send_poll(various_block_hashes)
    assert_response([AvalancheVote(BLOCK_ACCEPTED, h)
                     for h in various_block_hashes])

    self.log.info(
        "Poll for a selection of blocks, but some are now invalid...")
    invalidated_block = node.getblockhash(76)
    node.invalidateblock(invalidated_block)
    # We need to send the coin to a new address in order to make sure we do
    # not regenerate the same block.
    node.generatetoaddress(
        26, 'ecregtest:pqv2r67sgz3qumufap3h2uuj0zfmnzuv8v38gtrh5v')
    node.reconsiderblock(invalidated_block)

    poll_node.send_poll(various_block_hashes)
    # Heights 0..42 are still on the active chain; 96..100 were replaced
    # by the regenerated blocks and are now on a fork.
    assert_response([AvalancheVote(BLOCK_ACCEPTED, h)
                     for h in various_block_hashes[:5]] +
                    [AvalancheVote(BLOCK_FORK, h)
                     for h in various_block_hashes[-3:]])

    self.log.info("Poll for unknown blocks...")
    various_block_hashes = [
        int(node.getblockhash(0), 16),
        int(node.getblockhash(25), 16),
        int(node.getblockhash(42), 16),
        various_block_hashes[5],
        various_block_hashes[6],
        various_block_hashes[7],
        # Random 256-bit values: hashes the node has never seen.
        random.randrange(1 << 255, (1 << 256) - 1),
        random.randrange(1 << 255, (1 << 256) - 1),
        random.randrange(1 << 255, (1 << 256) - 1),
    ]
    poll_node.send_poll(various_block_hashes)
    assert_response([AvalancheVote(BLOCK_ACCEPTED, h)
                     for h in various_block_hashes[:3]] +
                    [AvalancheVote(BLOCK_FORK, h)
                     for h in various_block_hashes[3:6]] +
                    [AvalancheVote(BLOCK_UNKNOWN, h)
                     for h in various_block_hashes[-3:]])

    self.log.info("Trigger polling from the node...")
    # duplicate the deterministic sig test from src/test/key_tests.cpp
    privkey = ECKey()
    privkey.set(bytes.fromhex(
        "12b004fff7f4b69ef8650e767f18f11ede158148b425660723b9f9a66e61f747"),
        True)
    pubkey = privkey.get_pubkey()

    proof_sequence = 11
    proof_expiration = 12
    proof = node.buildavalancheproof(
        proof_sequence, proof_expiration, pubkey.get_bytes().hex(),
        stakes)

    # Activate the quorum.
    for n in quorum:
        success = node.addavalanchenode(
            n.nodeid, pubkey.get_bytes().hex(), proof)
        assert success is True

    self.log.info("Testing getavalanchepeerinfo...")
    avapeerinfo = node.getavalanchepeerinfo()
    # There is a single peer because all nodes share the same proof.
    assert_equal(len(avapeerinfo), 1)
    assert_equal(avapeerinfo[0]["peerid"], 0)
    assert_equal(avapeerinfo[0]["nodecount"], len(quorum))
    # The first avalanche node index is 1, because 0 is self.nodes[1].
    assert_equal(sorted(avapeerinfo[0]["nodes"]),
                 list(range(1, QUORUM_NODE_COUNT + 1)))
    assert_equal(avapeerinfo[0]["proof"], proof)

    def can_find_block_in_poll(hash, resp=BLOCK_ACCEPTED):
        # Answer every pending poll across the quorum, voting `resp` for
        # `hash` and BLOCK_ACCEPTED for everything else. Returns whether
        # `hash` appeared in any poll.
        found_hash = False
        for n in quorum:
            poll = n.get_avapoll_if_available()

            # That node has not received a poll
            if poll is None:
                continue

            # We got a poll, check for the hash and respond
            votes = []
            for inv in poll.invs:
                # Vote yes to everything
                r = BLOCK_ACCEPTED

                # Look for what we expect
                if inv.hash == hash:
                    r = resp
                    found_hash = True

                votes.append(AvalancheVote(r, inv.hash))

            n.send_avaresponse(poll.round, votes, privkey)

        return found_hash

    # Now that we have a peer, we should start polling for the tip.
    hash_tip = int(node.getbestblockhash(), 16)
    wait_until(lambda: can_find_block_in_poll(hash_tip), timeout=5)

    # Make sure the fork node has synced the blocks
    self.sync_blocks([node, fork_node])

    # Create a fork 2 blocks deep. This should trigger polling.
    fork_node.invalidateblock(fork_node.getblockhash(100))
    fork_address = fork_node.get_deterministic_priv_key().address
    fork_node.generatetoaddress(2, fork_address)

    # Because the new tip is a deep reorg, the node will not accept it
    # right away, but poll for it.
    def parked_block(blockhash):
        for tip in node.getchaintips():
            if tip["hash"] == blockhash:
                assert tip["status"] != "active"
                return tip["status"] == "parked"
        return False

    fork_tip = fork_node.getbestblockhash()
    wait_until(lambda: parked_block(fork_tip))

    self.log.info("Answer all polls to finalize...")

    hash_to_find = int(fork_tip, 16)

    def has_accepted_new_tip():
        can_find_block_in_poll(hash_to_find)
        return node.getbestblockhash() == fork_tip

    # Because everybody answers yes, the node will accept that block.
    wait_until(has_accepted_new_tip, timeout=15)
    assert_equal(node.getbestblockhash(), fork_tip)

    self.log.info("Answer all polls to park...")
    node.generate(1)

    tip_to_park = node.getbestblockhash()
    hash_to_find = int(tip_to_park, 16)
    assert(tip_to_park != fork_tip)

    def has_parked_new_tip():
        can_find_block_in_poll(hash_to_find, BLOCK_PARKED)
        return node.getbestblockhash() == fork_tip

    # Because everybody answers no, the node will park that block.
    wait_until(has_parked_new_tip, timeout=15)
    assert_equal(node.getbestblockhash(), fork_tip)

    self.log.info(
        "Check the node is discouraging unexpected avaresponses.")
    with node.assert_debug_log(
            ['Misbehaving', 'peer=1 (0 -> 2): unexpected-ava-response']):
        # unknown voting round
        poll_node.send_avaresponse(
            round=2**32 - 1, votes=[], privkey=privkey)
def run_test(self):
    """Exercise createmultisig/addmultisigaddress across key counts and
    address types, reject mixed compressed/uncompressed keys for segwit
    types, validate sortedmulti descriptors against BIP 67 vectors, and
    check that bech32m multisig addresses are refused.
    """
    node0, node1, node2 = self.nodes
    self.wallet = MiniWallet(test_node=node0)

    if self.is_bdb_compiled():
        self.check_addmultisigaddress_errors()

    self.log.info('Generating blocks ...')
    self.generate(self.wallet, 149)
    self.moved = 0
    # Sweep nkeys x nsigs x output_type combinations; state is kept on
    # self because get_keys/do_multisig/checkbalances read it there.
    for self.nkeys in [3, 5]:
        for self.nsigs in [2, 3]:
            for self.output_type in ["bech32", "p2sh-segwit", "legacy"]:
                self.get_keys()
                self.do_multisig()
    if self.is_bdb_compiled():
        self.checkbalances()

    # Test mixed compressed and uncompressed pubkeys
    self.log.info(
        'Mixed compressed and uncompressed multisigs are not allowed')
    pk0, pk1, pk2 = [
        getnewdestination('bech32')[0].hex()
        for _ in range(3)
    ]

    # decompress pk2
    pk_obj = ECPubKey()
    pk_obj.set(bytes.fromhex(pk2))
    pk_obj.compressed = False
    pk2 = pk_obj.get_bytes().hex()

    if self.is_bdb_compiled():
        node0.createwallet(wallet_name='wmulti0',
                           disable_private_keys=True)
        wmulti0 = node0.get_wallet_rpc('wmulti0')

    # Check all permutations of keys because order matters apparently
    for keys in itertools.permutations([pk0, pk1, pk2]):
        # Results should be the same as this legacy one
        legacy_addr = node0.createmultisig(2, keys, 'legacy')['address']

        if self.is_bdb_compiled():
            result = wmulti0.addmultisigaddress(2, keys, '', 'legacy')
            assert_equal(legacy_addr, result['address'])
            assert 'warnings' not in result

        # Generate addresses with the segwit types. These should all make
        # legacy addresses
        err_msg = [
            "Unable to make chosen address type, please ensure no uncompressed public keys are present."
        ]

        for addr_type in ['bech32', 'p2sh-segwit']:
            result = self.nodes[0].createmultisig(
                nrequired=2, keys=keys, address_type=addr_type)
            assert_equal(legacy_addr, result['address'])
            assert_equal(result['warnings'], err_msg)

            if self.is_bdb_compiled():
                result = wmulti0.addmultisigaddress(
                    nrequired=2, keys=keys, address_type=addr_type)
                assert_equal(legacy_addr, result['address'])
                assert_equal(result['warnings'], err_msg)

    self.log.info(
        'Testing sortedmulti descriptors with BIP 67 test vectors')
    with open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
                           'data/rpc_bip67.json'), encoding='utf-8') as f:
        vectors = json.load(f)

    for t in vectors:
        # sortedmulti over the unsorted keys must derive the same address
        # as plain multi over the pre-sorted keys.
        key_str = ','.join(t['keys'])
        desc = descsum_create('sh(sortedmulti(2,{}))'.format(key_str))
        assert_equal(self.nodes[0].deriveaddresses(desc)[0], t['address'])

        sorted_key_str = ','.join(t['sorted_keys'])
        sorted_key_desc = descsum_create(
            'sh(multi(2,{}))'.format(sorted_key_str))
        assert_equal(self.nodes[0].deriveaddresses(sorted_key_desc)[0],
                     t['address'])

    # Check that bech32m is currently not allowed
    assert_raises_rpc_error(
        -5, "createmultisig cannot create bech32m multisig addresses",
        self.nodes[0].createmultisig, 2, self.pub, "bech32m")