def run_test(self): self.bootstrap_p2p() # Add one p2p connection to the node self.block_heights = {} self.coinbase_key = ECKey() self.coinbase_key.generate() self.coinbase_pubkey = self.coinbase_key.get_pubkey().get_bytes() self.tip = None self.blocks = {} self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16) self.block_heights[self.genesis_hash] = 0 self.spendable_outputs = [] # Create a new block b0 = self.next_block(0) self.save_spendable_output() self.sync_blocks([b0]) # Allow the block to mature blocks = [] for i in range(129): blocks.append(self.next_block(5000 + i)) self.save_spendable_output() self.sync_blocks(blocks) # collect spendable outputs now to avoid cluttering the code later on out = [] for i in range(33): out.append(self.get_spendable_output()) # Start by building a block on top. # setup -> b13 (0) b13 = self.next_block(13, spend=out[0]) self.save_spendable_output() self.sync_blocks([b13]) # Add a block with MAX_BLOCK_SIGOPS_PER_MB and one with one more sigop # setup -> b13 (0) -> b15 (5) -> b16 (6) self.log.info("Accept a block with lots of checksigs") lots_of_checksigs = CScript( [OP_CHECKSIG] * (MAX_BLOCK_SIGOPS_PER_MB - 1)) self.move_tip(13) b15 = self.next_block(15, spend=out[5], script=lots_of_checksigs) self.save_spendable_output() self.sync_blocks([b15], True) self.log.info("Reject a block with too many checksigs") too_many_checksigs = CScript([OP_CHECKSIG] * (MAX_BLOCK_SIGOPS_PER_MB)) b16 = self.next_block(16, spend=out[6], script=too_many_checksigs) self.sync_blocks([b16], success=False, reject_reason='bad-blk-sigops', reconnect=True) self.move_tip(15) # ... skipped feature_block tests ... # b31 - b35 - check sigops of OP_CHECKMULTISIG / OP_CHECKMULTISIGVERIFY / OP_CHECKSIGVERIFY # # setup -> ... b15 (5) -> b31 (8) -> b33 (9) -> b35 (10) # \-> b36 (11) # \-> b34 (10) # \-> b32 (9) # # MULTISIG: each op code counts as 20 sigops. To create the edge case, # pack another 19 sigops at the end. 
self.log.info( "Accept a block with the max number of OP_CHECKMULTISIG sigops") lots_of_multisigs = CScript( [OP_CHECKMULTISIG] * ((MAX_BLOCK_SIGOPS_PER_MB - 1) // 20) + [OP_CHECKSIG] * 19) b31 = self.next_block(31, spend=out[8], script=lots_of_multisigs) assert_equal(get_legacy_sigopcount_block(b31), MAX_BLOCK_SIGOPS_PER_MB) self.sync_blocks([b31], True) self.save_spendable_output() # this goes over the limit because the coinbase has one sigop self.log.info("Reject a block with too many OP_CHECKMULTISIG sigops") too_many_multisigs = CScript( [OP_CHECKMULTISIG] * (MAX_BLOCK_SIGOPS_PER_MB // 20)) b32 = self.next_block(32, spend=out[9], script=too_many_multisigs) assert_equal(get_legacy_sigopcount_block( b32), MAX_BLOCK_SIGOPS_PER_MB + 1) self.sync_blocks([b32], success=False, reject_reason='bad-blk-sigops', reconnect=True) # CHECKMULTISIGVERIFY self.log.info( "Accept a block with the max number of OP_CHECKMULTISIGVERIFY sigops") self.move_tip(31) lots_of_multisigs = CScript( [OP_CHECKMULTISIGVERIFY] * ((MAX_BLOCK_SIGOPS_PER_MB - 1) // 20) + [OP_CHECKSIG] * 19) b33 = self.next_block(33, spend=out[9], script=lots_of_multisigs) self.sync_blocks([b33], True) self.save_spendable_output() self.log.info( "Reject a block with too many OP_CHECKMULTISIGVERIFY sigops") too_many_multisigs = CScript( [OP_CHECKMULTISIGVERIFY] * (MAX_BLOCK_SIGOPS_PER_MB // 20)) b34 = self.next_block(34, spend=out[10], script=too_many_multisigs) self.sync_blocks([b34], success=False, reject_reason='bad-blk-sigops', reconnect=True) # CHECKSIGVERIFY self.log.info( "Accept a block with the max number of OP_CHECKSIGVERIFY sigops") self.move_tip(33) lots_of_checksigs = CScript( [OP_CHECKSIGVERIFY] * (MAX_BLOCK_SIGOPS_PER_MB - 1)) b35 = self.next_block(35, spend=out[10], script=lots_of_checksigs) self.sync_blocks([b35], True) self.save_spendable_output() self.log.info("Reject a block with too many OP_CHECKSIGVERIFY sigops") too_many_checksigs = CScript( [OP_CHECKSIGVERIFY] * (MAX_BLOCK_SIGOPS_PER_MB)) b36 = self.next_block(36, spend=out[11], script=too_many_checksigs) self.sync_blocks([b36], success=False, reject_reason='bad-blk-sigops', reconnect=True) # ... skipped feature_block tests ... # Check P2SH SigOp counting # # # ... 
-> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b41 (12) # \-> b40 (12) # # b39 - create some P2SH outputs that will require 6 sigops to spend: # # redeem_script = COINBASE_PUBKEY, (OP_2DUP+OP_CHECKSIGVERIFY) * 5, OP_CHECKSIG # p2sh_script = OP_HASH160, ripemd160(sha256(script)), OP_EQUAL # self.log.info("Check P2SH SIGOPS are correctly counted") self.move_tip(35) b39 = self.next_block(39) b39_outputs = 0 b39_sigops_per_output = 6 # Build the redeem script, hash it, use hash to create the p2sh script redeem_script = CScript([self.coinbase_pubkey] + [ OP_2DUP, OP_CHECKSIGVERIFY] * 5 + [OP_CHECKSIG]) redeem_script_hash = hash160(redeem_script) p2sh_script = CScript([OP_HASH160, redeem_script_hash, OP_EQUAL]) # Create a transaction that spends one satoshi to the p2sh_script, the rest to OP_TRUE # This must be signed because it is spending a coinbase spend = out[11] tx = self.create_tx(spend, 0, 1, p2sh_script) tx.vout.append(CTxOut(spend.vout[0].nValue - 1, CScript([OP_TRUE]))) self.sign_tx(tx, spend) tx.rehash() b39 = self.update_block(39, [tx]) b39_outputs += 1 # Until block is full, add tx's with 1 satoshi to p2sh_script, the rest # to OP_TRUE tx_new = None tx_last = tx tx_last_n = len(tx.vout) - 1 total_size = len(b39.serialize()) while(total_size < LEGACY_MAX_BLOCK_SIZE): tx_new = self.create_tx(tx_last, tx_last_n, 1, p2sh_script) tx_new.vout.append( CTxOut(tx_last.vout[tx_last_n].nValue - 1, CScript([OP_TRUE]))) tx_new.rehash() total_size += len(tx_new.serialize()) if total_size >= LEGACY_MAX_BLOCK_SIZE: break b39.vtx.append(tx_new) # add tx to block tx_last = tx_new tx_last_n = len(tx_new.vout) - 1 b39_outputs += 1 b39 = self.update_block(39, []) self.sync_blocks([b39], True) self.save_spendable_output() # Test sigops in P2SH redeem scripts # # b40 creates 3333 tx's spending the 6-sigop P2SH outputs from b39 for a total of 19998 sigops. # The first tx has one sigop and then at the end we add 2 more to put us just over the max. # # b41 does the same, less one, so it has the maximum sigops permitted. 
# self.log.info("Reject a block with too many P2SH sigops") self.move_tip(39) b40 = self.next_block(40, spend=out[12]) sigops = get_legacy_sigopcount_block(b40) numTxs = (MAX_BLOCK_SIGOPS_PER_MB - sigops) // b39_sigops_per_output assert_equal(numTxs <= b39_outputs, True) lastOutpoint = COutPoint(b40.vtx[1].sha256, 0) lastAmount = b40.vtx[1].vout[0].nValue new_txs = [] for i in range(1, numTxs + 1): tx = CTransaction() tx.vout.append(CTxOut(1, CScript([OP_TRUE]))) tx.vin.append(CTxIn(lastOutpoint, b'')) # second input is corresponding P2SH output from b39 tx.vin.append(CTxIn(COutPoint(b39.vtx[i].sha256, 0), b'')) # Note: must pass the redeem_script (not p2sh_script) to the # signature hash function sighash = SignatureHashForkId( redeem_script, tx, 1, SIGHASH_ALL | SIGHASH_FORKID, lastAmount) sig = self.coinbase_key.sign_ecdsa( sighash) + bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID])) scriptSig = CScript([sig, redeem_script]) tx.vin[1].scriptSig = scriptSig pad_tx(tx) tx.rehash() new_txs.append(tx) lastOutpoint = COutPoint(tx.sha256, 0) lastAmount = tx.vout[0].nValue b40_sigops_to_fill = MAX_BLOCK_SIGOPS_PER_MB - \ (numTxs * b39_sigops_per_output + sigops) + 1 tx = CTransaction() tx.vin.append(CTxIn(lastOutpoint, b'')) tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b40_sigops_to_fill))) pad_tx(tx) tx.rehash() new_txs.append(tx) self.update_block(40, new_txs) self.sync_blocks([b40], success=False, reject_reason='bad-blk-sigops', reconnect=True) # same as b40, but one less sigop self.log.info("Accept a block with the max number of P2SH sigops") self.move_tip(39) b41 = self.next_block(41, spend=None) self.update_block(41, [b40tx for b40tx in b40.vtx[1:] if b40tx != tx]) b41_sigops_to_fill = b40_sigops_to_fill - 1 tx = CTransaction() tx.vin.append(CTxIn(lastOutpoint, b'')) tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b41_sigops_to_fill))) pad_tx(tx) self.update_block(41, [tx]) self.sync_blocks([b41], True) # ... skipped feature_block tests ... b72 = self.next_block(72) self.save_spendable_output() self.sync_blocks([b72]) # Test some invalid scripts and MAX_BLOCK_SIGOPS_PER_MB # # ..... -> b72 # \-> b** (22) # # b73 - tx with excessive sigops that are placed after an excessively large script element. # The purpose of the test is to make sure those sigops are counted. # # script is a bytearray of size 20,526 # # bytearray[0-19,998] : OP_CHECKSIG # bytearray[19,999] : OP_PUSHDATA4 # bytearray[20,000-20,003]: 521 (max_script_element_size+1, in little-endian format) # bytearray[20,004-20,525]: unread data (script_element) # bytearray[20,526] : OP_CHECKSIG (this puts us over the limit) self.log.info( "Reject a block containing too many sigops after a large script element") self.move_tip(72) b73 = self.next_block(73) size = MAX_BLOCK_SIGOPS_PER_MB - 1 + MAX_SCRIPT_ELEMENT_SIZE + 1 + 5 + 1 a = bytearray([OP_CHECKSIG] * size) a[MAX_BLOCK_SIGOPS_PER_MB - 1] = int("4e", 16) # OP_PUSHDATA4 element_size = MAX_SCRIPT_ELEMENT_SIZE + 1 a[MAX_BLOCK_SIGOPS_PER_MB] = element_size % 256 a[MAX_BLOCK_SIGOPS_PER_MB + 1] = element_size // 256 a[MAX_BLOCK_SIGOPS_PER_MB + 2] = 0 a[MAX_BLOCK_SIGOPS_PER_MB + 3] = 0 tx = self.create_and_sign_transaction(out[22], 1, CScript(a)) b73 = self.update_block(73, [tx]) assert_equal(get_legacy_sigopcount_block( b73), MAX_BLOCK_SIGOPS_PER_MB + 1) self.sync_blocks([b73], success=False, reject_reason='bad-blk-sigops', reconnect=True) # b74/75 - if we push an invalid script element, all prevous sigops are counted, # but sigops after the element are not counted. 
# # The invalid script element is that the push_data indicates that # there will be a large amount of data (0xffffff bytes), but we only # provide a much smaller number. These bytes are CHECKSIGS so they would # cause b75 to fail for excessive sigops, if those bytes were counted. # # b74 fails because we put MAX_BLOCK_SIGOPS_PER_MB+1 before the element # b75 succeeds because we put MAX_BLOCK_SIGOPS_PER_MB before the # element self.log.info( "Check sigops are counted correctly after an invalid script element") self.move_tip(72) b74 = self.next_block(74) size = MAX_BLOCK_SIGOPS_PER_MB - 1 + \ MAX_SCRIPT_ELEMENT_SIZE + 42 # total = 20,561 a = bytearray([OP_CHECKSIG] * size) a[MAX_BLOCK_SIGOPS_PER_MB] = 0x4e a[MAX_BLOCK_SIGOPS_PER_MB + 1] = 0xfe a[MAX_BLOCK_SIGOPS_PER_MB + 2] = 0xff a[MAX_BLOCK_SIGOPS_PER_MB + 3] = 0xff a[MAX_BLOCK_SIGOPS_PER_MB + 4] = 0xff tx = self.create_and_sign_transaction(out[22], 1, CScript(a)) b74 = self.update_block(74, [tx]) self.sync_blocks([b74], success=False, reject_reason='bad-blk-sigops', reconnect=True) self.move_tip(72) b75 = self.next_block(75) size = MAX_BLOCK_SIGOPS_PER_MB - 1 + MAX_SCRIPT_ELEMENT_SIZE + 42 a = bytearray([OP_CHECKSIG] * size) a[MAX_BLOCK_SIGOPS_PER_MB - 1] = 0x4e a[MAX_BLOCK_SIGOPS_PER_MB] = 0xff a[MAX_BLOCK_SIGOPS_PER_MB + 1] = 0xff a[MAX_BLOCK_SIGOPS_PER_MB + 2] = 0xff a[MAX_BLOCK_SIGOPS_PER_MB + 3] = 0xff tx = self.create_and_sign_transaction(out[22], 1, CScript(a)) b75 = self.update_block(75, [tx]) self.sync_blocks([b75], True) self.save_spendable_output() # Check that if we push an element filled with CHECKSIGs, they are not # counted self.move_tip(75) b76 = self.next_block(76) size = MAX_BLOCK_SIGOPS_PER_MB - 1 + MAX_SCRIPT_ELEMENT_SIZE + 1 + 5 a = bytearray([OP_CHECKSIG] * size) # PUSHDATA4, but leave the following bytes as just checksigs a[MAX_BLOCK_SIGOPS_PER_MB - 1] = 0x4e tx = self.create_and_sign_transaction(out[23], 1, CScript(a)) b76 = self.update_block(76, [tx]) self.sync_blocks([b76], True) self.save_spendable_output()
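# Illustrative sketch (not part of the test): the legacy sigop counting rule that
# b15/b16, b31-b36 and b73-b76 above exercise. OP_CHECKSIG(VERIFY) counts as 1,
# OP_CHECKMULTISIG(VERIFY) counts as the maximum of 20, pushed data is skipped,
# and counting stops at a push that runs past the end of the script (which is why
# the trailing OP_CHECKSIGs in b75/b76 are not counted). The real counter is
# get_legacy_sigopcount_block() from the test framework; the opcode byte values
# below are the standard Script encodings.
def count_legacy_sigops(script: bytes) -> int:
    sigops = 0
    i = 0
    while i < len(script):
        op = script[i]
        i += 1
        if op <= 0x4b:                          # direct push of `op` bytes
            i += op
        elif op == 0x4c:                        # OP_PUSHDATA1
            if i + 1 > len(script):
                break
            i += 1 + script[i]
        elif op == 0x4d:                        # OP_PUSHDATA2
            if i + 2 > len(script):
                break
            i += 2 + int.from_bytes(script[i:i + 2], 'little')
        elif op == 0x4e:                        # OP_PUSHDATA4
            if i + 4 > len(script):
                break
            i += 4 + int.from_bytes(script[i:i + 4], 'little')
        elif op in (0xac, 0xad):                # OP_CHECKSIG / OP_CHECKSIGVERIFY
            sigops += 1
        elif op in (0xae, 0xaf):                # OP_CHECKMULTISIG / OP_CHECKMULTISIGVERIFY
            sigops += 20
    return sigops

# Assuming MAX_BLOCK_SIGOPS_PER_MB == 20000 (its conventional value), the accepted
# blocks above leave exactly one sigop of headroom for the coinbase:
#   b15/b35: 19999 checksigs                    + 1 coinbase sigop = 20000 -> accepted
#   b31/b33:   999 multisigs (19980) + 19 checksigs + 1            = 20000 -> accepted
#   b16/b32/b34/b36: one sigop more                                        -> 'bad-blk-sigops'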
def run_test(self): node = self.nodes[0] self.log.info('Start with empty mempool, and 200 blocks') self.mempool_size = 0 assert_equal(node.getblockcount(), 200) assert_equal(node.getmempoolinfo()['size'], self.mempool_size) coins = node.listunspent() self.log.info('Should not accept garbage to testmempoolaccept') assert_raises_rpc_error( -3, 'Expected type array, got string', lambda: node.testmempoolaccept(rawtxs='ff00baar')) assert_raises_rpc_error( -8, 'Array must contain exactly one raw transaction for now', lambda: node.testmempoolaccept(rawtxs=['ff00baar', 'ff22'])) assert_raises_rpc_error( -22, 'TX decode failed', lambda: node.testmempoolaccept(rawtxs=['ff00baar'])) self.log.info('A transaction already in the blockchain') # Pick a random coin(base) to spend coin = coins.pop() raw_tx_in_block = node.signrawtransactionwithwallet( node.createrawtransaction( inputs=[{ 'txid': coin['txid'], 'vout': coin['vout'] }], outputs=[{ node.getnewaddress(): 30 }, { node.getnewaddress(): 70 }], ))['hex'] txid_in_block = node.sendrawtransaction(hexstring=raw_tx_in_block, maxfeerate=0) node.generate(1) self.mempool_size = 0 self.check_mempool_result( result_expected=[{ 'txid': txid_in_block, 'allowed': False, 'reject-reason': 'txn-already-known' }], rawtxs=[raw_tx_in_block], ) self.log.info('A transaction not in the mempool') fee = 0.000700 raw_tx_0 = node.signrawtransactionwithwallet( node.createrawtransaction( inputs=[{ "txid": txid_in_block, "vout": 0, "sequence": 0xfffffffd }], outputs=[{ node.getnewaddress(): 30 - fee }], ))['hex'] tx = FromHex(CTransaction(), raw_tx_0) tx.calc_txid() txid_0 = tx.txid_hex self.check_mempool_result( result_expected=[{ 'txid': txid_0, 'allowed': True }], rawtxs=[raw_tx_0], ) self.log.info('A final transaction not in the mempool') # Pick a random coin(base) to spend coin = coins.pop() raw_tx_final = node.signrawtransactionwithwallet( node.createrawtransaction( inputs=[{ 'txid': coin['txid'], 'vout': coin['vout'], "sequence": 0xffffffff }], # SEQUENCE_FINAL outputs=[{ node.getnewaddress(): 2.5 }], locktime=node.getblockcount() + 2000, # Can be anything ))['hex'] tx = FromHex(CTransaction(), raw_tx_final) tx.calc_txid() self.check_mempool_result( result_expected=[{ 'txid': tx.txid_hex, 'allowed': True }], rawtxs=[tx.serialize().hex()], maxfeerate=0, ) node.sendrawtransaction(hexstring=raw_tx_final, maxfeerate=0) self.mempool_size += 1 self.log.info('A transaction in the mempool') node.sendrawtransaction(hexstring=raw_tx_0) self.mempool_size += 1 self.check_mempool_result( result_expected=[{ 'txid': txid_0, 'allowed': False, 'reject-reason': 'txn-already-in-mempool' }], rawtxs=[raw_tx_0], ) # Removed RBF test # self.log.info('A transaction that replaces a mempool transaction') # ... 
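# Sketch of the check_mempool_result() helper used throughout this test. The real
# helper is defined elsewhere in the test class; this version only shows the
# assumed behaviour (compare against testmempoolaccept and verify that the mempool
# itself is untouched):
def check_mempool_result(self, result_expected, rawtxs, **kwargs):
    """Wrapper to check the result of testmempoolaccept on node_0's mempool."""
    result_test = self.nodes[0].testmempoolaccept(rawtxs=rawtxs, **kwargs)
    assert_equal(result_expected, result_test)
    # testmempoolaccept must never change the contents of the actual mempool
    assert_equal(self.nodes[0].getmempoolinfo()['size'], self.mempool_size)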
self.log.info('A transaction that conflicts with an unconfirmed tx') # Send the transaction that conflicts with the mempool transaction node.sendrawtransaction(hexstring=tx.serialize().hex(), maxfeerate=0) # take original raw_tx_0 tx = FromHex(CTransaction(), raw_tx_0) tx.vout[0].nValue -= int(4 * fee * COIN) # Set more fee tx.calc_txid() # skip re-signing the tx self.check_mempool_result( result_expected=[{ 'txid': tx.txid_hex, 'allowed': False, 'reject-reason': 'txn-mempool-conflict' }], rawtxs=[tx.serialize().hex()], maxfeerate=0, ) self.log.info('A transaction with missing inputs, that never existed') tx = FromHex(CTransaction(), raw_tx_0) tx.vin[0].prevout = COutPoint(hash=int('ff' * 32, 16), n=14) tx.calc_txid() # skip re-signing the tx self.check_mempool_result( result_expected=[{ 'txid': tx.txid_hex, 'allowed': False, 'reject-reason': 'missing-inputs' }], rawtxs=[ToHex(tx)], ) self.log.info( 'A transaction with missing inputs, that existed once in the past') tx = FromHex(CTransaction(), raw_tx_0) # Set vout to 1, to spend the other outpoint (49 coins) of the # in-chain-tx we want to double spend tx.vin[0].prevout.n = 1 raw_tx_1 = node.signrawtransactionwithwallet(ToHex(tx))['hex'] txid_1 = node.sendrawtransaction(hexstring=raw_tx_1, maxfeerate=0) # Now spend both to "clearly hide" the outputs, ie. remove the coins # from the utxo set by spending them raw_tx_spend_both = node.signrawtransactionwithwallet( node.createrawtransaction(inputs=[ { 'txid': txid_0, 'vout': 0 }, { 'txid': txid_1, 'vout': 0 }, ], outputs=[{ node.getnewaddress(): 10 }]))['hex'] txid_spend_both = node.sendrawtransaction(hexstring=raw_tx_spend_both, maxfeerate=0) node.generate(1) self.mempool_size = 0 # Now see if we can add the coins back to the utxo set by sending the # exact txs again self.check_mempool_result( result_expected=[{ 'txid': txid_0, 'allowed': False, 'reject-reason': 'missing-inputs' }], rawtxs=[raw_tx_0], ) self.check_mempool_result( result_expected=[{ 'txid': txid_1, 'allowed': False, 'reject-reason': 'missing-inputs' }], rawtxs=[raw_tx_1], ) self.log.info('Create a signed "reference" tx for later use') raw_tx_reference = node.signrawtransactionwithwallet( node.createrawtransaction( inputs=[{ 'txid': txid_spend_both, 'vout': 0 }], outputs=[{ node.getnewaddress(): 5 }], ))['hex'] tx = FromHex(CTransaction(), raw_tx_reference) tx.calc_txid() # Reference tx should be valid on itself self.check_mempool_result( result_expected=[{ 'txid': tx.txid_hex, 'allowed': True }], rawtxs=[ToHex(tx)], maxfeerate=0, ) self.log.info('A transaction with no outputs') tx = FromHex(CTransaction(), raw_tx_reference) tx.vout = [] tx.calc_txid() # Skip re-signing the transaction for context independent checks from now on # FromHex(tx, node.signrawtransactionwithwallet(ToHex(tx))['hex']) self.check_mempool_result( result_expected=[{ 'txid': tx.txid_hex, 'allowed': False, 'reject-reason': 'bad-txns-vout-empty' }], rawtxs=[ToHex(tx)], ) self.log.info('A really large transaction') tx = FromHex(CTransaction(), raw_tx_reference) tx.vin = [tx.vin[0] ] * (1 + MAX_BLOCK_BASE_SIZE // len(tx.vin[0].serialize())) tx.calc_txid() self.check_mempool_result( result_expected=[{ 'txid': tx.txid_hex, 'allowed': False, 'reject-reason': 'bad-txns-oversize' }], rawtxs=[ToHex(tx)], ) self.log.info('A transaction with negative output value') tx = FromHex(CTransaction(), raw_tx_reference) tx.vout[0].nValue *= -1 tx.calc_txid() self.check_mempool_result( result_expected=[{ 'txid': tx.txid_hex, 'allowed': False, 'reject-reason': 
'bad-txns-vout-negative' }], rawtxs=[ToHex(tx)], ) # The following two validations prevent overflow of the output amounts # (see CVE-2010-5139). self.log.info('A transaction with too large output value') tx = FromHex(CTransaction(), raw_tx_reference) tx.vout[0].nValue = MAX_MONEY + 1 tx.calc_txid() self.check_mempool_result( result_expected=[{ 'txid': tx.txid_hex, 'allowed': False, 'reject-reason': 'bad-txns-vout-toolarge' }], rawtxs=[ToHex(tx)], ) self.log.info('A transaction with too large sum of output values') tx = FromHex(CTransaction(), raw_tx_reference) tx.vout = [tx.vout[0]] * 2 tx.vout[0].nValue = MAX_MONEY tx.calc_txid() self.check_mempool_result( result_expected=[{ 'txid': tx.txid_hex, 'allowed': False, 'reject-reason': 'bad-txns-txouttotal-toolarge' }], rawtxs=[ToHex(tx)], ) self.log.info('A transaction with duplicate inputs') tx = FromHex(CTransaction(), raw_tx_reference) tx.vin = [tx.vin[0]] * 2 tx.calc_txid() self.check_mempool_result( result_expected=[{ 'txid': tx.txid_hex, 'allowed': False, 'reject-reason': 'bad-txns-inputs-duplicate' }], rawtxs=[ToHex(tx)], ) self.log.info('A coinbase transaction') # Pick the input of the first tx we signed, so it has to be a coinbase # tx raw_tx_coinbase_spent = node.getrawtransaction( txid=node.decoderawtransaction( hexstring=raw_tx_in_block)['vin'][0]['txid']) tx = FromHex(CTransaction(), raw_tx_coinbase_spent) tx.calc_txid() self.check_mempool_result( result_expected=[{ 'txid': tx.txid_hex, 'allowed': False, 'reject-reason': 'bad-tx-coinbase' }], rawtxs=[ToHex(tx)], ) self.log.info('Some nonstandard transactions') tx = FromHex(CTransaction(), raw_tx_reference) tx.nVersion = 3 # A version currently non-standard tx.calc_txid() self.check_mempool_result( result_expected=[{ 'txid': tx.txid_hex, 'allowed': False, 'reject-reason': 'version' }], rawtxs=[ToHex(tx)], ) tx = FromHex(CTransaction(), raw_tx_reference) tx.vout[0].scriptPubKey = CScript([OP_0]) # Some non-standard script tx.calc_txid() self.check_mempool_result( result_expected=[{ 'txid': tx.txid_hex, 'allowed': False, 'reject-reason': 'scriptpubkey' }], rawtxs=[ToHex(tx)], ) tx = FromHex(CTransaction(), raw_tx_reference) key = ECKey() key.generate() pubkey = key.get_pubkey().get_bytes() # Some bare multisig script (2-of-3) tx.vout[0].scriptPubKey = CScript( [OP_2, pubkey, pubkey, pubkey, OP_3, OP_CHECKMULTISIG]) tx.calc_txid() self.check_mempool_result( result_expected=[{ 'txid': tx.txid_hex, 'allowed': False, 'reject-reason': 'bare-multisig' }], rawtxs=[tx.serialize().hex()], ) tx = FromHex(CTransaction(), raw_tx_reference) # Some not-pushonly scriptSig tx.vin[0].scriptSig = CScript([OP_HASH160]) tx.calc_txid() self.check_mempool_result( result_expected=[{ 'txid': tx.txid_hex, 'allowed': False, 'reject-reason': 'scriptsig-not-pushonly' }], rawtxs=[ToHex(tx)], ) tx = FromHex(CTransaction(), raw_tx_reference) # Some too large scriptSig (>1650 bytes) tx.vin[0].scriptSig = CScript([b'a' * 1648]) tx.calc_txid() self.check_mempool_result( result_expected=[{ 'txid': tx.txid_hex, 'allowed': False, 'reject-reason': 'scriptsig-size' }], rawtxs=[tx.serialize().hex()], ) tx = FromHex(CTransaction(), raw_tx_reference) output_p2sh_burn = CTxOut(nValue=540, scriptPubKey=CScript( [OP_HASH160, hash160(b'burn'), OP_EQUAL])) # Use enough outputs to make the tx too large for our policy num_scripts = 100000 // len(output_p2sh_burn.serialize()) tx.vout = [output_p2sh_burn] * num_scripts tx.calc_txid() self.check_mempool_result( result_expected=[{ 'txid': tx.txid_hex, 'allowed': False, 'reject-reason': 
'tx-size' }], rawtxs=[ToHex(tx)], ) tx = FromHex(CTransaction(), raw_tx_reference) tx.vout[0] = output_p2sh_burn # Make output smaller, such that it is dust for our policy tx.vout[0].nValue -= 1 tx.calc_txid() self.check_mempool_result( result_expected=[{ 'txid': tx.txid_hex, 'allowed': False, 'reject-reason': 'dust' }], rawtxs=[ToHex(tx)], ) # 4 OP_RETURN outputs not allowed tx = FromHex(CTransaction(), raw_tx_reference) tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'\xff']) tx.vout = [tx.vout[0]] * 4 tx.calc_txid() self.check_mempool_result( result_expected=[{ 'txid': tx.txid_hex, 'allowed': False, 'reject-reason': 'multi-op-return' }], rawtxs=[ToHex(tx)], ) self.log.info('A timelocked transaction') tx = FromHex(CTransaction(), raw_tx_reference) # Should be non-max, so locktime is not ignored tx.vin[0].nSequence -= 1 tx.nLockTime = node.getblockcount() + 1 tx.calc_txid() self.check_mempool_result( result_expected=[{ 'txid': tx.txid_hex, 'allowed': False, 'reject-reason': 'bad-txns-nonfinal' }], rawtxs=[ToHex(tx)], ) self.log.info('A transaction that is locked by BIP68 sequence logic') tx = FromHex(CTransaction(), raw_tx_reference) # We could include it in the second block mined from now, but not the # very next one tx.vin[0].nSequence = 2 tx.calc_txid() # Can skip re-signing the tx because of early rejection self.check_mempool_result( result_expected=[{ 'txid': tx.txid_hex, 'allowed': False, 'reject-reason': 'non-BIP68-final' }], rawtxs=[tx.serialize().hex()], maxfeerate=0, )
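# The dust boundary used in the test above, spelled out. This assumes the usual
# dust rule (three times the relay fee applied to the serialized output size plus
# a 148-byte estimated spend size) with a 1000 sat/kB relay fee; exact defaults
# can differ between implementations:
p2sh_output_size = 8 + 1 + 23                                  # nValue + length byte + P2SH scriptPubKey
dust_threshold = 3 * 1000 * (p2sh_output_size + 148) // 1000   # = 540 satoshis
assert dust_threshold == 540   # which is why the test starts at nValue=540 and subtracts one satoshi to trip 'dust'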
def run_test(self): # Generate enough blocks to trigger certain block votes and activate BIP65 (version 4 blocks) amt = 1352 - self.nodes[0].getblockcount() for i in range(int(amt / 100)): self.generate(self.nodes[0], 100) self.sync_all() self.generate(self.nodes[0], 1352 - self.nodes[0].getblockcount()) self.sync_all() self.log.info("checking: not on chain tip") badtip = self.nodes[0].getblockhash(self.nodes[0].getblockcount() - 1) height = self.nodes[0].getblockcount() tip = self.nodes[0].getblockhash(height) coinbase = bu_create_coinbase(height + 1) cur_time = int(time.time()) self.nodes[0].setmocktime(cur_time) self.nodes[1].setmocktime(cur_time) block = create_block(badtip, coinbase, cur_time + 600) block.nVersion = 0x20000000 block.rehash() hexblk = ToHex(block) assert_raises_rpc_error(-25, "Invalid block: does not build on chain tip", self.nodes[0].validateblocktemplate, hexblk) self.log.info("checking: time too far in the past") block = create_block(tip, coinbase, cur_time - 100) block.nVersion = 0x20000000 block.rehash() hexblk = ToHex(block) assert_raises_rpc_error(-25, "Invalid block: time-too-old", self.nodes[0].validateblocktemplate, hexblk) self.log.info("checking: time too far in the future") block = create_block(tip, coinbase, cur_time + 10000000) block.nVersion = 0x20000000 block.rehash() hexblk = ToHex(block) assert_raises_rpc_error(-25, "Invalid block: time-too-new", self.nodes[0].validateblocktemplate, hexblk) self.log.info("checking: bad version 1") block = create_block(tip, coinbase, cur_time + 600) block.nVersion = 1 block.rehash() hexblk = ToHex(block) assert_raises_rpc_error(-25, "Invalid block: bad-version", self.nodes[0].validateblocktemplate, hexblk) self.log.info("checking: bad version 2") block = create_block(tip, coinbase, cur_time + 600) block.nVersion = 2 block.rehash() hexblk = ToHex(block) assert_raises_rpc_error(-25, "Invalid block: bad-version", self.nodes[0].validateblocktemplate, hexblk) self.log.info("checking: bad version 3") block = create_block(tip, coinbase, cur_time + 600) block.nVersion = 3 block.rehash() hexblk = ToHex(block) assert_raises_rpc_error(-25, "Invalid block: bad-version", self.nodes[0].validateblocktemplate, hexblk) # Note: 'bad-cb-height' test case cannot be tested , because regtest # always has BIP34 checks disabled. 
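# The two time bounds exercised above, as a standalone sketch. The limits are
# stated here as the conventional consensus values (assumptions, not taken from
# this test): a block's nTime must be strictly greater than the median time past
# of the previous 11 blocks, and at most two hours in the future.
mtp = self.nodes[0].getblockheader(self.nodes[0].getbestblockhash())['mediantime']
too_old_time = mtp                                 # nTime <= MTP      -> "time-too-old"
too_new_time = int(time.time()) + 2 * 60 * 60 + 1  # nTime > now + 2h  -> "time-too-new"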
self.log.info("checking: bad merkle root") block = create_block(tip, coinbase, cur_time + 600) block.nVersion = 0x20000000 block.hashMerkleRoot = 0x12345678 block.rehash() hexblk = ToHex(block) assert_raises_rpc_error(-25, "Invalid block: bad-txnmrklroot", self.nodes[0].validateblocktemplate, hexblk) self.log.info("checking: no tx") block = create_block(tip, None, cur_time + 600) block.nVersion = 0x20000000 block.rehash() hexblk = ToHex(block) assert_raises_rpc_error(-25, "Invalid block: bad-cb-missing", self.nodes[0].validateblocktemplate, hexblk) self.log.info("checking: good block") block = create_block(tip, coinbase, cur_time + 600) block.nVersion = 0x20000000 block.rehash() hexblk = ToHex(block) # validate good block self.nodes[0].validateblocktemplate(hexblk) block.solve() hexblk = ToHex(block) self.nodes[0].submitblock(hexblk) self.sync_all() prev_block = block # out_value is less than 50BTC because regtest halvings happen every 150 blocks, and is in Satoshis out_value = block.vtx[0].vout[0].nValue tx1 = create_transaction(prev_block.vtx[0], 0, b'\x61' * 50 + b'\x51', [int(out_value / 2), int(out_value / 2)]) height = self.nodes[0].getblockcount() tip = self.nodes[0].getblockhash(height) coinbase = bu_create_coinbase(height + 1) next_time = cur_time + 1200 self.log.info("checking: no coinbase") block = create_block(tip, None, next_time, txns=[tx1]) block.nVersion = 0x20000000 block.rehash() hexblk = ToHex(block) assert_raises_rpc_error(-25, "Invalid block: bad-cb-missing", self.nodes[0].validateblocktemplate, hexblk) self.log.info("checking: double coinbase") coinbase_key = ECKey() # coinbase_key.set_secretbytes(b"horsebattery") coinbase_key.set(b"horsbatt" * 4, True) coinbase_pubkey = coinbase_key.get_pubkey() coinbase2 = bu_create_coinbase(height + 1, coinbase_pubkey.get_bytes()) block = create_block(tip, coinbase, next_time, txns=[coinbase2, tx1]) block.nVersion = 0x20000000 block.rehash() hexblk = ToHex(block) assert_raises_rpc_error(-25, "Invalid block: bad-tx-coinbase", self.nodes[0].validateblocktemplate, hexblk) self.log.info("checking: premature coinbase spend") block = create_block(tip, coinbase, next_time, txns=[tx1]) block.nVersion = 0x20000000 block.rehash() hexblk = ToHex(block) assert_raises_rpc_error(-25, "Invalid block: bad-txns-premature-spend-of-coinbase", self.nodes[0].validateblocktemplate, hexblk) self.generate(self.nodes[0], 100) self.sync_all() height = self.nodes[0].getblockcount() tip = self.nodes[0].getblockhash(height) coinbase = bu_create_coinbase(height + 1) next_time = cur_time + 1200 op1 = CScript([OP_1]) self.log.info("checking: inputs below outputs") tx6 = create_transaction(prev_block.vtx[0], 0, op1, [out_value + 1000]) block = create_block(tip, coinbase, next_time, txns=[tx6]) block.nVersion = 0x20000000 block.rehash() hexblk = ToHex(block) assert_raises_rpc_error(-25, "Invalid block: bad-txns-in-belowout", self.nodes[0].validateblocktemplate, hexblk) tx5 = create_transaction(prev_block.vtx[0], 0, op1, [int(21000001 * COIN)]) self.log.info("checking: money range") block = create_block(tip, coinbase, next_time, txns=[tx5]) block.nVersion = 0x20000000 block.rehash() hexblk = ToHex(block) assert_raises_rpc_error(-25, "Invalid block: bad-txns-vout-toolarge", self.nodes[0].validateblocktemplate, hexblk) self.log.info("checking: bad tx offset") tx_bad = create_broken_transaction(prev_block.vtx[0], 1, op1, [int(out_value / 4)]) block = create_block(tip, coinbase, next_time, txns=[tx_bad]) block.nVersion = 0x20000000 block.rehash() hexblk = ToHex(block) 
assert_raises_rpc_error(-25, "Invalid block: bad-txns-inputs-missingorspent", self.nodes[0].validateblocktemplate, hexblk) self.log.info("checking: bad tx offset largest number") tx_bad = create_broken_transaction(prev_block.vtx[0], 0xffffffff, op1, [int(out_value / 4)]) block = create_block(tip, coinbase, next_time, txns=[tx_bad]) block.nVersion = 0x20000000 block.rehash() hexblk = ToHex(block) assert_raises_rpc_error(-25, "Invalid block: bad-txns-inputs-missingorspent", self.nodes[0].validateblocktemplate, hexblk) self.log.info("checking: double tx") tx2 = create_transaction(prev_block.vtx[0], 0, op1, [int(out_value / 4)]) block = create_block(tip, coinbase, next_time, txns=[tx2, tx2]) block.nVersion = 0x20000000 block.rehash() hexblk = ToHex(block) assert_raises_rpc_error(-25, "Invalid block: tx-duplicate", self.nodes[0].validateblocktemplate, hexblk) tx3 = create_transaction(prev_block.vtx[0], 0, op1, [int(out_value / 9), int(out_value / 10)]) tx4 = create_transaction(prev_block.vtx[0], 0, op1, [int(out_value / 8), int(out_value / 7)]) self.log.info("checking: double spend") block = create_block(tip, coinbase, next_time, txns=[tx3, tx4]) block.nVersion = 0x20000000 block.rehash() hexblk = ToHex(block) assert_raises_rpc_error(-25, "Invalid block: bad-txns-inputs-missingorspent", self.nodes[0].validateblocktemplate, hexblk) txes = [tx3, tx4] txes.sort(key=lambda x: x.hash, reverse=True) self.log.info("checking: bad tx ordering") block = create_block(tip, coinbase, next_time, txns=txes, ctor=False) block.nVersion = 0x20000000 block.rehash() hexblk = ToHex(block) assert_raises_rpc_error(-25, "Invalid block: tx-ordering", self.nodes[0].validateblocktemplate, hexblk) tx_good = create_transaction(prev_block.vtx[0], 0, b'\x51', [int(out_value / 50)] * 50, out=b"") self.log.info("checking: good tx") block = create_block(tip, coinbase, next_time, txns=[tx_good]) block.nVersion = 0x20000000 block.rehash() block.solve() hexblk = ToHex(block) self.nodes[0].validateblocktemplate(hexblk) self.nodes[0].submitblock(hexblk) self.sync_all() height = self.nodes[0].getblockcount() tip = self.nodes[0].getblockhash(height) coinbase = bu_create_coinbase(height + 1) next_time = next_time + 600 coinbase_key = ECKey() # coinbase_key.set_secretbytes(b"horsebattery") coinbase_key.set(b"horsbatt" * 4, True) coinbase_pubkey = coinbase_key.get_pubkey() #coinbase3 = bu_create_coinbase(height + 1, coinbase_pubkey) bu_create_coinbase(height + 1, coinbase_pubkey.get_bytes()) txl = [] for i in range(0, 50): ov = block.vtx[1].vout[i].nValue txl.append(create_transaction(block.vtx[1], i, op1, [int(ov / 50)] * 50)) block = create_block(tip, coinbase, next_time, txns=txl) block.nVersion = 0x20000000 block.rehash() block.solve() hexblk = ToHex(block) for n in self.nodes: n.validateblocktemplate(hexblk) # Note: BCHN does not have RPC method like BU's 'setminingmaxblocksize' yet, # therefore we omit some test cases related to excessiveblock size when # modifying that setting on the fly. # # The related BU error messages that we do not test for here are: # - RPC error 25, message: "Invalid block: excessive" # - RPC error 25, message: "Sorry, your maximum mined block (1000) is larger than your proposed excessive size (999). This would cause you to orphan your own blocks." 
# Randomly change bytes for it in range(0, 100): h2 = hexblk pos = random.randint(0, len(hexblk)) val = random.randint(0, 15) h3 = h2[:pos] + ('{:x}'.format(val)) + h2[pos + 1:] try: self.nodes[0].validateblocktemplate(h3) except JSONRPCException as e: if e.error["code"] in (-1, -22, -25): self.log.info(f"Got expected exception: " + str(e)) else: self.log.exception(f"Failed for iteration {it}") # its ok we expect garbage self.nodes[1].submitblock(hexblk) self.sync_all() height = self.nodes[0].getblockcount() tip = self.nodes[0].getblockhash(height) coinbase = bu_create_coinbase(height + 1) next_time = next_time + 600 prev_block = block txl = [] for tx in prev_block.vtx: for outp in range(0, len(tx.vout)): ov = tx.vout[outp].nValue txl.append(create_transaction(tx, outp, CScript([OP_CHECKSIG] * 100), [int(ov / 2)] * 2)) block = create_block(tip, coinbase, next_time, txns=txl) block.nVersion = 0x20000000 block.rehash() block.solve() hexblk = ToHex(block) for n in self.nodes: assert_raises_rpc_error(-25, "Invalid block: bad-txns-premature-spend-of-coinbase", self.nodes[0].validateblocktemplate, hexblk)
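# A slightly more defensive shape for the byte-flip fuzz loop above (sketch, same
# intent): mutate one hex nibble at a time and accept only the error codes that
# the original loop treats as expected for garbage input, instead of merely
# logging anything else.
for it in range(100):
    pos = random.randint(0, len(hexblk) - 1)
    mutated = hexblk[:pos] + '{:x}'.format(random.randint(0, 15)) + hexblk[pos + 1:]
    try:
        self.nodes[0].validateblocktemplate(mutated)
    except JSONRPCException as e:
        # the codes the original loop expects for garbage input
        assert e.error["code"] in (-1, -22, -25), f"iteration {it}: {e}"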
def run_test(self): p2p0 = self.nodes[0].add_p2p_connection(BaseNode()) # Build the blockchain self.tip = int(self.nodes[0].getbestblockhash(), 16) self.block_time = self.nodes[0].getblock( self.nodes[0].getbestblockhash())['time'] + 1 self.blocks = [] # Get a pubkey for the coinbase TXO coinbase_key = ECKey() coinbase_key.generate() coinbase_pubkey = coinbase_key.get_pubkey().get_bytes() # Create the first block with a coinbase output to our key height = 1 block = create_block(self.tip, create_coinbase(height, coinbase_pubkey), self.block_time) self.blocks.append(block) self.block_time += 1 block.solve() # Save the coinbase for later self.block1 = block self.tip = block.sha256 height += 1 # Bury the block 100 deep so the coinbase output is spendable for _ in range(100): block = create_block(self.tip, create_coinbase(height), self.block_time) block.solve() self.blocks.append(block) self.tip = block.sha256 self.block_time += 1 height += 1 # Create a transaction spending the coinbase output with an invalid (null) signature tx = CTransaction() tx.vin.append( CTxIn(COutPoint(self.block1.vtx[0].sha256, 0), scriptSig=b"")) tx.vout.append(CTxOut(49 * 100000000, CScript([OP_TRUE]))) tx.calc_sha256() block102 = create_block(self.tip, create_coinbase(height), self.block_time) self.block_time += 1 block102.vtx.extend([tx]) block102.hashMerkleRoot = block102.calc_merkle_root() block102.solve() self.blocks.append(block102) self.tip = block102.sha256 self.block_time += 1 height += 1 # Bury the assumed valid block 2100 deep for _ in range(2100): block = create_block(self.tip, create_coinbase(height), self.block_time) block.nVersion = 4 block.solve() self.blocks.append(block) self.tip = block.sha256 self.block_time += 1 height += 1 self.nodes[0].disconnect_p2ps() # Start node1 and node2 with assumevalid so they accept a block with a bad signature. self.start_node(1, extra_args=["-assumevalid=" + hex(block102.sha256)]) self.start_node(2, extra_args=["-assumevalid=" + hex(block102.sha256)]) p2p0 = self.nodes[0].add_p2p_connection(BaseNode()) p2p1 = self.nodes[1].add_p2p_connection(BaseNode()) p2p2 = self.nodes[2].add_p2p_connection(BaseNode()) # send header lists to all three nodes p2p0.send_header_for_blocks(self.blocks[0:2000]) p2p0.send_header_for_blocks(self.blocks[2000:]) p2p1.send_header_for_blocks(self.blocks[0:2000]) p2p1.send_header_for_blocks(self.blocks[2000:]) p2p2.send_header_for_blocks(self.blocks[0:200]) # Send blocks to node0. Block 102 will be rejected. self.send_blocks_until_disconnected(p2p0) self.wait_until( lambda: self.nodes[0].getblockcount() >= COINBASE_MATURITY + 1) assert_equal(self.nodes[0].getblockcount(), COINBASE_MATURITY + 1) # Send all blocks to node1. All blocks will be accepted. for i in range(2202): p2p1.send_message(msg_block(self.blocks[i])) # Syncing 2200 blocks can take a while on slow systems. Give it plenty of time to sync. p2p1.sync_with_ping(960) assert_equal( self.nodes[1].getblock(self.nodes[1].getbestblockhash())['height'], 2202) # Send blocks to node2. Block 102 will be rejected. self.send_blocks_until_disconnected(p2p2) self.wait_until( lambda: self.nodes[2].getblockcount() >= COINBASE_MATURITY + 1) assert_equal(self.nodes[2].getblockcount(), COINBASE_MATURITY + 1)
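# Sketch of the send_blocks_until_disconnected() helper this test relies on
# (modelled on the upstream feature_assumevalid helper; illustrative only): keep
# feeding blocks until the node drops the connection at the first block it fully
# validates and rejects (block 102, whose coinbase spend carries a null
# signature), which is why node0 and node2 stop at COINBASE_MATURITY + 1.
def send_blocks_until_disconnected(self, p2p_conn):
    """Keep sending blocks to the node until we're disconnected."""
    for i in range(len(self.blocks)):
        if not p2p_conn.is_connected:
            break
        try:
            p2p_conn.send_message(msg_block(self.blocks[i]))
        except IOError:
            assert not p2p_conn.is_connected
            break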
class MiniWallet: def __init__(self, test_node, *, mode=MiniWalletMode.ADDRESS_OP_TRUE): self._test_node = test_node self._utxos = [] self._mode = mode assert isinstance(mode, MiniWalletMode) if mode == MiniWalletMode.RAW_OP_TRUE: self._scriptPubKey = bytes(CScript([OP_TRUE])) elif mode == MiniWalletMode.RAW_P2PK: # use simple deterministic private key (k=1) self._priv_key = ECKey() self._priv_key.set((1).to_bytes(32, 'big'), True) pub_key = self._priv_key.get_pubkey() self._scriptPubKey = key_to_p2pk_script(pub_key.get_bytes()) elif mode == MiniWalletMode.ADDRESS_OP_TRUE: self._address, self._internal_key = create_deterministic_address_bcrt1_p2tr_op_true( ) self._scriptPubKey = bytes.fromhex( self._test_node.validateaddress(self._address)['scriptPubKey']) def _create_utxo(self, *, txid, vout, value, height): return {"txid": txid, "vout": vout, "value": value, "height": height} def _bulk_tx(self, tx, target_weight): """Pad a transaction with extra outputs until it reaches a target weight (or higher). returns the tx """ tx.vout.append( CTxOut(nValue=0, scriptPubKey=CScript([OP_RETURN, b'a']))) dummy_vbytes = (target_weight - tx.get_weight() + 3) // 4 tx.vout[-1].scriptPubKey = CScript([OP_RETURN, b'a' * dummy_vbytes]) # Lower bound should always be off by at most 3 assert_greater_than_or_equal(tx.get_weight(), target_weight) # Higher bound should always be off by at most 3 + 12 weight (for encoding the length) assert_greater_than_or_equal(target_weight + 15, tx.get_weight()) def get_balance(self): return sum(u['value'] for u in self._utxos) def rescan_utxos(self): """Drop all utxos and rescan the utxo set""" self._utxos = [] res = self._test_node.scantxoutset(action="start", scanobjects=[self.get_descriptor()]) assert_equal(True, res['success']) for utxo in res['unspents']: self._utxos.append( self._create_utxo(txid=utxo["txid"], vout=utxo["vout"], value=utxo["amount"], height=utxo["height"])) def scan_tx(self, tx): """Scan the tx and adjust the internal list of owned utxos""" for spent in tx["vin"]: # Mark spent. This may happen when the caller has ownership of a # utxo that remained in this wallet. For example, by passing # mark_as_spent=False to get_utxo or by using an utxo returned by a # create_self_transfer* call. 
try: self.get_utxo(txid=spent["txid"], vout=spent["vout"]) except StopIteration: pass for out in tx['vout']: if out['scriptPubKey']['hex'] == self._scriptPubKey.hex(): self._utxos.append( self._create_utxo(txid=tx["txid"], vout=out["n"], value=out["value"], height=0)) def sign_tx(self, tx, fixed_length=True): """Sign tx that has been created by MiniWallet in P2PK mode""" assert_equal(self._mode, MiniWalletMode.RAW_P2PK) (sighash, err) = LegacySignatureHash(CScript(self._scriptPubKey), tx, 0, SIGHASH_ALL) assert err is None # for exact fee calculation, create only signatures with fixed size by default (>49.89% probability): # 65 bytes: high-R val (33 bytes) + low-S val (32 bytes) # with the DER header/skeleton data of 6 bytes added, this leads to a target size of 71 bytes der_sig = b'' while not len(der_sig) == 71: der_sig = self._priv_key.sign_ecdsa(sighash) if not fixed_length: break tx.vin[0].scriptSig = CScript( [der_sig + bytes(bytearray([SIGHASH_ALL]))]) tx.rehash() def generate(self, num_blocks, **kwargs): """Generate blocks with coinbase outputs to the internal address, and call rescan_utxos""" blocks = self._test_node.generatetodescriptor(num_blocks, self.get_descriptor(), **kwargs) # Calling rescan_utxos here makes sure that after a generate the utxo # set is in a clean state. For example, the wallet will update # - if the caller consumed utxos, but never used them # - if the caller sent a transaction that is not mined or got rbf'd # - after block re-orgs # - the utxo height for mined mempool txs # - However, the wallet will not consider remaining mempool txs self.rescan_utxos() return blocks def get_scriptPubKey(self): return self._scriptPubKey def get_descriptor(self): return descsum_create(f'raw({self._scriptPubKey.hex()})') def get_address(self): assert_equal(self._mode, MiniWalletMode.ADDRESS_OP_TRUE) return self._address def get_utxo(self, *, txid: str = '', vout: Optional[int] = None, mark_as_spent=True) -> dict: """ Returns a utxo and marks it as spent (pops it from the internal list) Args: txid: get the first utxo we find from a specific transaction """ self._utxos = sorted( self._utxos, key=lambda k: (k['value'], -k['height'])) # Put the largest utxo last if txid: utxo_filter: Any = filter(lambda utxo: txid == utxo['txid'], self._utxos) else: utxo_filter = reversed(self._utxos) # By default the largest utxo if vout is not None: utxo_filter = filter(lambda utxo: vout == utxo['vout'], utxo_filter) index = self._utxos.index(next(utxo_filter)) if mark_as_spent: return self._utxos.pop(index) else: return self._utxos[index] def get_utxos(self, *, mark_as_spent=True): """Returns the list of all utxos and optionally mark them as spent""" utxos = deepcopy(self._utxos) if mark_as_spent: self._utxos = [] return utxos def send_self_transfer(self, *, from_node, **kwargs): """Call create_self_transfer and send the transaction.""" tx = self.create_self_transfer(**kwargs) self.sendrawtransaction(from_node=from_node, tx_hex=tx['hex']) return tx def send_to(self, *, from_node, scriptPubKey, amount, fee=1000): """ Create and send a tx with an output to a given scriptPubKey/amount, plus a change output to our internal address. To keep things simple, a fixed fee given in Satoshi is used. Note that this method fails if there is no single internal utxo available that can cover the cost for the amount and the fixed fee (the utxo with the largest value is taken). Returns a tuple (txid, n) referring to the created external utxo outpoint. 
""" tx = self.create_self_transfer(fee_rate=0)["tx"] assert_greater_than_or_equal(tx.vout[0].nValue, amount + fee) tx.vout[0].nValue -= (amount + fee) # change output -> MiniWallet tx.vout.append(CTxOut( amount, scriptPubKey)) # arbitrary output -> to be returned txid = self.sendrawtransaction(from_node=from_node, tx_hex=tx.serialize().hex()) return txid, 1 def send_self_transfer_multi(self, *, from_node, **kwargs): """Call create_self_transfer_multi and send the transaction.""" tx = self.create_self_transfer_multi(**kwargs) self.sendrawtransaction(from_node=from_node, tx_hex=tx["hex"]) return tx def create_self_transfer_multi(self, *, utxos_to_spend: Optional[List[dict]] = None, num_outputs=1, amount_per_output=0, sequence=0, fee_per_output=1000, target_weight=0): """ Create and return a transaction that spends the given UTXOs and creates a certain number of outputs with equal amounts. The output amounts can be set by amount_per_output or automatically calculated with a fee_per_output. """ utxos_to_spend = utxos_to_spend or [self.get_utxo()] sequence = [sequence] * len(utxos_to_spend) if type( sequence) is int else sequence assert_equal(len(utxos_to_spend), len(sequence)) # create simple tx template (1 input, 1 output) tx = self.create_self_transfer(fee_rate=0, utxo_to_spend=utxos_to_spend[0])["tx"] # duplicate inputs, witnesses and outputs tx.vin = [deepcopy(tx.vin[0]) for _ in range(len(utxos_to_spend))] for txin, seq in zip(tx.vin, sequence): txin.nSequence = seq tx.wit.vtxinwit = [ deepcopy(tx.wit.vtxinwit[0]) for _ in range(len(utxos_to_spend)) ] tx.vout = [deepcopy(tx.vout[0]) for _ in range(num_outputs)] # adapt input prevouts for i, utxo in enumerate(utxos_to_spend): tx.vin[i] = CTxIn(COutPoint(int(utxo['txid'], 16), utxo['vout'])) # adapt output amounts (use fixed fee per output) inputs_value_total = sum( [int(COIN * utxo['value']) for utxo in utxos_to_spend]) outputs_value_total = inputs_value_total - fee_per_output * num_outputs for o in tx.vout: o.nValue = amount_per_output or (outputs_value_total // num_outputs) if target_weight: self._bulk_tx(tx, target_weight) txid = tx.rehash() return { "new_utxos": [ self._create_utxo( txid=txid, vout=i, value=Decimal(tx.vout[i].nValue) / COIN, height=0, ) for i in range(len(tx.vout)) ], "txid": txid, "hex": tx.serialize().hex(), "tx": tx, } def create_self_transfer(self, *, fee_rate=Decimal("0.003"), fee=Decimal("0"), utxo_to_spend=None, locktime=0, sequence=0, target_weight=0): """Create and return a tx with the specified fee. 
If fee is 0, use fee_rate, where the resulting fee may be exact or at most one satoshi higher than needed.""" utxo_to_spend = utxo_to_spend or self.get_utxo() assert fee_rate >= 0 assert fee >= 0 if self._mode in (MiniWalletMode.RAW_OP_TRUE, MiniWalletMode.ADDRESS_OP_TRUE): vsize = Decimal(104) # anyone-can-spend elif self._mode == MiniWalletMode.RAW_P2PK: vsize = Decimal( 168 ) # P2PK (73 bytes scriptSig + 35 bytes scriptPubKey + 60 bytes other) else: assert False send_value = utxo_to_spend["value"] - (fee or (fee_rate * vsize / 1000)) assert send_value > 0 tx = CTransaction() tx.vin = [ CTxIn(COutPoint(int(utxo_to_spend['txid'], 16), utxo_to_spend['vout']), nSequence=sequence) ] tx.vout = [ CTxOut(int(COIN * send_value), bytearray(self._scriptPubKey)) ] tx.nLockTime = locktime if self._mode == MiniWalletMode.RAW_P2PK: self.sign_tx(tx) elif self._mode == MiniWalletMode.RAW_OP_TRUE: tx.vin[0].scriptSig = CScript([OP_NOP] * 43) # pad to identical size elif self._mode == MiniWalletMode.ADDRESS_OP_TRUE: tx.wit.vtxinwit = [CTxInWitness()] tx.wit.vtxinwit[0].scriptWitness.stack = [ CScript([OP_TRUE]), bytes([LEAF_VERSION_TAPSCRIPT]) + self._internal_key ] else: assert False assert_equal(tx.get_vsize(), vsize) if target_weight: self._bulk_tx(tx, target_weight) tx_hex = tx.serialize().hex() new_utxo = self._create_utxo(txid=tx.rehash(), vout=0, value=send_value, height=0) return { "txid": new_utxo["txid"], "wtxid": tx.getwtxid(), "hex": tx_hex, "tx": tx, "new_utxo": new_utxo } def sendrawtransaction(self, *, from_node, tx_hex, maxfeerate=0, **kwargs): txid = from_node.sendrawtransaction(hexstring=tx_hex, maxfeerate=maxfeerate, **kwargs) self.scan_tx(from_node.decoderawtransaction(tx_hex)) return txid def send_self_transfer_chain(self, *, from_node, chain_length, utxo_to_spend=None): """Create and send a "chain" of chain_length transactions. The nth transaction in the chain is a child of the n-1th transaction and parent of the n+1th transaction. Returns the chaintip (nth) utxo """ chaintip_utxo = utxo_to_spend or self.get_utxo() for _ in range(chain_length): chaintip_utxo = self.send_self_transfer( utxo_to_spend=chaintip_utxo, from_node=from_node)["new_utxo"] return chaintip_utxo
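# Typical MiniWallet usage in a functional test, based on the methods above. The
# regtest node handle and the uniform block subsidy are assumptions of this
# sketch, not guarantees of the class:
wallet = MiniWallet(node)
wallet.rescan_utxos()                              # adopt any existing coins paying to our script
wallet.generate(101)                               # mine coinbase outputs; the oldest is now mature
tx = wallet.send_self_transfer(from_node=node)     # build, sign if needed, and broadcast
assert tx["txid"] in node.getrawmempool()
next_utxo = tx["new_utxo"]                         # chain a second spend off the change
wallet.send_self_transfer(from_node=node, utxo_to_spend=next_utxo)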
def run_test(self): self.log.info("Setup wallets...") # w0 is a wallet with coinbase rewards w0 = self.nodes[0].get_wallet_rpc(self.default_wallet_name) # w1 is a regular wallet self.nodes[1].createwallet(wallet_name="w1") w1 = self.nodes[1].get_wallet_rpc("w1") # w2 contains the private keys for w3 self.nodes[1].createwallet(wallet_name="w2", blank=True) w2 = self.nodes[1].get_wallet_rpc("w2") xpriv = "tprv8ZgxMBicQKsPfHCsTwkiM1KT56RXbGGTqvc2hgqzycpwbHqqpcajQeMRZoBD35kW4RtyCemu6j34Ku5DEspmgjKdt2qe4SvRch5Kk8B8A2v" xpub = "tpubD6NzVbkrYhZ4YkEfMbRJkQyZe7wTkbTNRECozCtJPtdLRn6cT1QKb8yHjwAPcAr26eHBFYs5iLiFFnCbwPRsncCKUKCfubHDMGKzMVcN1Jg" if self.options.descriptors: w2.importdescriptors([{ "desc": descsum_create("wpkh(" + xpriv + "/0/0/*)"), "timestamp": "now", "range": [0, 100], "active": True }, { "desc": descsum_create("wpkh(" + xpriv + "/0/1/*)"), "timestamp": "now", "range": [0, 100], "active": True, "internal": True }]) else: w2.sethdseed(True) # w3 is a watch-only wallet, based on w2 self.nodes[1].createwallet(wallet_name="w3", disable_private_keys=True) w3 = self.nodes[1].get_wallet_rpc("w3") if self.options.descriptors: # Match the privkeys in w2 for descriptors res = w3.importdescriptors([{ "desc": descsum_create("wpkh(" + xpub + "/0/0/*)"), "timestamp": "now", "range": [0, 100], "keypool": True, "active": True, "watchonly": True }, { "desc": descsum_create("wpkh(" + xpub + "/0/1/*)"), "timestamp": "now", "range": [0, 100], "keypool": True, "active": True, "internal": True, "watchonly": True }]) assert_equal(res, [{"success": True}, {"success": True}]) for _ in range(3): a2_receive = w2.getnewaddress() if not self.options.descriptors: # Because legacy wallets use exclusively hardened derivation, we can't do a ranged import like we do for descriptors a2_change = w2.getrawchangeaddress( ) # doesn't actually use change derivation res = w3.importmulti([{ "desc": w2.getaddressinfo(a2_receive)["desc"], "timestamp": "now", "keypool": True, "watchonly": True }, { "desc": w2.getaddressinfo(a2_change)["desc"], "timestamp": "now", "keypool": True, "internal": True, "watchonly": True }]) assert_equal(res, [{"success": True}, {"success": True}]) w0.sendtoaddress(a2_receive, 10) # fund w3 self.generate(self.nodes[0], 1) if not self.options.descriptors: # w4 has private keys enabled, but only contains watch-only keys (from w2) # This is legacy wallet behavior only as descriptor wallets don't allow watchonly and non-watchonly things in the same wallet. self.nodes[1].createwallet(wallet_name="w4", disable_private_keys=False) w4 = self.nodes[1].get_wallet_rpc("w4") for _ in range(3): a2_receive = w2.getnewaddress() res = w4.importmulti([{ "desc": w2.getaddressinfo(a2_receive)["desc"], "timestamp": "now", "keypool": False, "watchonly": True }]) assert_equal(res, [{"success": True}]) w0.sendtoaddress(a2_receive, 10) # fund w4 self.generate(self.nodes[0], 1) self.log.info("Send to address...") self.test_send(from_wallet=w0, to_wallet=w1, amount=1) self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=True) self.log.info("Don't broadcast...") res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=False) assert (res["hex"]) self.log.info("Return PSBT...") res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, psbt=True) assert (res["psbt"]) self.log.info( "Create transaction that spends to address, but don't broadcast..." 
) self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=False) # conf_target & estimate_mode can be set as argument or option res1 = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_conf_target=1, arg_estimate_mode="economical", add_to_wallet=False) res2 = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=1, estimate_mode="economical", add_to_wallet=False) assert_equal(self.nodes[1].decodepsbt(res1["psbt"])["fee"], self.nodes[1].decodepsbt(res2["psbt"])["fee"]) # but not at the same time for mode in ["unset", "economical", "conservative"]: self.test_send( from_wallet=w0, to_wallet=w1, amount=1, arg_conf_target=1, arg_estimate_mode="economical", conf_target=1, estimate_mode=mode, add_to_wallet=False, expect_error= (-8, "Pass conf_target and estimate_mode either as arguments or in the options object, but not both" )) self.log.info("Create PSBT from watch-only wallet w3, sign with w2...") res = self.test_send(from_wallet=w3, to_wallet=w1, amount=1) res = w2.walletprocesspsbt(res["psbt"]) assert res["complete"] if not self.options.descriptors: # Descriptor wallets do not allow mixed watch-only and non-watch-only things in the same wallet. # This is specifically testing that w4 ignores its own private keys and creates a psbt with send # which is not something that needs to be tested in descriptor wallets. self.log.info( "Create PSBT from wallet w4 with watch-only keys, sign with w2..." ) self.test_send(from_wallet=w4, to_wallet=w1, amount=1, expect_error=(-4, "Insufficient funds")) res = self.test_send(from_wallet=w4, to_wallet=w1, amount=1, include_watching=True, add_to_wallet=False) res = w2.walletprocesspsbt(res["psbt"]) assert res["complete"] self.log.info("Create OP_RETURN...") self.test_send(from_wallet=w0, to_wallet=w1, amount=1) self.test_send( from_wallet=w0, data="Hello World", expect_error=( -8, "Data must be hexadecimal string (not 'Hello World')")) self.test_send(from_wallet=w0, data="23") res = self.test_send(from_wallet=w3, data="23") res = w2.walletprocesspsbt(res["psbt"]) assert res["complete"] self.log.info("Test setting explicit fee rate") res1 = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate="1", add_to_wallet=False) res2 = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate="1", add_to_wallet=False) assert_equal(self.nodes[1].decodepsbt(res1["psbt"])["fee"], self.nodes[1].decodepsbt(res2["psbt"])["fee"]) res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=7, add_to_wallet=False) fee = self.nodes[1].decodepsbt(res["psbt"])["fee"] assert_fee_amount(fee, count_bytes(res["hex"]), Decimal("0.00007")) # "unset" and None are treated the same for estimate_mode res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=2, estimate_mode="unset", add_to_wallet=False) fee = self.nodes[1].decodepsbt(res["psbt"])["fee"] assert_fee_amount(fee, count_bytes(res["hex"]), Decimal("0.00002")) res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=4.531, add_to_wallet=False) fee = self.nodes[1].decodepsbt(res["psbt"])["fee"] assert_fee_amount(fee, count_bytes(res["hex"]), Decimal("0.00004531")) res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=3, add_to_wallet=False) fee = self.nodes[1].decodepsbt(res["psbt"])["fee"] assert_fee_amount(fee, count_bytes(res["hex"]), Decimal("0.00003")) # Test that passing fee_rate as both an argument and an option raises. 
self.test_send( from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=1, fee_rate=1, add_to_wallet=False, expect_error= (-8, "Pass the fee_rate either as an argument, or in the options object, but not both" )) assert_raises_rpc_error(-8, "Use fee_rate (sat/vB) instead of feeRate", w0.send, {w1.getnewaddress(): 1}, 6, "conservative", 1, {"feeRate": 0.01}) assert_raises_rpc_error(-3, "Unexpected key totalFee", w0.send, {w1.getnewaddress(): 1}, 6, "conservative", 1, {"totalFee": 0.01}) for target, mode in product([-1, 0, 1009], ["economical", "conservative"]): self.test_send( from_wallet=w0, to_wallet=w1, amount=1, conf_target=target, estimate_mode=mode, expect_error=(-8, "Invalid conf_target, must be between 1 and 1008" )) # max value of 1008 per src/policy/fees.h msg = 'Invalid estimate_mode parameter, must be one of: "unset", "economical", "conservative"' for target, mode in product([-1, 0], ["umk/kb", "sat/b"]): self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=target, estimate_mode=mode, expect_error=(-8, msg)) for mode in ["", "foo", Decimal("3.141592")]: self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=0.1, estimate_mode=mode, expect_error=(-8, msg)) self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_conf_target=0.1, arg_estimate_mode=mode, expect_error=(-8, msg)) assert_raises_rpc_error(-8, msg, w0.send, {w1.getnewaddress(): 1}, 0.1, mode) for mode in ["economical", "conservative"]: for k, v in { "string": "true", "bool": True, "object": { "foo": "bar" } }.items(): self.test_send( from_wallet=w0, to_wallet=w1, amount=1, conf_target=v, estimate_mode=mode, expect_error=( -3, f"Expected type number for conf_target, got {k}")) # Test setting explicit fee rate just below the minimum of 1 sat/vB. self.log.info( "Explicit fee rate raises RPC error 'fee rate too low' if fee_rate of 0.99999999 is passed" ) msg = "Fee rate (0.999 sat/vB) is lower than the minimum fee rate setting (1.000 sat/vB)" self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=0.999, expect_error=(-4, msg)) self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=0.999, expect_error=(-4, msg)) self.log.info("Explicit fee rate raises if invalid fee_rate is passed") # Test fee_rate with zero values. msg = "Fee rate (0.000 sat/vB) is lower than the minimum fee rate setting (1.000 sat/vB)" for zero_value in [0, 0.000, 0.00000000, "0", "0.000", "0.00000000"]: self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=zero_value, expect_error=(-4, msg)) self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=zero_value, expect_error=(-4, msg)) msg = "Invalid amount" # Test fee_rate values that don't pass fixed-point parsing checks. for invalid_value in [ "", 0.000000001, 1e-09, 1.111111111, 1111111111111111, "31.999999999999999999999" ]: self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=invalid_value, expect_error=(-3, msg)) self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=invalid_value, expect_error=(-3, msg)) # Test fee_rate values that cannot be represented in sat/vB. for invalid_value in [ 0.0001, 0.00000001, 0.00099999, 31.99999999, "0.0001", "0.00000001", "0.00099999", "31.99999999" ]: self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=invalid_value, expect_error=(-3, msg)) self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=invalid_value, expect_error=(-3, msg)) # Test fee_rate out of range (negative number). 
msg = "Amount out of range" self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=-1, expect_error=(-3, msg)) self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=-1, expect_error=(-3, msg)) # Test type error. msg = "Amount is not a number or string" for invalid_value in [True, {"foo": "bar"}]: self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=invalid_value, expect_error=(-3, msg)) self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=invalid_value, expect_error=(-3, msg)) # TODO: Return hex if fee rate is below -maxmempool # res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=0.1, estimate_mode="sat/b", add_to_wallet=False) # assert res["hex"] # hex = res["hex"] # res = self.nodes[0].testmempoolaccept([hex]) # assert not res[0]["allowed"] # assert_equal(res[0]["reject-reason"], "...") # low fee # assert_fee_amount(fee, Decimal(len(res["hex"]) / 2), Decimal("0.000001")) self.log.info( "If inputs are specified, do not automatically add more...") res = self.test_send(from_wallet=w0, to_wallet=w1, amount=51, inputs=[], add_to_wallet=False) assert res["complete"] utxo1 = w0.listunspent()[0] assert_equal(utxo1["amount"], 50) self.test_send(from_wallet=w0, to_wallet=w1, amount=51, inputs=[utxo1], expect_error=(-4, "Insufficient funds")) self.test_send(from_wallet=w0, to_wallet=w1, amount=51, inputs=[utxo1], add_inputs=False, expect_error=(-4, "Insufficient funds")) res = self.test_send(from_wallet=w0, to_wallet=w1, amount=51, inputs=[utxo1], add_inputs=True, add_to_wallet=False) assert res["complete"] self.log.info("Manual change address and position...") self.test_send( from_wallet=w0, to_wallet=w1, amount=1, change_address="not an address", expect_error=(-5, "Change address must be a valid umkoin address")) change_address = w0.getnewaddress() self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=False, change_address=change_address) assert res["complete"] res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=False, change_address=change_address, change_position=0) assert res["complete"] assert_equal( self.nodes[0].decodepsbt( res["psbt"])["tx"]["vout"][0]["scriptPubKey"]["address"], change_address) res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=False, change_type="legacy", change_position=0) assert res["complete"] change_address = self.nodes[0].decodepsbt( res["psbt"])["tx"]["vout"][0]["scriptPubKey"]["address"] assert change_address[0] == "m" or change_address[0] == "n" self.log.info("Set lock time...") height = self.nodes[0].getblockchaininfo()["blocks"] res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, locktime=height + 1) assert res["complete"] assert res["txid"] txid = res["txid"] # Although the wallet finishes the transaction, it can't be added to the mempool yet: hex = self.nodes[0].gettransaction(res["txid"])["hex"] res = self.nodes[0].testmempoolaccept([hex]) assert not res[0]["allowed"] assert_equal(res[0]["reject-reason"], "non-final") # It shouldn't be confirmed in the next block self.generate(self.nodes[0], 1) assert_equal(self.nodes[0].gettransaction(txid)["confirmations"], 0) # The mempool should allow it now: res = self.nodes[0].testmempoolaccept([hex]) assert res[0]["allowed"] # Don't wait for wallet to add it to the mempool: res = self.nodes[0].sendrawtransaction(hex) self.generate(self.nodes[0], 1) assert_equal(self.nodes[0].gettransaction(txid)["confirmations"], 1) self.log.info("Lock unspents...") utxo1 = w0.listunspent()[0] 
assert_greater_than(utxo1["amount"], 1) res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, inputs=[utxo1], add_to_wallet=False, lock_unspents=True) assert res["complete"] locked_coins = w0.listlockunspent() assert_equal(len(locked_coins), 1) # Locked coins are automatically unlocked when manually selected res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, inputs=[utxo1], add_to_wallet=False) assert res["complete"] self.log.info("Replaceable...") res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=True, replaceable=True) assert res["complete"] assert_equal( self.nodes[0].gettransaction(res["txid"])["bip125-replaceable"], "yes") res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=True, replaceable=False) assert res["complete"] assert_equal( self.nodes[0].gettransaction(res["txid"])["bip125-replaceable"], "no") self.log.info("Subtract fee from output") self.test_send(from_wallet=w0, to_wallet=w1, amount=1, subtract_fee_from_outputs=[0]) self.log.info("Include unsafe inputs") self.nodes[1].createwallet(wallet_name="w5") w5 = self.nodes[1].get_wallet_rpc("w5") self.test_send(from_wallet=w0, to_wallet=w5, amount=2) self.test_send(from_wallet=w5, to_wallet=w0, amount=1, expect_error=(-4, "Insufficient funds")) res = self.test_send(from_wallet=w5, to_wallet=w0, amount=1, include_unsafe=True) assert res["complete"] self.log.info("External outputs") eckey = ECKey() eckey.generate() privkey = bytes_to_wif(eckey.get_bytes()) self.nodes[1].createwallet("extsend") ext_wallet = self.nodes[1].get_wallet_rpc("extsend") self.nodes[1].createwallet("extfund") ext_fund = self.nodes[1].get_wallet_rpc("extfund") # Make a weird but signable script. sh(pkh()) descriptor accomplishes this desc = descsum_create("sh(pkh({}))".format(privkey)) if self.options.descriptors: res = ext_fund.importdescriptors([{ "desc": desc, "timestamp": "now" }]) else: res = ext_fund.importmulti([{"desc": desc, "timestamp": "now"}]) assert res[0]["success"] addr = self.nodes[0].deriveaddresses(desc)[0] addr_info = ext_fund.getaddressinfo(addr) self.nodes[0].sendtoaddress(addr, 10) self.nodes[0].sendtoaddress(ext_wallet.getnewaddress(), 10) self.generate(self.nodes[0], 6) ext_utxo = ext_fund.listunspent(addresses=[addr])[0] # An external input without solving data should result in an error self.test_send(from_wallet=ext_wallet, to_wallet=self.nodes[0], amount=15, inputs=[ext_utxo], add_inputs=True, psbt=True, include_watching=True, expect_error=(-4, "Insufficient funds")) # But funding should work when the solving data is provided res = self.test_send(from_wallet=ext_wallet, to_wallet=self.nodes[0], amount=15, inputs=[ext_utxo], add_inputs=True, psbt=True, include_watching=True, solving_data={ "pubkeys": [addr_info['pubkey']], "scripts": [addr_info["embedded"]["scriptPubKey"]] }) signed = ext_wallet.walletprocesspsbt(res["psbt"]) signed = ext_fund.walletprocesspsbt(res["psbt"]) assert signed["complete"] self.nodes[0].finalizepsbt(signed["psbt"]) res = self.test_send(from_wallet=ext_wallet, to_wallet=self.nodes[0], amount=15, inputs=[ext_utxo], add_inputs=True, psbt=True, include_watching=True, solving_data={"descriptors": [desc]}) signed = ext_wallet.walletprocesspsbt(res["psbt"]) signed = ext_fund.walletprocesspsbt(res["psbt"]) assert signed["complete"] self.nodes[0].finalizepsbt(signed["psbt"]) dec = self.nodes[0].decodepsbt(signed["psbt"]) for i, txin in enumerate(dec["tx"]["vin"]): if txin["txid"] == ext_utxo["txid"] and txin["vout"] == ext_utxo[ "vout"]: input_idx = i 
break psbt_in = dec["inputs"][input_idx] # Calculate the input weight # (prevout + sequence + length of scriptSig + 2 bytes buffer) * 4 + len of scriptwitness len_scriptsig = len(psbt_in["final_scriptSig"]["hex"] ) // 2 if "final_scriptSig" in psbt_in else 0 len_scriptwitness = len( psbt_in["final_scriptwitness"] ["hex"]) // 2 if "final_scriptwitness" in psbt_in else 0 input_weight = ((41 + len_scriptsig + 2) * 4) + len_scriptwitness # Input weight error conditions assert_raises_rpc_error( -8, "Input weights should be specified in inputs rather than in options.", ext_wallet.send, outputs={self.nodes[0].getnewaddress(): 15}, options={ "inputs": [ext_utxo], "input_weights": [{ "txid": ext_utxo["txid"], "vout": ext_utxo["vout"], "weight": 1000 }] }) # Funding should also work when input weights are provided res = self.test_send(from_wallet=ext_wallet, to_wallet=self.nodes[0], amount=15, inputs=[{ "txid": ext_utxo["txid"], "vout": ext_utxo["vout"], "weight": input_weight }], add_inputs=True, psbt=True, include_watching=True, fee_rate=10) signed = ext_wallet.walletprocesspsbt(res["psbt"]) signed = ext_fund.walletprocesspsbt(res["psbt"]) assert signed["complete"] tx = self.nodes[0].finalizepsbt(signed["psbt"]) testres = self.nodes[0].testmempoolaccept([tx["hex"]])[0] assert_equal(testres["allowed"], True) assert_fee_amount(testres["fees"]["base"], testres["vsize"], Decimal(0.0001))
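# The input-weight arithmetic above can be checked by hand. Illustrative sketch
# only (the 107-byte scriptSig below is an assumed typical P2PKH size, not a
# value taken from this test):
def estimate_input_weight(len_scriptsig, len_scriptwitness=0):
    # 36-byte prevout + 4-byte sequence + 1-byte scriptSig length prefix = 41,
    # plus a 2-byte buffer, all weighted 4x as non-witness data, plus the raw
    # witness size.
    return (41 + len_scriptsig + 2) * 4 + len_scriptwitness

# e.g. a legacy-style input with a 107-byte scriptSig and no witness:
# (41 + 107 + 2) * 4 + 0 = 600 weight units
assert estimate_input_weight(107) == 600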
def run_test(self): node = self.nodes[0] # duplicate the deterministic sig test from src/test/key_tests.cpp privkey = ECKey() privkey.set( bytes.fromhex( "12b004fff7f4b69ef8650e767f18f11ede158148b425660723b9f9a66e61f747" ), True) pubkey = privkey.get_pubkey() self.log.info( "Check the node is signalling the avalanche service bit only if there is a proof." ) assert_equal( int(node.getnetworkinfo()['localservices'], 16) & NODE_AVALANCHE, 0) # Create stakes by mining blocks addrkey0 = node.get_deterministic_priv_key() blockhashes = node.generatetoaddress(2, addrkey0.address) stakes = create_coinbase_stakes(node, [blockhashes[0]], addrkey0.key) proof_sequence = 11 proof_expiration = 12 proof = node.buildavalancheproof(proof_sequence, proof_expiration, pubkey.get_bytes().hex(), stakes) # Restart the node self.restart_node( 0, self.extra_args[0] + [ "-avaproof={}".format(proof), "-avamasterkey=cND2ZvtabDbJ1gucx9GWH6XT9kgTAqfb6cotPt5Q5CyxVDhid2EN", ]) assert_equal( int(node.getnetworkinfo()['localservices'], 16) & NODE_AVALANCHE, NODE_AVALANCHE) def check_avahello(args): # Restart the node with the given args self.restart_node(0, self.extra_args[0] + args) peer = get_ava_p2p_interface(node) avahello = peer.wait_for_avahello().hello avakey = ECPubKey() avakey.set(bytes.fromhex(node.getavalanchekey())) assert avakey.verify_schnorr(avahello.sig, avahello.get_sighash(peer)) self.log.info( "Test the avahello signature with a generated delegation") check_avahello([ "-avaproof={}".format(proof), "-avamasterkey=cND2ZvtabDbJ1gucx9GWH6XT9kgTAqfb6cotPt5Q5CyxVDhid2EN" ]) master_key = ECKey() master_key.generate() limited_id = FromHex(AvalancheProof(), proof).limited_proofid delegation = node.delegateavalancheproof( f"{limited_id:0{64}x}", bytes_to_wif(privkey.get_bytes()), master_key.get_pubkey().get_bytes().hex(), ) self.log.info("Test the avahello signature with a supplied delegation") check_avahello([ "-avaproof={}".format(proof), "-avadelegation={}".format(delegation), "-avamasterkey={}".format(bytes_to_wif(master_key.get_bytes())), ]) stakes = create_coinbase_stakes(node, [blockhashes[1]], addrkey0.key) interface_proof_hex = node.buildavalancheproof( proof_sequence, proof_expiration, pubkey.get_bytes().hex(), stakes) limited_id = FromHex(AvalancheProof(), interface_proof_hex).limited_proofid # delegate delegated_key = ECKey() delegated_key.generate() interface_delegation_hex = node.delegateavalancheproof( f"{limited_id:0{64}x}", bytes_to_wif(privkey.get_bytes()), delegated_key.get_pubkey().get_bytes().hex(), None) self.log.info("Test that wrong avahello signature causes a ban") bad_interface = get_ava_p2p_interface(node) wrong_key = ECKey() wrong_key.generate() with node.assert_debug_log([ "Misbehaving", "peer=1 (0 -> 100) BAN THRESHOLD EXCEEDED: invalid-avahello-signature" ]): bad_interface.send_avahello(interface_delegation_hex, wrong_key) bad_interface.wait_for_disconnect() self.log.info( 'Check that receiving a valid avahello triggers a proof getdata request' ) good_interface = get_ava_p2p_interface(node) proofid = good_interface.send_avahello(interface_delegation_hex, delegated_key) def getdata_found(): with p2p_lock: return good_interface.last_message.get( "getdata") and good_interface.last_message["getdata"].inv[ -1].hash == proofid wait_until(getdata_found) self.log.info('Check that we can download the proof from our peer') node_proofid = FromHex(AvalancheProof(), proof).proofid def wait_for_proof_validation(): # Connect some blocks to trigger the proof verification node.generate(1) 
wait_until(lambda: node_proofid in get_proof_ids(node)) wait_for_proof_validation() getdata = msg_getdata([CInv(MSG_AVA_PROOF, node_proofid)]) self.log.info( "Proof has been inv'ed recently, check it can be requested") good_interface.send_message(getdata) def proof_received(peer): with p2p_lock: return peer.last_message.get("avaproof") and peer.last_message[ "avaproof"].proof.proofid == node_proofid wait_until(lambda: proof_received(good_interface)) # Restart the node self.restart_node( 0, self.extra_args[0] + [ "-avaproof={}".format(proof), "-avamasterkey=cND2ZvtabDbJ1gucx9GWH6XT9kgTAqfb6cotPt5Q5CyxVDhid2EN", ]) wait_for_proof_validation() self.log.info( "The proof has not been announced, it cannot be requested") peer = get_ava_p2p_interface(node, services=NODE_NETWORK) peer.send_message(getdata) # Give enough time for the node to answer. Since we cannot check for a # non-event this is the best we can do time.sleep(2) assert not proof_received(peer) self.log.info("The proof is known for long enough to be requested") current_time = int(time.time()) node.setmocktime(current_time + UNCONDITIONAL_RELAY_DELAY) peer.send_message(getdata) wait_until(lambda: proof_received(peer))
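# The requests above all follow one pattern: send a getdata for the proof id and
# see whether an avaproof message comes back. Illustrative sketch, reusing the
# helpers already defined in this test (proof_received, wait_until,
# UNCONDITIONAL_RELAY_DELAY); `peer` is assumed to be a connected P2P interface:
def request_proof(peer, proofid):
    peer.send_message(msg_getdata([CInv(MSG_AVA_PROOF, proofid)]))

# Not announced and not known for long enough -> the node stays silent:
#   request_proof(peer, node_proofid); time.sleep(2); assert not proof_received(peer)
# Jump past the unconditional relay delay -> the request is served:
#   node.setmocktime(int(time.time()) + UNCONDITIONAL_RELAY_DELAY)
#   request_proof(peer, node_proofid)
#   wait_until(lambda: proof_received(peer))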
def run_test(self): node = self.nodes[0] # Build a fake quorum of nodes. def get_node(): n = TestNode() node.add_p2p_connection( n, services=NODE_NETWORK | NODE_AVALANCHE) n.wait_for_verack() # Get our own node id so we can use it later. n.nodeid = node.getpeerinfo()[-1]['id'] return n def get_quorum(): return [get_node() for _ in range(0, QUORUM_NODE_COUNT)] # Pick on node from the quorum for polling. quorum = get_quorum() poll_node = quorum[0] # Generate many block and poll for them. addrkey0 = node.get_deterministic_priv_key() blockhashes = node.generatetoaddress(100, addrkey0.address) # Use the first coinbase to create a stake stakes = get_stakes(node, [blockhashes[0]], addrkey0.key) fork_node = self.nodes[1] # Make sure the fork node has synced the blocks self.sync_blocks([node, fork_node]) # Get the key so we can verify signatures. avakey = ECPubKey() avakey.set(bytes.fromhex(node.getavalanchekey())) self.log.info("Poll for the chain tip...") best_block_hash = int(node.getbestblockhash(), 16) poll_node.send_poll([best_block_hash]) def assert_response(expected): response = poll_node.wait_for_avaresponse() r = response.response assert_equal(r.cooldown, 0) # Verify signature. assert avakey.verify_schnorr(response.sig, r.get_hash()) votes = r.votes assert_equal(len(votes), len(expected)) for i in range(0, len(votes)): assert_equal(repr(votes[i]), repr(expected[i])) assert_response([AvalancheVote(BLOCK_ACCEPTED, best_block_hash)]) self.log.info("Poll for a selection of blocks...") various_block_hashes = [ int(node.getblockhash(0), 16), int(node.getblockhash(1), 16), int(node.getblockhash(10), 16), int(node.getblockhash(25), 16), int(node.getblockhash(42), 16), int(node.getblockhash(96), 16), int(node.getblockhash(99), 16), int(node.getblockhash(100), 16), ] poll_node.send_poll(various_block_hashes) assert_response([AvalancheVote(BLOCK_ACCEPTED, h) for h in various_block_hashes]) self.log.info( "Poll for a selection of blocks, but some are now invalid...") invalidated_block = node.getblockhash(76) node.invalidateblock(invalidated_block) # We need to send the coin to a new address in order to make sure we do # not regenerate the same block. 
node.generatetoaddress( 26, 'bchreg:pqv2r67sgz3qumufap3h2uuj0zfmnzuv8v7ej0fffv') node.reconsiderblock(invalidated_block) poll_node.send_poll(various_block_hashes) assert_response([AvalancheVote(BLOCK_ACCEPTED, h) for h in various_block_hashes[:5]] + [AvalancheVote(BLOCK_FORK, h) for h in various_block_hashes[-3:]]) self.log.info("Poll for unknown blocks...") various_block_hashes = [ int(node.getblockhash(0), 16), int(node.getblockhash(25), 16), int(node.getblockhash(42), 16), various_block_hashes[5], various_block_hashes[6], various_block_hashes[7], random.randrange(1 << 255, (1 << 256) - 1), random.randrange(1 << 255, (1 << 256) - 1), random.randrange(1 << 255, (1 << 256) - 1), ] poll_node.send_poll(various_block_hashes) assert_response([AvalancheVote(BLOCK_ACCEPTED, h) for h in various_block_hashes[:3]] + [AvalancheVote(BLOCK_FORK, h) for h in various_block_hashes[3:6]] + [AvalancheVote(BLOCK_UNKNOWN, h) for h in various_block_hashes[-3:]]) self.log.info("Trigger polling from the node...") # duplicate the deterministic sig test from src/test/key_tests.cpp privkey = ECKey() privkey.set(bytes.fromhex( "12b004fff7f4b69ef8650e767f18f11ede158148b425660723b9f9a66e61f747"), True) pubkey = privkey.get_pubkey() proof_sequence = 11 proof_expiration = 12 proof = node.buildavalancheproof( proof_sequence, proof_expiration, pubkey.get_bytes().hex(), stakes) # Activate the quorum. for n in quorum: success = node.addavalanchenode( n.nodeid, pubkey.get_bytes().hex(), proof) assert success is True self.log.info("Testing getavalanchepeerinfo...") avapeerinfo = node.getavalanchepeerinfo() # There is a single peer because all nodes share the same proof. assert_equal(len(avapeerinfo), 1) assert_equal(avapeerinfo[0]["peerid"], 0) assert_equal(avapeerinfo[0]["nodecount"], len(quorum)) # The first avalanche node index is 1, because 0 is self.nodes[1]. assert_equal(sorted(avapeerinfo[0]["nodes"]), list(range(1, QUORUM_NODE_COUNT + 1))) assert_equal(avapeerinfo[0]["sequence"], proof_sequence) assert_equal(avapeerinfo[0]["expiration"], proof_expiration) assert_equal(avapeerinfo[0]["master"], pubkey.get_bytes().hex()) assert_equal(avapeerinfo[0]["proof"], proof) assert_equal(len(avapeerinfo[0]["stakes"]), 1) assert_equal(avapeerinfo[0]["stakes"][0]["txid"], stakes[0]['txid']) def can_find_block_in_poll(hash, resp=BLOCK_ACCEPTED): found_hash = False for n in quorum: poll = n.get_avapoll_if_available() # That node has not received a poll if poll is None: continue # We got a poll, check for the hash and repond votes = [] for inv in poll.invs: # Vote yes to everything r = BLOCK_ACCEPTED # Look for what we expect if inv.hash == hash: r = resp found_hash = True votes.append(AvalancheVote(r, inv.hash)) n.send_avaresponse(poll.round, votes, privkey) return found_hash # Now that we have a peer, we should start polling for the tip. hash_tip = int(node.getbestblockhash(), 16) wait_until(lambda: can_find_block_in_poll(hash_tip), timeout=5) # Make sure the fork node has synced the blocks self.sync_blocks([node, fork_node]) # Create a fork 2 blocks deep. This should trigger polling. fork_node.invalidateblock(fork_node.getblockhash(100)) fork_address = fork_node.get_deterministic_priv_key().address fork_node.generatetoaddress(2, fork_address) # Because the new tip is a deep reorg, the node will not accept it # right away, but poll for it. 
def parked_block(blockhash): for tip in node.getchaintips(): if tip["hash"] == blockhash: assert tip["status"] != "active" return tip["status"] == "parked" return False fork_tip = fork_node.getbestblockhash() wait_until(lambda: parked_block(fork_tip)) self.log.info("Answer all polls to finalize...") hash_to_find = int(fork_tip, 16) def has_accepted_new_tip(): can_find_block_in_poll(hash_to_find) return node.getbestblockhash() == fork_tip # Because everybody answers yes, the node will accept that block. wait_until(has_accepted_new_tip, timeout=15) assert_equal(node.getbestblockhash(), fork_tip) self.log.info("Answer all polls to park...") node.generate(1) tip_to_park = node.getbestblockhash() hash_to_find = int(tip_to_park, 16) assert(tip_to_park != fork_tip) def has_parked_new_tip(): can_find_block_in_poll(hash_to_find, BLOCK_PARKED) return node.getbestblockhash() == fork_tip # Because everybody answers no, the node will park that block. wait_until(has_parked_new_tip, timeout=15) assert_equal(node.getbestblockhash(), fork_tip) self.log.info( "Check the node is signalling the avalanche service bit only if there is a proof.") assert_equal( int(node.getnetworkinfo()['localservices'], 16) & NODE_AVALANCHE, 0) # Restart the node self.restart_node(0, self.extra_args[0] + [ "-avaproof={}".format(proof), "-avamasterkey=cND2ZvtabDbJ1gucx9GWH6XT9kgTAqfb6cotPt5Q5CyxVDhid2EN", ]) assert_equal( int(node.getnetworkinfo()['localservices'], 16) & NODE_AVALANCHE, NODE_AVALANCHE) self.log.info("Test the avahello signature") quorum = get_quorum() poll_node = quorum[0] avahello = poll_node.wait_for_avahello().hello avakey.set(bytes.fromhex(node.getavalanchekey())) assert avakey.verify_schnorr( avahello.sig, avahello.get_sighash(poll_node))
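# assert_response above bundles two checks: the response must carry a valid
# Schnorr signature from the node's avalanche session key, and the votes must
# match what we expect. The same check written as a reusable predicate
# (illustrative sketch using only names already present in this test):
def response_matches(node, response, expected_votes):
    avakey = ECPubKey()
    avakey.set(bytes.fromhex(node.getavalanchekey()))
    r = response.response
    if not avakey.verify_schnorr(response.sig, r.get_hash()):
        return False
    return [repr(v) for v in r.votes] == [repr(v) for v in expected_votes]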
def run_test(self): self.node = self.nodes[0] privkey = byte_to_base58(hash256(struct.pack('<I', 0)), 239) self.node.importprivkey(privkey) self.bootstrap_p2p() # returns a test case that asserts that the current tip was accepted # First generate some blocks so we have some spendable coins block_hashes = self.node.generatetoaddress(100, "qSrM9K6FMhZ29Vkp8Rdk8Jp66bbfpjFETq") for i in range(COINBASE_MATURITY): self.tip = create_block(int(self.node.getbestblockhash(), 16), create_coinbase(self.node.getblockcount()+1), int(time.time())) self.tip.solve() self.sync_all_blocks([self.tip], success=True) for _ in range(10): self.node.sendtoaddress("qSrM9K6FMhZ29Vkp8Rdk8Jp66bbfpjFETq", 1000) block_hashes += self.node.generatetoaddress(1, "qSrM9K6FMhZ29Vkp8Rdk8Jp66bbfpjFETq") blocks = [] for block_hash in block_hashes: blocks.append(self.node.getblock(block_hash)) # These are our staking txs self.staking_prevouts = [] self.bad_vout_staking_prevouts = [] self.bad_txid_staking_prevouts = [] self.unconfirmed_staking_prevouts = [] for unspent in self.node.listunspent(): for block in blocks: if unspent['txid'] in block['tx']: tx_block_time = block['time'] break else: assert(False) if unspent['confirmations'] > COINBASE_MATURITY: self.staking_prevouts.append((COutPoint(int(unspent['txid'], 16), unspent['vout']), int(unspent['amount'])*COIN, tx_block_time)) self.bad_vout_staking_prevouts.append((COutPoint(int(unspent['txid'], 16), 0xff), int(unspent['amount'])*COIN, tx_block_time)) self.bad_txid_staking_prevouts.append((COutPoint(int(unspent['txid'], 16)+1, unspent['vout']), int(unspent['amount'])*COIN, tx_block_time)) if unspent['confirmations'] < COINBASE_MATURITY: self.unconfirmed_staking_prevouts.append((COutPoint(int(unspent['txid'], 16), unspent['vout']), int(unspent['amount'])*COIN, tx_block_time)) # First let 25 seconds pass so that we do not submit blocks directly after the last one #time.sleep(100) block_count = self.node.getblockcount() # 1 A block that does not have the correct timestamp mask t = int(time.time()) | 1 (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.staking_prevouts, nTime=t) self.tip.sign_block(block_sig_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=False, force_send=True, reconnect=True) self._remove_from_staking_prevouts(self.tip) # 2 A block that with a too high reward (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.staking_prevouts, outNValue=30006) self.tip.sign_block(block_sig_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=False, reconnect=True) self._remove_from_staking_prevouts(self.tip) # 3 A block with an incorrect block sig bad_key = ECKey() bad_key.set(hash256(b'horse staple battery'), False) (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.staking_prevouts) self.tip.sign_block(bad_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=False, reconnect=True, force_send=True) self._remove_from_staking_prevouts(self.tip) # 4 A block that stakes with txs with too few confirmations (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.unconfirmed_staking_prevouts) self.tip.sign_block(block_sig_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=False, reconnect=True, force_send=True) self._remove_from_staking_prevouts(self.tip) # 5 A block that with a coinbase reward (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.staking_prevouts) self.tip.vtx[0].vout[0].nValue = 1 self.tip.hashMerkleRoot = self.tip.calc_merkle_root() 
self.tip.sign_block(block_sig_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=False, reconnect=True) self._remove_from_staking_prevouts(self.tip) # 6 A block that with no vout in the coinbase (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.staking_prevouts) self.tip.vtx[0].vout = [] self.tip.hashMerkleRoot = self.tip.calc_merkle_root() self.tip.sign_block(block_sig_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=False, reconnect=True) self._remove_from_staking_prevouts(self.tip) # 7 A block way into the future t = (int(time.time())+100) & 0xfffffff0 (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.staking_prevouts, nTime=t) self.tip.sign_block(block_sig_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=False, force_send=True) self._remove_from_staking_prevouts(self.tip) # 8 No vout in the staking tx (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.staking_prevouts) self.tip.vtx[1].vout = [] self.tip.hashMerkleRoot = self.tip.calc_merkle_root() self.tip.sign_block(block_sig_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=False, reconnect=True) self._remove_from_staking_prevouts(self.tip) # 9 Unsigned coinstake. (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.staking_prevouts, signStakeTx=False) self.tip.sign_block(block_sig_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=False, reconnect=True) self._remove_from_staking_prevouts(self.tip) # 10 A block without a coinstake tx. (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.staking_prevouts) self.tip.vtx.pop(-1) self.tip.hashMerkleRoot = self.tip.calc_merkle_root() self.tip.sign_block(block_sig_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=False, reconnect=True) self._remove_from_staking_prevouts(self.tip) # 11 A block without a coinbase. 
(self.tip, block_sig_key) = self.create_unsigned_pos_block(self.staking_prevouts) self.tip.vtx.pop(0) self.tip.hashMerkleRoot = self.tip.calc_merkle_root() self.tip.sign_block(block_sig_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=False, reconnect=True) self._remove_from_staking_prevouts(self.tip) # 12 A block where the coinbase has no outputs (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.staking_prevouts) self.tip.vtx[0].vout = [] self.tip.hashMerkleRoot = self.tip.calc_merkle_root() self.tip.sign_block(block_sig_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=False, reconnect=True) self._remove_from_staking_prevouts(self.tip) # 13 A block where the coinstake has no outputs (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.staking_prevouts) self.tip.vtx[1].vout.pop(-1) self.tip.vtx[1].vout.pop(-1) stake_tx_signed_raw_hex = self.node.signrawtransactionwithwallet(bytes_to_hex_str(self.tip.vtx[1].serialize()))['hex'] f = io.BytesIO(hex_str_to_bytes(stake_tx_signed_raw_hex)) self.tip.vtx[1] = CTransaction() self.tip.vtx[1].deserialize(f) self.tip.hashMerkleRoot = self.tip.calc_merkle_root() self.tip.sign_block(block_sig_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=False, reconnect=True) self._remove_from_staking_prevouts(self.tip) # 14 A block with an incorrect hashStateRoot (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.staking_prevouts) self.tip.hashStateRoot = 0xe self.tip.sign_block(block_sig_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=False, reconnect=True) self._remove_from_staking_prevouts(self.tip) # 15 A block with an incorrect hashUTXORoot (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.staking_prevouts) self.tip.hashUTXORoot = 0xe self.tip.sign_block(block_sig_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=False, reconnect=True) self._remove_from_staking_prevouts(self.tip) # 16 A block with an a signature on wrong header data (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.staking_prevouts) self.tip.sign_block(block_sig_key) self.tip.nNonce = 0xfffe self.tip.rehash() self.sync_all_blocks([self.tip], success=False, reconnect=True, force_send=True) self._remove_from_staking_prevouts(self.tip) # 17 A block with where the pubkey of the second output of the coinstake has been modified after block signing (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.staking_prevouts) scriptPubKey = self.tip.vtx[1].vout[1].scriptPubKey # Modify a byte of the pubkey self.tip.vtx[1].vout[1].scriptPubKey = scriptPubKey[0:20] + bytes.fromhex(hex(ord(scriptPubKey[20:21])+1)[2:4]) + scriptPubKey[21:] assert_equal(len(scriptPubKey), len(self.tip.vtx[1].vout[1].scriptPubKey)) stake_tx_signed_raw_hex = self.node.signrawtransactionwithwallet(bytes_to_hex_str(self.tip.vtx[1].serialize()))['hex'] f = io.BytesIO(hex_str_to_bytes(stake_tx_signed_raw_hex)) self.tip.vtx[1] = CTransaction() self.tip.vtx[1].deserialize(f) self.tip.hashMerkleRoot = self.tip.calc_merkle_root() self.tip.sign_block(block_sig_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=False, reconnect=True) self._remove_from_staking_prevouts(self.tip) # 18. 
A block in the past t = (int(time.time())-700) & 0xfffffff0 (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.staking_prevouts, nTime=t) self.tip.sign_block(block_sig_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=False, force_send=True, reconnect=True) self._remove_from_staking_prevouts(self.tip) # 19. A block with too many coinbase vouts (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.staking_prevouts) self.tip.vtx[0].vout.append(CTxOut(0, CScript([OP_TRUE]))) self.tip.vtx[0].rehash() self.tip.hashMerkleRoot = self.tip.calc_merkle_root() self.tip.sign_block(block_sig_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=False, reconnect=True) self._remove_from_staking_prevouts(self.tip) # 20. A block where the coinstake's vin is not the prevout specified in the block (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.staking_prevouts, coinStakePrevout=self.staking_prevouts[-1][0]) self.tip.sign_block(block_sig_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=False, reconnect=True) self._remove_from_staking_prevouts(self.tip) # 21. A block that stakes with valid txs but invalid vouts (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.bad_vout_staking_prevouts) self.tip.sign_block(block_sig_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=False, reconnect=True, force_send=True) self._remove_from_staking_prevouts(self.tip) # 22. A block that stakes with txs that do not exist (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.bad_txid_staking_prevouts) self.tip.sign_block(block_sig_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=False, reconnect=True, force_send=True) self._remove_from_staking_prevouts(self.tip) # Make sure for certain that no blocks were accepted. (This is also to make sure that no segfaults ocurred) assert_equal(self.node.getblockcount(), block_count) # And at last, make sure that a valid pos block is accepted (self.tip, block_sig_key) = self.create_unsigned_pos_block(self.staking_prevouts) self.tip.sign_block(block_sig_key) self.tip.rehash() self.sync_all_blocks([self.tip], success=True) assert_equal(self.node.getblockcount(), block_count+1)
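# Cases #1, #7 and #18 above all revolve around the block timestamp: the valid
# blocks use nTime values masked to 16-second boundaries (t & 0xfffffff0), so a
# timestamp with any low bit set (t | 1) fails the mask even when it is
# otherwise current. Minimal illustration of that masking (assumed values):
def mask_block_time(t):
    return t & 0xfffffff0

t = 1600000123
assert mask_block_time(t) % 16 == 0   # masked times sit on 16-second boundaries
assert (t | 1) % 16 != 0              # a time with the low bit set does not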
def run_test(self): node = self.nodes[0] # Build a fake quorum of nodes. def get_quorum(): return [ get_ava_p2p_interface(node) for _ in range(0, QUORUM_NODE_COUNT) ] # Pick on node from the quorum for polling. quorum = get_quorum() poll_node = quorum[0] # Generate many block and poll for them. addrkey0 = node.get_deterministic_priv_key() blockhashes = node.generatetoaddress(100, addrkey0.address) # Use the first coinbase to create a stake stakes = create_coinbase_stakes(node, [blockhashes[0]], addrkey0.key) fork_node = self.nodes[1] # Make sure the fork node has synced the blocks self.sync_blocks([node, fork_node]) # Get the key so we can verify signatures. avakey = ECPubKey() avakey.set(bytes.fromhex(node.getavalanchekey())) self.log.info("Poll for the chain tip...") best_block_hash = int(node.getbestblockhash(), 16) poll_node.send_poll([best_block_hash]) def assert_response(expected): response = poll_node.wait_for_avaresponse() r = response.response assert_equal(r.cooldown, 0) # Verify signature. assert avakey.verify_schnorr(response.sig, r.get_hash()) votes = r.votes assert_equal(len(votes), len(expected)) for i in range(0, len(votes)): assert_equal(repr(votes[i]), repr(expected[i])) assert_response( [AvalancheVote(AvalancheVoteError.ACCEPTED, best_block_hash)]) self.log.info("Poll for a selection of blocks...") various_block_hashes = [ int(node.getblockhash(0), 16), int(node.getblockhash(1), 16), int(node.getblockhash(10), 16), int(node.getblockhash(25), 16), int(node.getblockhash(42), 16), int(node.getblockhash(96), 16), int(node.getblockhash(99), 16), int(node.getblockhash(100), 16), ] poll_node.send_poll(various_block_hashes) assert_response([ AvalancheVote(AvalancheVoteError.ACCEPTED, h) for h in various_block_hashes ]) self.log.info( "Poll for a selection of blocks, but some are now invalid...") invalidated_block = node.getblockhash(76) node.invalidateblock(invalidated_block) # We need to send the coin to a new address in order to make sure we do # not regenerate the same block. node.generatetoaddress( 26, 'ecregtest:pqv2r67sgz3qumufap3h2uuj0zfmnzuv8v38gtrh5v') node.reconsiderblock(invalidated_block) poll_node.send_poll(various_block_hashes) assert_response([ AvalancheVote(AvalancheVoteError.ACCEPTED, h) for h in various_block_hashes[:5] ] + [ AvalancheVote(AvalancheVoteError.FORK, h) for h in various_block_hashes[-3:] ]) self.log.info("Poll for unknown blocks...") various_block_hashes = [ int(node.getblockhash(0), 16), int(node.getblockhash(25), 16), int(node.getblockhash(42), 16), various_block_hashes[5], various_block_hashes[6], various_block_hashes[7], random.randrange(1 << 255, (1 << 256) - 1), random.randrange(1 << 255, (1 << 256) - 1), random.randrange(1 << 255, (1 << 256) - 1), ] poll_node.send_poll(various_block_hashes) assert_response([ AvalancheVote(AvalancheVoteError.ACCEPTED, h) for h in various_block_hashes[:3] ] + [ AvalancheVote(AvalancheVoteError.FORK, h) for h in various_block_hashes[3:6] ] + [ AvalancheVote(AvalancheVoteError.UNKNOWN, h) for h in various_block_hashes[-3:] ]) self.log.info("Trigger polling from the node...") # duplicate the deterministic sig test from src/test/key_tests.cpp privkey = ECKey() privkey.set( bytes.fromhex( "12b004fff7f4b69ef8650e767f18f11ede158148b425660723b9f9a66e61f747" ), True) proof_sequence = 11 proof_expiration = 12 proof = node.buildavalancheproof(proof_sequence, proof_expiration, bytes_to_wif(privkey.get_bytes()), stakes) # Activate the quorum. 
for n in quorum: success = node.addavalanchenode( n.nodeid, privkey.get_pubkey().get_bytes().hex(), proof) assert success is True def can_find_block_in_poll(hash, resp=AvalancheVoteError.ACCEPTED): found_hash = False for n in quorum: poll = n.get_avapoll_if_available() # That node has not received a poll if poll is None: continue # We got a poll, check for the hash and repond votes = [] for inv in poll.invs: # Vote yes to everything r = AvalancheVoteError.ACCEPTED # Look for what we expect if inv.hash == hash: r = resp found_hash = True votes.append(AvalancheVote(r, inv.hash)) n.send_avaresponse(poll.round, votes, privkey) return found_hash # Now that we have a peer, we should start polling for the tip. hash_tip = int(node.getbestblockhash(), 16) self.wait_until(lambda: can_find_block_in_poll(hash_tip), timeout=5) # Make sure the fork node has synced the blocks self.sync_blocks([node, fork_node]) # Create a fork 2 blocks deep. This should trigger polling. fork_node.invalidateblock(fork_node.getblockhash(100)) fork_address = fork_node.get_deterministic_priv_key().address fork_node.generatetoaddress(2, fork_address) # Because the new tip is a deep reorg, the node will not accept it # right away, but poll for it. def parked_block(blockhash): for tip in node.getchaintips(): if tip["hash"] == blockhash: assert tip["status"] != "active" return tip["status"] == "parked" return False fork_tip = fork_node.getbestblockhash() self.wait_until(lambda: parked_block(fork_tip)) self.log.info("Answer all polls to finalize...") hash_to_find = int(fork_tip, 16) def has_accepted_new_tip(): can_find_block_in_poll(hash_to_find) return node.getbestblockhash() == fork_tip # Because everybody answers yes, the node will accept that block. self.wait_until(has_accepted_new_tip, timeout=15) assert_equal(node.getbestblockhash(), fork_tip) self.log.info("Answer all polls to park...") node.generate(1) tip_to_park = node.getbestblockhash() hash_to_find = int(tip_to_park, 16) assert (tip_to_park != fork_tip) def has_parked_new_tip(): can_find_block_in_poll(hash_to_find, AvalancheVoteError.PARKED) return node.getbestblockhash() == fork_tip # Because everybody answers no, the node will park that block. self.wait_until(has_parked_new_tip, timeout=15) assert_equal(node.getbestblockhash(), fork_tip) self.log.info( "Check the node is discouraging unexpected avaresponses.") with node.assert_debug_log( ['Misbehaving', 'peer=1 (0 -> 2): unexpected-ava-response']): # unknown voting round poll_node.send_avaresponse(round=2**32 - 1, votes=[], privkey=privkey)
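# Every avalanche message in this test is authenticated with a Schnorr signature
# made with the framework's ECKey/ECPubKey wrappers (hash256 is the framework's
# double-SHA256 helper). A minimal sign/verify round trip with those wrappers,
# as an illustrative sketch with a throwaway key and message:
key = ECKey()
key.generate()
digest = hash256(b'avalanche message to authenticate')
sig = key.sign_schnorr(digest)
assert key.get_pubkey().verify_schnorr(sig, digest)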
def create_unsigned_pos_block(self, staking_prevouts, nTime=None, outNValue=10002, signStakeTx=True, bestBlockHash=None, coinStakePrevout=None): if not nTime: current_time = int(time.time()) + 15 nTime = current_time & 0xfffffff0 if not bestBlockHash: bestBlockHash = self.node.getbestblockhash() block_height = self.node.getblockcount() else: block_height = self.node.getblock(bestBlockHash)['height'] parent_block_stake_modifier = int( self.node.getblock(bestBlockHash)['modifier'], 16) parent_block_raw_hex = self.node.getblock(bestBlockHash, False) f = io.BytesIO(hex_str_to_bytes(parent_block_raw_hex)) parent_block = CBlock() parent_block.deserialize(f) coinbase = create_coinbase(block_height + 1) coinbase.vout[0].nValue = 0 coinbase.vout[0].scriptPubKey = b"" coinbase.rehash() block = create_block(int(bestBlockHash, 16), coinbase, nTime) block.hashPrevBlock = int(bestBlockHash, 16) if not block.solve_stake(parent_block_stake_modifier, staking_prevouts): return None # create a new private key used for block signing. block_sig_key = ECKey() block_sig_key.set(hash256(struct.pack('<I', 0)), False) pubkey = block_sig_key.get_pubkey().get_bytes() scriptPubKey = CScript([pubkey, OP_CHECKSIG]) stake_tx_unsigned = CTransaction() if not coinStakePrevout: coinStakePrevout = block.prevoutStake stake_tx_unsigned.vin.append(CTxIn(coinStakePrevout)) stake_tx_unsigned.vout.append(CTxOut()) stake_tx_unsigned.vout.append( CTxOut(int(outNValue * COIN), scriptPubKey)) stake_tx_unsigned.vout.append( CTxOut(int(outNValue * COIN), scriptPubKey)) if signStakeTx: stake_tx_signed_raw_hex = self.node.signrawtransactionwithwallet( bytes_to_hex_str(stake_tx_unsigned.serialize()))['hex'] f = io.BytesIO(hex_str_to_bytes(stake_tx_signed_raw_hex)) stake_tx_signed = CTransaction() stake_tx_signed.deserialize(f) block.vtx.append(stake_tx_signed) else: block.vtx.append(stake_tx_unsigned) block.hashMerkleRoot = block.calc_merkle_root() return (block, block_sig_key)
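# Typical call pattern for the helper above, as exercised throughout run_test
# (recap for orientation, not an additional test case):
#
#   (tip, block_sig_key) = self.create_unsigned_pos_block(self.staking_prevouts)
#   tip.sign_block(block_sig_key)    # sign the header with the fresh block key
#   tip.rehash()
#   self.sync_all_blocks([tip], success=True)
#
# The invalid-block cases simply mutate the block (coinbase vouts, merkle root,
# nNonce, hashStateRoot, ...) between creating it and signing it, and expect
# success=False.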
def run_test(self): node = self.nodes[0] node.add_p2p_connection(P2PDataStore()) # Set the blocksize to 2MB as initial condition node.setexcessiveblock(self.excessive_block_size) self.genesis_hash = int(node.getbestblockhash(), 16) self.block_heights[self.genesis_hash] = 0 spendable_outputs = [] # save the current tip so it can be spent by a later block def save_spendable_output(): spendable_outputs.append(self.tip) # get an output that we previously marked as spendable def get_spendable_output(): return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0) # move the tip back to a previous block def tip(number): self.tip = self.blocks[number] # adds transactions to the block and updates state def update_block(block_number, new_transactions): block = self.blocks[block_number] self.add_transactions_to_block(block, new_transactions) old_sha256 = block.sha256 make_conform_to_ctor(block) block.hashMerkleRoot = block.calc_merkle_root() block.solve() # Update the internal state just like in next_block self.tip = block if block.sha256 != old_sha256: self.block_heights[ block.sha256] = self.block_heights[old_sha256] del self.block_heights[old_sha256] self.blocks[block_number] = block return block # shorthand for functions block = self.next_block # Create a new block block(0) save_spendable_output() node.p2p.send_blocks_and_test([self.tip], node) # Now we need that block to mature so we can spend the coinbase. maturity_blocks = [] for i in range(105): block(5000 + i) maturity_blocks.append(self.tip) save_spendable_output() node.p2p.send_blocks_and_test(maturity_blocks, node) # collect spendable outputs now to avoid cluttering the code later on out = [] for i in range(100): out.append(get_spendable_output()) # Accept many sigops lots_of_checksigs = CScript([OP_CHECKSIG] * MAX_BLOCK_SIGOPS_PER_MB) block(19, spend=out[0], script=lots_of_checksigs, block_size=ONE_MEGABYTE) node.p2p.send_blocks_and_test([self.tip], node) block(20, spend=out[1], script=lots_of_checksigs, block_size=ONE_MEGABYTE, extra_sigops=1) node.p2p.send_blocks_and_test([self.tip], node, success=False, reject_reason='bad-blk-sigops') # Rewind bad block tip(19) # Accept 40k sigops per block > 1MB and <= 2MB block(21, spend=out[1], script=lots_of_checksigs, extra_sigops=MAX_BLOCK_SIGOPS_PER_MB, block_size=ONE_MEGABYTE + 1) node.p2p.send_blocks_and_test([self.tip], node) # Accept 40k sigops per block > 1MB and <= 2MB block(22, spend=out[2], script=lots_of_checksigs, extra_sigops=MAX_BLOCK_SIGOPS_PER_MB, block_size=2 * ONE_MEGABYTE) node.p2p.send_blocks_and_test([self.tip], node) # Reject more than 40k sigops per block > 1MB and <= 2MB. block(23, spend=out[3], script=lots_of_checksigs, extra_sigops=MAX_BLOCK_SIGOPS_PER_MB + 1, block_size=ONE_MEGABYTE + 1) node.p2p.send_blocks_and_test([self.tip], node, success=False, reject_reason='bad-blk-sigops') # Rewind bad block tip(22) # Reject more than 40k sigops per block > 1MB and <= 2MB. 
block(24, spend=out[3], script=lots_of_checksigs, extra_sigops=MAX_BLOCK_SIGOPS_PER_MB + 1, block_size=2 * ONE_MEGABYTE) node.p2p.send_blocks_and_test([self.tip], node, success=False, reject_reason='bad-blk-sigops') # Rewind bad block tip(22) # Accept 60k sigops per block > 2MB and <= 3MB block(25, spend=out[3], script=lots_of_checksigs, extra_sigops=2 * MAX_BLOCK_SIGOPS_PER_MB, block_size=2 * ONE_MEGABYTE + 1) node.p2p.send_blocks_and_test([self.tip], node) # Accept 60k sigops per block > 2MB and <= 3MB block(26, spend=out[4], script=lots_of_checksigs, extra_sigops=2 * MAX_BLOCK_SIGOPS_PER_MB, block_size=3 * ONE_MEGABYTE) node.p2p.send_blocks_and_test([self.tip], node) # Reject more than 40k sigops per block > 1MB and <= 2MB. block(27, spend=out[5], script=lots_of_checksigs, extra_sigops=2 * MAX_BLOCK_SIGOPS_PER_MB + 1, block_size=2 * ONE_MEGABYTE + 1) node.p2p.send_blocks_and_test([self.tip], node, success=False, reject_reason='bad-blk-sigops') # Rewind bad block tip(26) # Reject more than 40k sigops per block > 1MB and <= 2MB. block(28, spend=out[5], script=lots_of_checksigs, extra_sigops=2 * MAX_BLOCK_SIGOPS_PER_MB + 1, block_size=3 * ONE_MEGABYTE) node.p2p.send_blocks_and_test([self.tip], node, success=False, reject_reason='bad-blk-sigops') # Rewind bad block tip(26) # Too many sigops in one txn too_many_tx_checksigs = CScript([OP_CHECKSIG] * (MAX_BLOCK_SIGOPS_PER_MB + 1)) block(29, spend=out[6], script=too_many_tx_checksigs, block_size=ONE_MEGABYTE + 1) node.p2p.send_blocks_and_test([self.tip], node, success=False, reject_reason='bad-txn-sigops') # Rewind bad block tip(26) # Generate a key pair to test P2SH sigops count private_key = ECKey() private_key.generate() public_key = private_key.get_pubkey().get_bytes() # P2SH # Build the redeem script, hash it, use hash to create the p2sh script redeem_script = CScript([public_key] + [OP_2DUP, OP_CHECKSIGVERIFY] * 5 + [OP_CHECKSIG]) redeem_script_hash = hash160(redeem_script) p2sh_script = CScript([OP_HASH160, redeem_script_hash, OP_EQUAL]) # Create a p2sh transaction p2sh_tx = self.create_tx(out[6], 1, p2sh_script) # Add the transaction to the block block(30) update_block(30, [p2sh_tx]) node.p2p.send_blocks_and_test([self.tip], node) # Creates a new transaction using the p2sh transaction included in the # last block def spend_p2sh_tx(output_script=CScript([OP_TRUE])): # Create the transaction spent_p2sh_tx = CTransaction() spent_p2sh_tx.vin.append(CTxIn(COutPoint(p2sh_tx.sha256, 0), b'')) spent_p2sh_tx.vout.append(CTxOut(1, output_script)) # Sign the transaction using the redeem script sighash = SignatureHashForkId(redeem_script, spent_p2sh_tx, 0, SIGHASH_ALL | SIGHASH_FORKID, p2sh_tx.vout[0].nValue) sig = private_key.sign_ecdsa(sighash) + \ bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID])) spent_p2sh_tx.vin[0].scriptSig = CScript([sig, redeem_script]) spent_p2sh_tx.rehash() return spent_p2sh_tx # Sigops p2sh limit p2sh_sigops_limit = MAX_BLOCK_SIGOPS_PER_MB - \ redeem_script.GetSigOpCount(True) # Too many sigops in one p2sh txn too_many_p2sh_sigops = CScript([OP_CHECKSIG] * (p2sh_sigops_limit + 1)) block(31, spend=out[7], block_size=ONE_MEGABYTE + 1) update_block(31, [spend_p2sh_tx(too_many_p2sh_sigops)]) node.p2p.send_blocks_and_test([self.tip], node, success=False, reject_reason='bad-txn-sigops') # Rewind bad block tip(30) # Max sigops in one p2sh txn max_p2sh_sigops = CScript([OP_CHECKSIG] * (p2sh_sigops_limit)) block(32, spend=out[8], block_size=ONE_MEGABYTE + 1) update_block(32, [spend_p2sh_tx(max_p2sh_sigops)]) 
node.p2p.send_blocks_and_test([self.tip], node) # Ensure that a coinbase with too many sigops is forbidden, even if it # doesn't push the total block count over the limit. b33 = block(33, spend=out[9], block_size=2 * ONE_MEGABYTE) # 20001 sigops b33.vtx[0].vout.append( CTxOut(0, CScript([OP_CHECKMULTISIG] * 1000 + [OP_CHECKDATASIG]))) update_block(33, []) node.p2p.send_blocks_and_test([b33], node, success=False, reject_reason='bad-txn-sigops') # 20000 sigops b33.vtx[0].vout[-1].scriptPubKey = CScript([OP_CHECKMULTISIG] * 1000) update_block(33, []) node.p2p.send_blocks_and_test([b33], node)
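# Blocks 19-28 above pin down how the sigops budget scales with block size:
# 20k sigops up to 1MB, 40k up to 2MB, 60k up to 3MB. That is consistent with a
# budget of MAX_BLOCK_SIGOPS_PER_MB per started megabyte. Illustrative sketch of
# that rule, derived from the accepted/rejected cases above rather than from the
# consensus code:
def max_sigops_for_size(block_size):
    return MAX_BLOCK_SIGOPS_PER_MB * (
        (block_size + ONE_MEGABYTE - 1) // ONE_MEGABYTE)

assert max_sigops_for_size(ONE_MEGABYTE) == MAX_BLOCK_SIGOPS_PER_MB          # blocks 19/20
assert max_sigops_for_size(ONE_MEGABYTE + 1) == 2 * MAX_BLOCK_SIGOPS_PER_MB  # blocks 21-24
assert max_sigops_for_size(3 * ONE_MEGABYTE) == 3 * MAX_BLOCK_SIGOPS_PER_MB  # blocks 25-28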
def run_test(self): self.log.info("Mining blocks...") self.nodes[0].generate(101) self.sync_all() # address address1 = self.nodes[0].getnewaddress() # pubkey address2 = self.nodes[0].getnewaddress() # privkey eckey = ECKey() eckey.generate() address3_privkey = bytes_to_wif(eckey.get_bytes()) address3 = key_to_p2wpkh(eckey.get_pubkey().get_bytes()) self.nodes[0].importprivkey(address3_privkey) # Check only one address address_info = self.nodes[0].getaddressinfo(address1) assert_equal(address_info['ismine'], True) self.sync_all() # Node 1 sync test assert_equal(self.nodes[1].getblockcount(), 101) # Address Test - before import address_info = self.nodes[1].getaddressinfo(address1) assert_equal(address_info['iswatchonly'], False) assert_equal(address_info['ismine'], False) address_info = self.nodes[1].getaddressinfo(address2) assert_equal(address_info['iswatchonly'], False) assert_equal(address_info['ismine'], False) address_info = self.nodes[1].getaddressinfo(address3) assert_equal(address_info['iswatchonly'], False) assert_equal(address_info['ismine'], False) # Send funds to self txnid1 = self.nodes[0].sendtoaddress(address1, 0.1) self.nodes[0].generate(1) rawtxn1 = self.nodes[0].gettransaction(txnid1)['hex'] proof1 = self.nodes[0].gettxoutproof([txnid1]) txnid2 = self.nodes[0].sendtoaddress(address2, 0.05) self.nodes[0].generate(1) rawtxn2 = self.nodes[0].gettransaction(txnid2)['hex'] proof2 = self.nodes[0].gettxoutproof([txnid2]) txnid3 = self.nodes[0].sendtoaddress(address3, 0.025) self.nodes[0].generate(1) rawtxn3 = self.nodes[0].gettransaction(txnid3)['hex'] proof3 = self.nodes[0].gettxoutproof([txnid3]) self.sync_all() # Import with no affiliated address assert_raises_rpc_error(-5, "No addresses", self.nodes[1].importprunedfunds, rawtxn1, proof1) balance1 = self.nodes[1].getbalance() assert_equal(balance1, Decimal(0)) # Import with affiliated address with no rescan self.nodes[1].createwallet('wwatch', disable_private_keys=True) wwatch = self.nodes[1].get_wallet_rpc('wwatch') wwatch.importaddress(address=address2, rescan=False) wwatch.importprunedfunds(rawtransaction=rawtxn2, txoutproof=proof2) assert [tx for tx in wwatch.listtransactions(include_watchonly=True) if tx['txid'] == txnid2] # Import with private key with no rescan w1 = self.nodes[1].get_wallet_rpc(self.default_wallet_name) w1.importprivkey(privkey=address3_privkey, rescan=False) w1.importprunedfunds(rawtxn3, proof3) assert [tx for tx in w1.listtransactions() if tx['txid'] == txnid3] balance3 = w1.getbalance() assert_equal(balance3, Decimal('0.025')) # Addresses Test - after import address_info = w1.getaddressinfo(address1) assert_equal(address_info['iswatchonly'], False) assert_equal(address_info['ismine'], False) address_info = wwatch.getaddressinfo(address2) if self.options.descriptors: assert_equal(address_info['iswatchonly'], False) assert_equal(address_info['ismine'], True) else: assert_equal(address_info['iswatchonly'], True) assert_equal(address_info['ismine'], False) address_info = w1.getaddressinfo(address3) assert_equal(address_info['iswatchonly'], False) assert_equal(address_info['ismine'], True) # Remove transactions assert_raises_rpc_error(-8, "Transaction does not exist in wallet.", w1.removeprunedfunds, txnid1) assert not [tx for tx in w1.listtransactions(include_watchonly=True) if tx['txid'] == txnid1] wwatch.removeprunedfunds(txnid2) assert not [tx for tx in wwatch.listtransactions(include_watchonly=True) if tx['txid'] == txnid2] w1.removeprunedfunds(txnid3) assert not [tx for tx in 
w1.listtransactions(include_watchonly=True) if tx['txid'] == txnid3]
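# The pruned-funds round trip above reduces to three wallet RPCs. Compact sketch
# of that flow (illustrative; `funder` is the node that mined the transaction,
# `pruned` the wallet importing it):
def import_pruned(funder, pruned, txid):
    rawtx = funder.gettransaction(txid)['hex']
    proof = funder.gettxoutproof([txid])   # merkle proof that txid was mined
    # Fails with "No addresses" unless the receiving wallet already knows an
    # address or key involved in the transaction.
    pruned.importprunedfunds(rawtx, proof)

# And the inverse, once the transaction is no longer wanted:
#   pruned.removeprunedfunds(txid)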
def run_test(self): node = self.nodes[0] addrkey0 = node.get_deterministic_priv_key() blockhashes = node.generatetoaddress(2, addrkey0.address) stakes = create_coinbase_stakes(node, [blockhashes[0]], addrkey0.key) privkey = ECKey() privkey.generate() wif_privkey = bytes_to_wif(privkey.get_bytes()) def check_buildavalancheproof_error(error_code, error_message, stakes, master_key=wif_privkey): assert_raises_rpc_error( error_code, error_message, node.buildavalancheproof, # Sequence 0, # Expiration 0, master_key, stakes, ) good_stake = stakes[0] self.log.info("Error cases") check_buildavalancheproof_error(-8, "Invalid master key", [good_stake], master_key=bytes_to_wif(b'f00')) negative_vout = good_stake.copy() negative_vout['vout'] = -1 check_buildavalancheproof_error( -22, "vout cannot be negative", [negative_vout], ) zero_height = good_stake.copy() zero_height['height'] = 0 check_buildavalancheproof_error( -22, "height must be positive", [zero_height], ) negative_height = good_stake.copy() negative_height['height'] = -1 check_buildavalancheproof_error( -22, "height must be positive", [negative_height], ) missing_amount = good_stake.copy() del missing_amount['amount'] check_buildavalancheproof_error( -8, "Missing amount", [missing_amount], ) invalid_privkey = good_stake.copy() invalid_privkey['privatekey'] = 'foobar' check_buildavalancheproof_error( -8, "Invalid private key", [invalid_privkey], ) duplicate_stake = [good_stake] * 2 check_buildavalancheproof_error( -8, "Duplicated stake", duplicate_stake, ) self.log.info("Happy path") assert node.buildavalancheproof(0, 0, wif_privkey, [good_stake]) self.log.info("Check the payout address") self.restart_node(0, extra_args=self.extra_args[0] + ['-legacyavaproof=0']) assert_raises_rpc_error( -8, "A payout address is required if `-legacyavaproof` is false", node.buildavalancheproof, 0, 0, wif_privkey, [good_stake], ) assert_raises_rpc_error( -8, "Invalid payout address", node.buildavalancheproof, 0, 0, wif_privkey, [good_stake], "ecregtest:qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqcrl5mqkq", ) # Happy path node.buildavalancheproof(0, 0, wif_privkey, [good_stake], ADDRESS_ECREG_UNSPENDABLE)
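# For reference, each stake entry handed to buildavalancheproof is a dict with
# the fields manipulated above. Illustrative shape only; the values below are
# made up, and real entries come from create_coinbase_stakes():
example_stake = {
    'txid': 'aa' * 32,           # txid of the staked (coinbase) output
    'vout': 0,                   # must not be negative
    'amount': 50000000,          # value of the staked output
    'height': 1,                 # must be strictly positive
    'privatekey': wif_privkey,   # WIF key that can sign for that output
}
# node.buildavalancheproof(0, 0, wif_privkey, [example_stake])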
def run_test(self): node = self.nodes[0] addrkey0 = node.get_deterministic_priv_key() blockhashes = node.generatetoaddress(2, addrkey0.address) stakes = create_coinbase_stakes(node, [blockhashes[0]], addrkey0.key) privkey = ECKey() privkey.generate() wif_privkey = bytes_to_wif(privkey.get_bytes()) proof_master = privkey.get_pubkey().get_bytes().hex() proof_sequence = 42 proof_expiration = 2000000000 proof = node.buildavalancheproof(proof_sequence, proof_expiration, wif_privkey, stakes) nodeid = add_interface_node(node) def check_addavalanchenode_error(error_code, error_message, nodeid=nodeid, proof=proof, pubkey=proof_master, delegation=None): assert_raises_rpc_error( error_code, error_message, node.addavalanchenode, nodeid, pubkey, proof, delegation, ) self.log.info("Invalid proof") check_addavalanchenode_error(-22, "Proof must be an hexadecimal string", proof="not a proof") check_addavalanchenode_error(-22, "Proof has invalid format", proof="f000") no_stake = node.buildavalancheproof(proof_sequence, proof_expiration, wif_privkey, []) check_addavalanchenode_error(-8, "The proof is invalid: no-stake", proof=no_stake) self.log.info("Node doesn't exist") check_addavalanchenode_error(-8, f"The node does not exist: {nodeid + 1}", nodeid=nodeid + 1) self.log.info("Invalid delegation") dg_privkey = ECKey() dg_privkey.generate() dg_pubkey = dg_privkey.get_pubkey().get_bytes() check_addavalanchenode_error( -22, "Delegation must be an hexadecimal string", pubkey=dg_pubkey.hex(), delegation="not a delegation") check_addavalanchenode_error(-22, "Delegation has invalid format", pubkey=dg_pubkey.hex(), delegation="f000") self.log.info("Delegation mismatch with the proof") delegation_wrong_proofid = AvalancheDelegation() check_addavalanchenode_error( -8, "The delegation does not match the proof", pubkey=dg_pubkey.hex(), delegation=delegation_wrong_proofid.serialize().hex()) proofobj = FromHex(LegacyAvalancheProof(), proof) delegation = AvalancheDelegation( limited_proofid=proofobj.limited_proofid, proof_master=proofobj.master, ) self.log.info("Delegation with bad signature") bad_level = AvalancheDelegationLevel(pubkey=dg_pubkey, ) delegation.levels.append(bad_level) check_addavalanchenode_error(-8, "The delegation is invalid", pubkey=dg_pubkey.hex(), delegation=delegation.serialize().hex()) delegation.levels = [] level = AvalancheDelegationLevel(pubkey=dg_pubkey, sig=privkey.sign_schnorr( hash256(delegation.getid() + ser_string(dg_pubkey)))) delegation.levels.append(level) self.log.info("Key mismatch with the proof") check_addavalanchenode_error( -5, "The public key does not match the proof", pubkey=dg_pubkey.hex(), ) self.log.info("Key mismatch with the delegation") random_privkey = ECKey() random_privkey.generate() random_pubkey = random_privkey.get_pubkey() check_addavalanchenode_error( -5, "The public key does not match the delegation", pubkey=random_pubkey.get_bytes().hex(), delegation=delegation.serialize().hex(), ) self.log.info("Happy path") assert node.addavalanchenode(nodeid, proof_master, proof) # Adding several times is OK assert node.addavalanchenode(nodeid, proof_master, proof) # Use an hardcoded proof. This will help detecting proof format changes. 
# Generated using: # stakes = create_coinbase_stakes(node, [blockhashes[1]], addrkey0.key) # hardcoded_proof = node.buildavalancheproof( # proof_sequence, proof_expiration, random_pubkey, stakes) hardcoded_pubkey = "037d20fcfe118296bb53f0a8f87c864e7b9831c4fcd7c6a0bb9a58e0e0f53d5cbc" hardcoded_proof = ( "2a00000000000000009435770000000021037d20fcfe118296bb53f0a8f87c864e" "7b9831c4fcd7c6a0bb9a58e0e0f53d5cbc01683ef49024cf25bb55775b327f5e68" "c79da3a7824dc03df5623c96f4a60158f90000000000f902950000000095010000" "210227d85ba011276cf25b51df6a188b75e604b38770a462b2d0e9fb2fc839ef5d" "3f612834ef0e2545d6359e9f34967c2bb69cb88fe246fed716d998f3f62eba1ef6" "6a547606a7ac14c1b5697f4acc20853b3f99954f4f7b6e9bf8a085616d3adfc7") assert node.addavalanchenode(nodeid, hardcoded_pubkey, hardcoded_proof) self.log.info("Add a node with a valid delegation") assert node.addavalanchenode( nodeid, dg_pubkey.hex(), proof, delegation.serialize().hex(), ) self.log.info("Several nodes can share a proof") nodeid2 = add_interface_node(node) assert node.addavalanchenode(nodeid2, proof_master, proof)
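# The valid delegation above is built by signing, with the proof master key, the
# hash of the delegation id concatenated with the serialized delegated public
# key. The same steps as a small helper (illustrative sketch, using only names
# already imported by this test):
def delegate_to(proofobj, master_privkey, new_pubkey):
    dg = AvalancheDelegation(limited_proofid=proofobj.limited_proofid,
                             proof_master=proofobj.master)
    sig = master_privkey.sign_schnorr(
        hash256(dg.getid() + ser_string(new_pubkey)))
    dg.levels.append(AvalancheDelegationLevel(pubkey=new_pubkey, sig=sig))
    return dg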
def run_test(self): node, = self.nodes self.bootstrap_p2p() tip = self.getbestblock(node) self.log.info("Create some blocks with OP_1 coinbase for spending.") blocks = [] for _ in range(10): tip = self.build_block(tip) blocks.append(tip) node.p2p.send_blocks_and_test(blocks, node, success=True) spendable_outputs = [block.vtx[0] for block in blocks] self.log.info("Mature the blocks and get out of IBD.") self.generatetoaddress(node, 100, node.get_deterministic_priv_key().address) tip = self.getbestblock(node) self.log.info("Setting up spends to test and mining the fundings.") fundings = [] # Generate a key pair privkeybytes = b"Schnorr!" * 4 private_key = ECKey() private_key.set(privkeybytes, True) # get uncompressed public key serialization public_key = private_key.get_pubkey().get_bytes() def create_fund_and_spend_tx(multi=False, sig='schnorr'): spendfrom = spendable_outputs.pop() if multi: script = CScript([OP_1, public_key, OP_1, OP_CHECKMULTISIG]) else: script = CScript([public_key, OP_CHECKSIG]) value = spendfrom.vout[0].nValue # Fund transaction txfund = create_tx_with_script(spendfrom, 0, b'', value, script) txfund.rehash() fundings.append(txfund) # Spend transaction txspend = CTransaction() txspend.vout.append(CTxOut(value - 1000, CScript([OP_TRUE]))) txspend.vin.append(CTxIn(COutPoint(txfund.sha256, 0), b'')) # Sign the transaction sighashtype = SIGHASH_ALL | SIGHASH_FORKID hashbyte = bytes([sighashtype & 0xff]) sighash = SignatureHashForkId(script, txspend, 0, sighashtype, value) if sig == 'schnorr': txsig = schnorr.sign(privkeybytes, sighash) + hashbyte elif sig == 'ecdsa': txsig = private_key.sign_ecdsa(sighash) + hashbyte elif isinstance(sig, bytes): txsig = sig + hashbyte if multi: txspend.vin[0].scriptSig = CScript([b'', txsig]) else: txspend.vin[0].scriptSig = CScript([txsig]) txspend.rehash() return txspend schnorrchecksigtx = create_fund_and_spend_tx() schnorrmultisigtx = create_fund_and_spend_tx(multi=True) ecdsachecksigtx = create_fund_and_spend_tx(sig='ecdsa') sig64checksigtx = create_fund_and_spend_tx(sig=sig64) sig64multisigtx = create_fund_and_spend_tx(multi=True, sig=sig64) tip = self.build_block(tip, fundings) node.p2p.send_blocks_and_test([tip], node) self.log.info("Typical ECDSA and Schnorr CHECKSIG are valid.") node.p2p.send_txs_and_test([schnorrchecksigtx, ecdsachecksigtx], node) # They get mined as usual. self.generatetoaddress(node, 1, node.get_deterministic_priv_key().address) tip = self.getbestblock(node) # Make sure they are in the block, and mempool is now empty. txhashes = set([schnorrchecksigtx.hash, ecdsachecksigtx.hash]) assert txhashes.issubset(tx.rehash() for tx in tip.vtx) assert not node.getrawmempool() self.log.info("Schnorr in multisig is rejected with mandatory error.") assert_raises_rpc_error(-26, SCHNORR_MULTISIG_ERROR, node.sendrawtransaction, ToHex(schnorrmultisigtx)) # And it is banworthy. self.check_for_ban_on_rejected_tx(schnorrmultisigtx, SCHNORR_MULTISIG_ERROR) # And it can't be mined self.check_for_ban_on_rejected_block( self.build_block(tip, [schnorrmultisigtx]), BADINPUTS_ERROR) self.log.info("Bad 64-byte sig is rejected with mandatory error.") # In CHECKSIG it's invalid Schnorr and hence NULLFAIL. assert_raises_rpc_error(-26, NULLFAIL_ERROR, node.sendrawtransaction, ToHex(sig64checksigtx)) # In CHECKMULTISIG it's invalid length and hence BAD_LENGTH. assert_raises_rpc_error(-26, SCHNORR_MULTISIG_ERROR, node.sendrawtransaction, ToHex(sig64multisigtx)) # Sending these transactions is banworthy. 
self.check_for_ban_on_rejected_tx(sig64checksigtx, NULLFAIL_ERROR) self.check_for_ban_on_rejected_tx(sig64multisigtx, SCHNORR_MULTISIG_ERROR) # And they can't be mined either... self.check_for_ban_on_rejected_block( self.build_block(tip, [sig64checksigtx]), BADINPUTS_ERROR) self.check_for_ban_on_rejected_block( self.build_block(tip, [sig64multisigtx]), BADINPUTS_ERROR)
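# Worked note on the sighash byte used throughout the test above: SIGHASH_ALL (0x01)
# OR'd with SIGHASH_FORKID (0x40) gives 0x41, the single byte appended to every
# Schnorr and ECDSA signature here. The constants are restated locally for the
# arithmetic; the test itself imports them from the framework.
SIGHASH_ALL_LOCAL = 0x01
SIGHASH_FORKID_LOCAL = 0x40
assert bytes([(SIGHASH_ALL_LOCAL | SIGHASH_FORKID_LOCAL) & 0xff]) == b'\x41'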
class MiniWallet: def __init__(self, test_node, *, mode=MiniWalletMode.ADDRESS_OP_TRUE): self._test_node = test_node self._utxos = [] self._priv_key = None self._address = None assert isinstance(mode, MiniWalletMode) if mode == MiniWalletMode.RAW_OP_TRUE: self._scriptPubKey = bytes(CScript([OP_TRUE])) elif mode == MiniWalletMode.RAW_P2PK: # use simple deterministic private key (k=1) self._priv_key = ECKey() self._priv_key.set((1).to_bytes(32, 'big'), True) pub_key = self._priv_key.get_pubkey() self._scriptPubKey = bytes( CScript([pub_key.get_bytes(), OP_CHECKSIG])) elif mode == MiniWalletMode.ADDRESS_OP_TRUE: self._address = ADDRESS_BCRT1_P2WSH_OP_TRUE self._scriptPubKey = hex_str_to_bytes( self._test_node.validateaddress(self._address)['scriptPubKey']) def scan_blocks(self, *, start=1, num): """Scan the blocks for self._address outputs and add them to self._utxos""" for i in range(start, start + num): block = self._test_node.getblock( blockhash=self._test_node.getblockhash(i), verbosity=2) for tx in block['tx']: self.scan_tx(tx) def scan_tx(self, tx): """Scan the tx for self._scriptPubKey outputs and add them to self._utxos""" for out in tx['vout']: if out['scriptPubKey']['hex'] == self._scriptPubKey.hex(): self._utxos.append({ 'txid': tx['txid'], 'vout': out['n'], 'value': out['value'] }) def sign_tx(self, tx, fixed_length=True): """Sign tx that has been created by MiniWallet in P2PK mode""" assert self._priv_key is not None (sighash, err) = LegacySignatureHash(CScript(self._scriptPubKey), tx, 0, SIGHASH_ALL) assert err is None # for exact fee calculation, create only signatures with fixed size by default (>49.89% probability): # 65 bytes: high-R val (33 bytes) + low-S val (32 bytes) # with the DER header/skeleton data of 6 bytes added, this leads to a target size of 71 bytes der_sig = b'' while not len(der_sig) == 71: der_sig = self._priv_key.sign_ecdsa(sighash) if not fixed_length: break tx.vin[0].scriptSig = CScript( [der_sig + bytes(bytearray([SIGHASH_ALL]))]) def generate(self, num_blocks): """Generate blocks with coinbase outputs to the internal address, and append the outputs to the internal list""" blocks = self._test_node.generatetodescriptor( num_blocks, f'raw({self._scriptPubKey.hex()})') for b in blocks: cb_tx = self._test_node.getblock(blockhash=b, verbosity=2)['tx'][0] self._utxos.append({ 'txid': cb_tx['txid'], 'vout': 0, 'value': cb_tx['vout'][0]['value'] }) return blocks def get_address(self): return self._address def get_utxo(self, *, txid: Optional[str] = '', mark_as_spent=True): """ Returns a utxo and marks it as spent (pops it from the internal list) Args: txid: get the first utxo we find from a specific transaction Note: Can be used to get the change output immediately after a send_self_transfer """ index = -1 # by default the last utxo if txid: utxo = next(filter(lambda utxo: txid == utxo['txid'], self._utxos)) index = self._utxos.index(utxo) if mark_as_spent: return self._utxos.pop(index) else: return self._utxos[index] def send_self_transfer(self, **kwargs): """Create and send a tx with the specified fee_rate. Fee may be exact or at most one satoshi higher than needed.""" tx = self.create_self_transfer(**kwargs) self.sendrawtransaction(from_node=kwargs['from_node'], tx_hex=tx['hex']) return tx def create_self_transfer(self, *, fee_rate=Decimal("0.003"), from_node, utxo_to_spend=None, mempool_valid=True, locktime=0, sequence=0): """Create and return a tx with the specified fee_rate. 
Fee may be exact or at most one satoshi higher than needed.""" self._utxos = sorted(self._utxos, key=lambda k: k['value']) utxo_to_spend = utxo_to_spend or self._utxos.pop( ) # Pick the largest utxo (if none provided) and hope it covers the fee if self._priv_key is None: vsize = Decimal(96) # anyone-can-spend else: vsize = Decimal( 168 ) # P2PK (73 bytes scriptSig + 35 bytes scriptPubKey + 60 bytes other) send_value = satoshi_round(utxo_to_spend['value'] - fee_rate * (vsize / 1000)) fee = utxo_to_spend['value'] - send_value assert send_value > 0 tx = CTransaction() tx.vin = [ CTxIn(COutPoint(int(utxo_to_spend['txid'], 16), utxo_to_spend['vout']), nSequence=sequence) ] tx.vout = [CTxOut(int(send_value * COIN), self._scriptPubKey)] tx.nLockTime = locktime if not self._address: # raw script if self._priv_key is not None: # P2PK, need to sign self.sign_tx(tx) else: # anyone-can-spend tx.vin[0].scriptSig = CScript([OP_NOP] * 35) # pad to identical size else: tx.wit.vtxinwit = [CTxInWitness()] tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([OP_TRUE])] tx_hex = tx.serialize().hex() tx_info = from_node.testmempoolaccept([tx_hex])[0] assert_equal(mempool_valid, tx_info['allowed']) if mempool_valid: assert_equal(tx_info['vsize'], vsize) assert_equal(tx_info['fees']['base'], fee) return { 'txid': tx_info['txid'], 'wtxid': tx_info['wtxid'], 'hex': tx_hex, 'tx': tx } def sendrawtransaction(self, *, from_node, tx_hex): from_node.sendrawtransaction(tx_hex) self.scan_tx(from_node.decoderawtransaction(tx_hex))
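# Hedged usage sketch (not from the original file): how the MiniWallet above is
# typically driven from a test's run_test. 'node' is any TestNode; the block counts
# and fee rate are illustrative, and Decimal is the same import the class already uses.
def miniwallet_usage_example(node):
    wallet = MiniWallet(node)
    wallet.generate(1)   # one coinbase paying the wallet's script
    # mature that coinbase by mining 100 blocks to an unrelated address
    node.generatetoaddress(100, node.get_deterministic_priv_key().address)
    tx1 = wallet.send_self_transfer(from_node=node)
    change = wallet.get_utxo(txid=tx1['txid'])   # explicitly pick tx1's output
    wallet.send_self_transfer(from_node=node, utxo_to_spend=change,
                              fee_rate=Decimal("0.003"))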
def run_test(self): # Connect to node0 node0 = BaseNode() connections = [ NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], node0) ] node0.add_connection(connections[0]) NetworkThread().start() # Start up network handling in another thread node0.wait_for_verack() # Build the blockchain self.tip = int(self.nodes[0].getbestblockhash(), 16) self.block_time = self.nodes[0].getblock( self.nodes[0].getbestblockhash())['time'] + 1 self.blocks = [] # Get a pubkey for the coinbase TXO coinbase_key = ECKey() coinbase_key.generate() coinbase_pubkey = coinbase_key.get_pubkey().get_bytes() # Create the first block with a coinbase output to our key height = 1 block = create_block(self.tip, create_coinbase(height, coinbase_pubkey), self.block_time) self.blocks.append(block) self.block_time += 1 block.solve() # Save the coinbase for later self.block1 = block self.tip = block.x16r height += 1 # Bury the block 100 deep so the coinbase output is spendable for i in range(100): block = create_block(self.tip, create_coinbase(height), self.block_time) block.solve() self.blocks.append(block) self.tip = block.x16r self.block_time += 1 height += 1 # Create a transaction spending the coinbase output with an invalid (null) signature tx = CTransaction() tx.vin.append( CTxIn(COutPoint(self.block1.vtx[0].x16r, 0), script_sig=b"")) tx.vout.append(CTxOut(49 * 100000000, CScript([OP_TRUE]))) tx.calc_x16r() block102 = create_block(self.tip, create_coinbase(height), self.block_time) self.block_time += 1 block102.vtx.extend([tx]) block102.hashMerkleRoot = block102.calc_merkle_root() block102.rehash() block102.solve() self.blocks.append(block102) self.tip = block102.x16r self.block_time += 1 height += 1 # Bury the assumed valid block 2100 deep for i in range(2100): block = create_block(self.tip, create_coinbase(height), self.block_time) block.nVersion = 4 block.solve() self.blocks.append(block) self.tip = block.x16r self.block_time += 1 height += 1 # Start node1 and node2 with assumevalid so they accept a block with a bad signature. self.start_node(1, extra_args=["-assumevalid=" + hex(block102.x16r)]) node1 = BaseNode() # connects to node1 connections.append( NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], node1)) node1.add_connection(connections[1]) node1.wait_for_verack() self.start_node(2, extra_args=["-assumevalid=" + hex(block102.x16r)]) node2 = BaseNode() # connects to node2 connections.append( NodeConn('127.0.0.1', p2p_port(2), self.nodes[2], node2)) node2.add_connection(connections[2]) node2.wait_for_verack() # send header lists to all three nodes node0.send_header_for_blocks(self.blocks[0:2000]) node0.send_header_for_blocks(self.blocks[2000:]) node1.send_header_for_blocks(self.blocks[0:2000]) node1.send_header_for_blocks(self.blocks[2000:]) node2.send_header_for_blocks(self.blocks[0:200]) # Send blocks to node0. Block 102 will be rejected. self.send_blocks_until_disconnected(node0) self.assert_blockchain_height(self.nodes[0], 101) # Send all blocks to node1. All blocks will be accepted. for i in range(2202): node1.send_message(MsgBlock(self.blocks[i])) # Syncing 2200 blocks can take a while on slow systems. Give it plenty of time to sync. node1.sync_with_ping(120) assert_equal( self.nodes[1].getblock(self.nodes[1].getbestblockhash())['height'], 2202) # Send blocks to node2. Block 102 will be rejected. self.send_blocks_until_disconnected(node2) self.assert_blockchain_height(self.nodes[2], 101)
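# Worked check of the block counts used above: 1 block paying our key, 100 burying
# blocks, the block with the invalid (null-signature) spend, and 2100 blocks on top
# give 2202 blocks in total, which is both the number of MsgBlock messages sent to
# node1 and the height node1 is expected to reach.
assert 1 + 100 + 1 + 2100 == 2202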
def generate_wif_key(): # Makes a WIF privkey for imports k = ECKey() k.generate() return bytes_to_wif(k.get_bytes(), k.is_compressed)
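# Hedged usage sketch: a throwaway WIF key as produced by generate_wif_key() can be
# imported into any wallet-enabled node (as done for wallet w3 further down);
# 'node' is illustrative.
def import_throwaway_key(node):
    wif = generate_wif_key()
    node.importprivkey(wif)   # imported keys are not added to the keypool
    return wif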
def run_test(self): node = self.nodes[0] self.log.info('Start with empty mempool, and 200 blocks') self.mempool_size = 0 assert_equal(node.getblockcount(), 200) assert_equal(node.getmempoolinfo()['size'], self.mempool_size) coins = node.listunspent() self.log.info('Should not accept garbage to testmempoolaccept') assert_raises_rpc_error(-3, 'Expected type array, got string', lambda: node.testmempoolaccept(rawtxs='ff00baar')) assert_raises_rpc_error(-8, 'Array must contain exactly one raw transaction for now', lambda: node.testmempoolaccept(rawtxs=['ff00baar', 'ff22'])) assert_raises_rpc_error(-22, 'TX decode failed', lambda: node.testmempoolaccept(rawtxs=['ff00baar'])) self.log.info('A transaction already in the blockchain') coin = coins.pop() # Pick a random coin(base) to spend raw_tx_in_block = node.signrawtransactionwithwallet(node.createrawtransaction( inputs=[{'txid': coin['txid'], 'vout': coin['vout']}], outputs=[{node.getnewaddress(): 0.3}, {node.getnewaddress(): 49}, {"fee": coin["amount"] - Decimal('49.3')}], ))['hex'] txid_in_block = node.sendrawtransaction(hexstring=raw_tx_in_block, maxfeerate=0) node.generate(1) self.mempool_size = 0 self.check_mempool_result( result_expected=[{'txid': txid_in_block, 'allowed': False, 'reject-reason': 'txn-already-known'}], rawtxs=[raw_tx_in_block], ) self.log.info('A transaction not in the mempool') fee = Decimal('0.000007') raw_tx_0 = node.signrawtransactionwithwallet(node.createrawtransaction( inputs=[{"txid": txid_in_block, "vout": 0, "sequence": BIP125_SEQUENCE_NUMBER}], # RBF is used later outputs=[{node.getnewaddress(): Decimal('0.3') - fee}, {"fee": fee}], ))['hex'] tx = CTransaction() tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0))) txid_0 = tx.rehash() self.check_mempool_result( result_expected=[{'txid': txid_0, 'allowed': True, 'vsize': tx.get_vsize(), 'fees': {'base': fee}}], rawtxs=[raw_tx_0], ) self.log.info('A final transaction not in the mempool') coin = coins.pop() # Pick a random coin(base) to spend output_amount = Decimal('0.025') raw_tx_final = node.signrawtransactionwithwallet(node.createrawtransaction( inputs=[{'txid': coin['txid'], 'vout': coin['vout'], "sequence": 0xffffffff}], # SEQUENCE_FINAL outputs=[{node.getnewaddress(): output_amount}, {"fee": coin["amount"] - Decimal(str(output_amount))}], locktime=node.getblockcount() + 2000, # Can be anything ))['hex'] tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_final))) fee_expected = coin['amount'] - output_amount self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': True, 'vsize': tx.get_vsize(), 'fees': {'base': fee_expected}}], rawtxs=[tx.serialize().hex()], maxfeerate=0, ) node.sendrawtransaction(hexstring=raw_tx_final, maxfeerate=0) self.mempool_size += 1 self.log.info('A transaction in the mempool') node.sendrawtransaction(hexstring=raw_tx_0) self.mempool_size += 1 self.check_mempool_result( result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': 'txn-already-in-mempool'}], rawtxs=[raw_tx_0], ) self.log.info('A transaction that replaces a mempool transaction') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0))) tx.vout[0].nValue.setToAmount(tx.vout[0].nValue.getAmount() - int(fee * COIN)) # Double the fee txid_0_out = tx.vout[0].nValue.getAmount() tx.vout[1].nValue.setToAmount(tx.vout[1].nValue.getAmount() + int(fee * COIN)) tx.vin[0].nSequence = BIP125_SEQUENCE_NUMBER + 1 # Now, opt out of RBF raw_tx_0 = node.signrawtransactionwithwallet(tx.serialize().hex())['hex'] tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0))) txid_0 = 
tx.rehash() self.check_mempool_result( result_expected=[{'txid': txid_0, 'allowed': True, 'vsize': tx.get_vsize(), 'fees': {'base': (2 * fee)}}], rawtxs=[raw_tx_0], ) self.log.info('A transaction that conflicts with an unconfirmed tx') # Send the transaction that replaces the mempool transaction and opts out of replaceability node.sendrawtransaction(hexstring=tx.serialize().hex(), maxfeerate=0) # take original raw_tx_0 tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0))) tx.vout[0].nValue.setToAmount(tx.vout[0].nValue.getAmount() - int(4 * fee * COIN)) # Set more fee tx.vout[1].nValue.setToAmount(tx.vout[1].nValue.getAmount() + int(4 * fee * COIN)) # skip re-signing the tx self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'txn-mempool-conflict'}], rawtxs=[tx.serialize().hex()], maxfeerate=0, ) self.log.info('A transaction with missing inputs, that never existed') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0))) tx.vin[0].prevout = COutPoint(hash=int('ff' * 32, 16), n=14) # skip re-signing the tx self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'missing-inputs'}], rawtxs=[tx.serialize().hex()], ) self.log.info('A transaction with missing inputs, that existed once in the past') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0))) tx.vin[0].prevout.n = 1 # Set vout to 1, to spend the other outpoint (49 coins) of the in-chain-tx we want to double spend tx.vout[1].nValue.setToAmount(49*COIN - tx.vout[0].nValue.getAmount()) # fee txid_1_out = tx.vout[0].nValue.getAmount() raw_tx_1 = node.signrawtransactionwithwallet(tx.serialize().hex())['hex'] txid_1 = node.sendrawtransaction(hexstring=raw_tx_1, maxfeerate=0) # Now spend both to "clearly hide" the outputs, ie. 
remove the coins from the utxo set by spending them raw_tx_spend_both = node.signrawtransactionwithwallet(node.createrawtransaction( inputs=[ {'txid': txid_0, 'vout': 0}, {'txid': txid_1, 'vout': 0}, ], outputs=[{node.getnewaddress(): 0.1}, {"fee": Decimal(txid_0_out + txid_1_out)/Decimal(COIN) - Decimal('0.1')}] ))['hex'] txid_spend_both = node.sendrawtransaction(hexstring=raw_tx_spend_both, maxfeerate=0) node.generate(1) self.mempool_size = 0 # Now see if we can add the coins back to the utxo set by sending the exact txs again self.check_mempool_result( result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': 'missing-inputs'}], rawtxs=[raw_tx_0], ) self.check_mempool_result( result_expected=[{'txid': txid_1, 'allowed': False, 'reject-reason': 'missing-inputs'}], rawtxs=[raw_tx_1], ) self.log.info('Create a signed "reference" tx for later use') raw_tx_reference = node.signrawtransactionwithwallet(node.createrawtransaction( inputs=[{'txid': txid_spend_both, 'vout': 0}], outputs=[{node.getnewaddress(): 0.05}, {"fee": Decimal('0.1') - Decimal('0.05')}], ))['hex'] tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) # Reference tx should be valid on itself self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': True, 'vsize': tx.get_vsize(), 'fees': { 'base': Decimal('0.1') - Decimal('0.05')}}], rawtxs=[tx.serialize().hex()], maxfeerate=0, ) self.log.info('A transaction with no outputs') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vout = [] # Skip re-signing the transaction for context independent checks from now on # tx.deserialize(BytesIO(hex_str_to_bytes(node.signrawtransactionwithwallet(tx.serialize().hex())['hex']))) self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-vout-empty'}], rawtxs=[tx.serialize().hex()], ) self.log.info('A really large transaction') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vin = [tx.vin[0]] * math.ceil(MAX_BLOCK_BASE_SIZE / len(tx.vin[0].serialize())) self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-oversize'}], rawtxs=[tx.serialize().hex()], ) self.log.info('A transaction with negative output value') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vout[0].nValue.setToAmount(tx.vout[0].nValue.getAmount() * -1) self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-vout-negative'}], rawtxs=[tx.serialize().hex()], ) # The following two validations prevent overflow of the output amounts (see CVE-2010-5139). 
self.log.info('A transaction with too large output value') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vout[0].nValue = CTxOutValue(MAX_MONEY + 1) self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-vout-toolarge'}], rawtxs=[tx.serialize().hex()], ) self.log.info('A transaction with too large sum of output values') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vout = [tx.vout[0]] * 2 tx.vout[0].nValue = CTxOutValue(MAX_MONEY) self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-txouttotal-toolarge'}], rawtxs=[tx.serialize().hex()], ) self.log.info('A transaction with duplicate inputs') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vin = [tx.vin[0]] * 2 self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-inputs-duplicate'}], rawtxs=[tx.serialize().hex()], ) self.log.info('A coinbase transaction') # Pick the input of the first tx we signed, so it has to be a coinbase tx raw_tx_coinbase_spent = node.getrawtransaction(txid=node.decoderawtransaction(hexstring=raw_tx_in_block)['vin'][0]['txid']) tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_coinbase_spent))) self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'coinbase'}], rawtxs=[tx.serialize().hex()], ) self.log.info('Some nonstandard transactions') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.nVersion = 3 # A version currently non-standard self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'version'}], rawtxs=[tx.serialize().hex()], ) tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vout[0].scriptPubKey = CScript([OP_0]) # Some non-standard script self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'scriptpubkey'}], rawtxs=[tx.serialize().hex()], ) tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) key = ECKey() key.generate() pubkey = key.get_pubkey().get_bytes() tx.vout[0].scriptPubKey = CScript([OP_2, pubkey, pubkey, pubkey, OP_3, OP_CHECKMULTISIG]) # Some bare multisig script (2-of-3) self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bare-multisig'}], rawtxs=[tx.serialize().hex()], ) tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vin[0].scriptSig = CScript([OP_HASH160]) # Some not-pushonly scriptSig self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'scriptsig-not-pushonly'}], rawtxs=[tx.serialize().hex()], ) tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vin[0].scriptSig = CScript([b'a' * 1648]) # Some too large scriptSig (>1650 bytes) self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'scriptsig-size'}], rawtxs=[tx.serialize().hex()], ) tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) output_p2sh_burn = CTxOut(nValue=540, scriptPubKey=CScript([OP_HASH160, hash160(b'burn'), OP_EQUAL])) num_scripts = 100000 // len(output_p2sh_burn.serialize()) # Use enough outputs to make the tx too large for our policy tx.vout = [output_p2sh_burn] * num_scripts self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'tx-size'}], rawtxs=[tx.serialize().hex()], ) 
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vout[0] = output_p2sh_burn tx.vout[0].nValue.setToAmount(tx.vout[0].nValue.getAmount() - 1) # Make output smaller, such that it is dust for our policy self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'dust'}], rawtxs=[tx.serialize().hex()], ) # Elements: We allow multi op_return outputs by default. This still fails because relay fee isn't met tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'\xff']) tx.vout = [tx.vout[0]] * 2 self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'min relay fee not met'}], rawtxs=[tx.serialize().hex()], ) self.log.info('A timelocked transaction') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vin[0].nSequence -= 1 # Should be non-max, so locktime is not ignored tx.nLockTime = node.getblockcount() + 1 self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'non-final'}], rawtxs=[tx.serialize().hex()], ) self.log.info('A transaction that is locked by BIP68 sequence logic') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vin[0].nSequence = 2 # We could include it in the second block mined from now, but not the very next one # Can skip re-signing the tx because of early rejection self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'non-BIP68-final'}], rawtxs=[tx.serialize().hex()], maxfeerate=0, )
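# Hedged illustration of the raw RPC behind the check_mempool_result helper used
# above (the helper itself is defined earlier in this test file): testmempoolaccept
# takes a list of raw transactions and reports per-transaction acceptance with the
# same fields the expected-result dicts above compare against.
def testmempoolaccept_example(node, raw_tx):
    res = node.testmempoolaccept(rawtxs=[raw_tx], maxfeerate=0)[0]
    if res['allowed']:
        return res['vsize'], res['fees']['base']
    return res['reject-reason']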
def random_p2wpkh(): """Generate a random P2WPKH scriptPubKey. Can be used when a random destination is needed, but no compiled wallet is available (e.g. as replacement to the getnewaddress RPC).""" key = ECKey() key.generate() return key_to_p2wpkh_script(key.get_pubkey().get_bytes())
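# Hedged usage sketch: random_p2wpkh() yields a fresh scriptPubKey for a throwaway
# output when no wallet is available. CTxOut is the framework's usual transaction
# output type; the amount is illustrative and in satoshis.
def add_random_output(tx, amount=10000):
    tx.vout.append(CTxOut(amount, random_p2wpkh()))
    return tx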
def run_test(self): node = self.nodes[0] node.generate(1) # Leave IBD for sethdseed self.nodes[0].createwallet(wallet_name='w0') w0 = node.get_wallet_rpc('w0') address1 = w0.getnewaddress() self.log.info("Test disableprivatekeys creation.") self.nodes[0].createwallet(wallet_name='w1', disable_private_keys=True) w1 = node.get_wallet_rpc('w1') assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w1.getnewaddress) assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w1.getrawchangeaddress) w1.importpubkey(w0.getaddressinfo(address1)['pubkey']) self.log.info('Test that private keys cannot be imported') eckey = ECKey() eckey.generate() privkey = bytes_to_wif(eckey.get_bytes()) assert_raises_rpc_error(-4, 'Cannot import private keys to a wallet with private keys disabled', w1.importprivkey, privkey) if self.options.descriptors: result = w1.importdescriptors([{'desc': descsum_create('wpkh(' + privkey + ')'), 'timestamp': 'now'}]) else: result = w1.importmulti([{'scriptPubKey': {'address': key_to_p2wpkh(eckey.get_pubkey().get_bytes())}, 'timestamp': 'now', 'keys': [privkey]}]) assert not result[0]['success'] assert 'warning' not in result[0] assert_equal(result[0]['error']['code'], -4) assert_equal(result[0]['error']['message'], 'Cannot import private keys to a wallet with private keys disabled') self.log.info("Test blank creation with private keys disabled.") self.nodes[0].createwallet(wallet_name='w2', disable_private_keys=True, blank=True) w2 = node.get_wallet_rpc('w2') assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w2.getnewaddress) assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w2.getrawchangeaddress) w2.importpubkey(w0.getaddressinfo(address1)['pubkey']) self.log.info("Test blank creation with private keys enabled.") self.nodes[0].createwallet(wallet_name='w3', disable_private_keys=False, blank=True) w3 = node.get_wallet_rpc('w3') assert_equal(w3.getwalletinfo()['keypoolsize'], 0) assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w3.getnewaddress) assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w3.getrawchangeaddress) # Import private key w3.importprivkey(generate_wif_key()) # Imported private keys are currently ignored by the keypool assert_equal(w3.getwalletinfo()['keypoolsize'], 0) assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w3.getnewaddress) # Set the seed if self.options.descriptors: w3.importdescriptors([{ 'desc': descsum_create('wpkh(tprv8ZgxMBicQKsPcwuZGKp8TeWppSuLMiLe2d9PupB14QpPeQsqoj3LneJLhGHH13xESfvASyd4EFLJvLrG8b7DrLxEuV7hpF9uUc6XruKA1Wq/0h/*)'), 'timestamp': 'now', 'active': True }, { 'desc': descsum_create('wpkh(tprv8ZgxMBicQKsPcwuZGKp8TeWppSuLMiLe2d9PupB14QpPeQsqoj3LneJLhGHH13xESfvASyd4EFLJvLrG8b7DrLxEuV7hpF9uUc6XruKA1Wq/1h/*)'), 'timestamp': 'now', 'active': True, 'internal': True }]) else: w3.sethdseed() assert_equal(w3.getwalletinfo()['keypoolsize'], 1) w3.getnewaddress() w3.getrawchangeaddress() self.log.info("Test blank creation with privkeys enabled and then encryption") self.nodes[0].createwallet(wallet_name='w4', disable_private_keys=False, blank=True) w4 = node.get_wallet_rpc('w4') assert_equal(w4.getwalletinfo()['keypoolsize'], 0) assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w4.getnewaddress) assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w4.getrawchangeaddress) # Encrypt the wallet. 
Nothing should change about the keypool w4.encryptwallet('pass') assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w4.getnewaddress) assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w4.getrawchangeaddress) # Now set a seed and it should work. Wallet should also be encrypted w4.walletpassphrase('pass', 60) if self.options.descriptors: w4.importdescriptors([{ 'desc': descsum_create('wpkh(tprv8ZgxMBicQKsPcwuZGKp8TeWppSuLMiLe2d9PupB14QpPeQsqoj3LneJLhGHH13xESfvASyd4EFLJvLrG8b7DrLxEuV7hpF9uUc6XruKA1Wq/0h/*)'), 'timestamp': 'now', 'active': True }, { 'desc': descsum_create('wpkh(tprv8ZgxMBicQKsPcwuZGKp8TeWppSuLMiLe2d9PupB14QpPeQsqoj3LneJLhGHH13xESfvASyd4EFLJvLrG8b7DrLxEuV7hpF9uUc6XruKA1Wq/1h/*)'), 'timestamp': 'now', 'active': True, 'internal': True }]) else: w4.sethdseed() w4.getnewaddress() w4.getrawchangeaddress() self.log.info("Test blank creation with privkeys disabled and then encryption") self.nodes[0].createwallet(wallet_name='w5', disable_private_keys=True, blank=True) w5 = node.get_wallet_rpc('w5') assert_equal(w5.getwalletinfo()['keypoolsize'], 0) assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w5.getnewaddress) assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w5.getrawchangeaddress) # Encrypt the wallet assert_raises_rpc_error(-16, "Error: wallet does not contain private keys, nothing to encrypt.", w5.encryptwallet, 'pass') assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w5.getnewaddress) assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w5.getrawchangeaddress) self.log.info('New blank and encrypted wallets can be created') self.nodes[0].createwallet(wallet_name='wblank', disable_private_keys=False, blank=True, passphrase='thisisapassphrase') wblank = node.get_wallet_rpc('wblank') assert_raises_rpc_error(-13, "Error: Please enter the wallet passphrase with walletpassphrase first.", wblank.signmessage, "needanargument", "test") wblank.walletpassphrase('thisisapassphrase', 60) assert_raises_rpc_error(-4, "Error: This wallet has no available keys", wblank.getnewaddress) assert_raises_rpc_error(-4, "Error: This wallet has no available keys", wblank.getrawchangeaddress) self.log.info('Test creating a new encrypted wallet.') # Born encrypted wallet is created (has keys) self.nodes[0].createwallet(wallet_name='w6', disable_private_keys=False, blank=False, passphrase='thisisapassphrase') w6 = node.get_wallet_rpc('w6') assert_raises_rpc_error(-13, "Error: Please enter the wallet passphrase with walletpassphrase first.", w6.signmessage, "needanargument", "test") w6.walletpassphrase('thisisapassphrase', 60) w6.signmessage(w6.getnewaddress('', 'legacy'), "test") w6.keypoolrefill(1) # There should only be 1 key for legacy, 3 for descriptors walletinfo = w6.getwalletinfo() keys = 3 if self.options.descriptors else 1 assert_equal(walletinfo['keypoolsize'], keys) assert_equal(walletinfo['keypoolsize_hd_internal'], keys) # Allow empty passphrase, but there should be a warning resp = self.nodes[0].createwallet(wallet_name='w7', disable_private_keys=False, blank=False, passphrase='') assert 'Empty string given as passphrase, wallet will not be encrypted.' 
in resp['warning'] w7 = node.get_wallet_rpc('w7') assert_raises_rpc_error(-15, 'Error: running with an unencrypted wallet, but walletpassphrase was called.', w7.walletpassphrase, '', 60) self.log.info('Test making a wallet with avoid reuse flag') self.nodes[0].createwallet('w8', False, False, '', True) # Use positional arguments to check for bug where avoid_reuse could not be set for wallets without needing them to be encrypted w8 = node.get_wallet_rpc('w8') assert_raises_rpc_error(-15, 'Error: running with an unencrypted wallet, but walletpassphrase was called.', w8.walletpassphrase, '', 60) assert_equal(w8.getwalletinfo()["avoid_reuse"], True) self.log.info('Using a passphrase with private keys disabled returns error') assert_raises_rpc_error(-4, 'Passphrase provided but private keys are disabled. A passphrase is only used to encrypt private keys, so cannot be used for wallets with private keys disabled.', self.nodes[0].createwallet, wallet_name='w9', disable_private_keys=True, passphrase='thisisapassphrase')
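# Hedged note on the positional call above: createwallet('w8', False, False, '', True)
# corresponds to the named form below (argument order per the createwallet RPC:
# wallet_name, disable_private_keys, blank, passphrase, avoid_reuse).
def createwallet_w8_named_equivalent(node):
    return node.createwallet(wallet_name='w8', disable_private_keys=False,
                             blank=False, passphrase='', avoid_reuse=True)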
class MiniWallet: def __init__(self, test_node, *, mode=MiniWalletMode.ADDRESS_OP_TRUE): self._test_node = test_node self._utxos = [] self._priv_key = None self._address = None assert isinstance(mode, MiniWalletMode) if mode == MiniWalletMode.RAW_OP_TRUE: self._scriptPubKey = bytes(CScript([OP_TRUE])) elif mode == MiniWalletMode.RAW_P2PK: # use simple deterministic private key (k=1) self._priv_key = ECKey() self._priv_key.set((1).to_bytes(32, 'big'), True) pub_key = self._priv_key.get_pubkey() self._scriptPubKey = key_to_p2pk_script(pub_key.get_bytes()) elif mode == MiniWalletMode.ADDRESS_OP_TRUE: self._address, self._internal_key = create_deterministic_address_bcrt1_p2tr_op_true() self._scriptPubKey = bytes.fromhex(self._test_node.validateaddress(self._address)['scriptPubKey']) def rescan_utxos(self): """Drop all utxos and rescan the utxo set""" self._utxos = [] res = self._test_node.scantxoutset(action="start", scanobjects=[self.get_descriptor()]) assert_equal(True, res['success']) for utxo in res['unspents']: self._utxos.append({'txid': utxo['txid'], 'vout': utxo['vout'], 'value': utxo['amount'], 'height': utxo['height']}) def scan_tx(self, tx): """Scan the tx for self._scriptPubKey outputs and add them to self._utxos""" for out in tx['vout']: if out['scriptPubKey']['hex'] == self._scriptPubKey.hex(): self._utxos.append({'txid': tx['txid'], 'vout': out['n'], 'value': out['value'], 'height': 0}) def sign_tx(self, tx, fixed_length=True): """Sign tx that has been created by MiniWallet in P2PK mode""" assert self._priv_key is not None (sighash, err) = LegacySignatureHash(CScript(self._scriptPubKey), tx, 0, SIGHASH_ALL) assert err is None # for exact fee calculation, create only signatures with fixed size by default (>49.89% probability): # 65 bytes: high-R val (33 bytes) + low-S val (32 bytes) # with the DER header/skeleton data of 6 bytes added, this leads to a target size of 71 bytes der_sig = b'' while not len(der_sig) == 71: der_sig = self._priv_key.sign_ecdsa(sighash) if not fixed_length: break tx.vin[0].scriptSig = CScript([der_sig + bytes(bytearray([SIGHASH_ALL]))]) def generate(self, num_blocks, **kwargs): """Generate blocks with coinbase outputs to the internal address, and append the outputs to the internal list""" blocks = self._test_node.generatetodescriptor(num_blocks, self.get_descriptor(), **kwargs) for b in blocks: block_info = self._test_node.getblock(blockhash=b, verbosity=2) cb_tx = block_info['tx'][0] self._utxos.append({'txid': cb_tx['txid'], 'vout': 0, 'value': cb_tx['vout'][0]['value'], 'height': block_info['height']}) return blocks def get_descriptor(self): return descsum_create(f'raw({self._scriptPubKey.hex()})') def get_address(self): return self._address def get_utxo(self, *, txid: Optional[str]='', mark_as_spent=True): """ Returns a utxo and marks it as spent (pops it from the internal list) Args: txid: get the first utxo we find from a specific transaction Note: Can be used to get the change output immediately after a send_self_transfer """ index = -1 # by default the last utxo if txid: utxo = next(filter(lambda utxo: txid == utxo['txid'], self._utxos)) index = self._utxos.index(utxo) if mark_as_spent: return self._utxos.pop(index) else: return self._utxos[index] def send_self_transfer(self, **kwargs): """Create and send a tx with the specified fee_rate. 
Fee may be exact or at most one satoshi higher than needed.""" tx = self.create_self_transfer(**kwargs) self.sendrawtransaction(from_node=kwargs['from_node'], tx_hex=tx['hex']) return tx def send_to(self, *, from_node, scriptPubKey, amount, fee=1000): """ Create and send a tx with an output to a given scriptPubKey/amount, plus a change output to our internal address. To keep things simple, a fixed fee given in Satoshi is used. Note that this method fails if there is no single internal utxo available that can cover the cost for the amount and the fixed fee (the utxo with the largest value is taken). Returns a tuple (txid, n) referring to the created external utxo outpoint. """ tx = self.create_self_transfer(from_node=from_node, fee_rate=0, mempool_valid=False)['tx'] assert_greater_than_or_equal(tx.vout[0].nValue, amount + fee) tx.vout[0].nValue -= (amount + fee) # change output -> MiniWallet tx.vout.append(CTxOut(amount, scriptPubKey)) # arbitrary output -> to be returned txid = self.sendrawtransaction(from_node=from_node, tx_hex=tx.serialize().hex()) return txid, 1 def create_self_transfer(self, *, fee_rate=Decimal("0.003"), from_node, utxo_to_spend=None, mempool_valid=True, locktime=0, sequence=0): """Create and return a tx with the specified fee_rate. Fee may be exact or at most one satoshi higher than needed.""" self._utxos = sorted(self._utxos, key=lambda k: (k['value'], -k['height'])) utxo_to_spend = utxo_to_spend or self._utxos.pop() # Pick the largest utxo (if none provided) and hope it covers the fee if self._priv_key is None: vsize = Decimal(104) # anyone-can-spend else: vsize = Decimal(168) # P2PK (73 bytes scriptSig + 35 bytes scriptPubKey + 60 bytes other) send_value = int(COIN * (utxo_to_spend['value'] - fee_rate * (vsize / 1000))) assert send_value > 0 tx = CTransaction() tx.vin = [CTxIn(COutPoint(int(utxo_to_spend['txid'], 16), utxo_to_spend['vout']), nSequence=sequence)] tx.vout = [CTxOut(send_value, self._scriptPubKey)] tx.nLockTime = locktime if not self._address: # raw script if self._priv_key is not None: # P2PK, need to sign self.sign_tx(tx) else: # anyone-can-spend tx.vin[0].scriptSig = CScript([OP_NOP] * 43) # pad to identical size else: tx.wit.vtxinwit = [CTxInWitness()] tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([OP_TRUE]), bytes([LEAF_VERSION_TAPSCRIPT]) + self._internal_key] tx_hex = tx.serialize().hex() tx_info = from_node.testmempoolaccept([tx_hex])[0] assert_equal(mempool_valid, tx_info['allowed']) if mempool_valid: assert_equal(tx_info['vsize'], vsize) assert_equal(tx_info['fees']['base'], utxo_to_spend['value'] - Decimal(send_value) / COIN) return {'txid': tx_info['txid'], 'wtxid': tx_info['wtxid'], 'hex': tx_hex, 'tx': tx} def sendrawtransaction(self, *, from_node, tx_hex): txid = from_node.sendrawtransaction(tx_hex) self.scan_tx(from_node.decoderawtransaction(tx_hex)) return txid
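# Hedged usage sketch of send_to() above: fund an arbitrary external scriptPubKey
# with a fixed fee, then spend the change output that send_to leaves in the wallet.
# 'node' is any TestNode; the script and amount (in satoshis) are illustrative.
def send_to_example(node, wallet):
    wallet.rescan_utxos()
    txid, n = wallet.send_to(from_node=node,
                             scriptPubKey=bytes(CScript([OP_TRUE])),
                             amount=100000)
    wallet.send_self_transfer(from_node=node)   # spends the change from send_to
    return txid, n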
def run_test(self): # Create and fund a raw tx for sending 10 SYS psbtx1 = self.nodes[0].walletcreatefundedpsbt( [], {self.nodes[2].getnewaddress(): 10})['psbt'] # If inputs are specified, do not automatically add more: utxo1 = self.nodes[0].listunspent()[0] assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[0].walletcreatefundedpsbt, [{ "txid": utxo1['txid'], "vout": utxo1['vout'] }], {self.nodes[2].getnewaddress(): 90}) psbtx1 = self.nodes[0].walletcreatefundedpsbt( [{ "txid": utxo1['txid'], "vout": utxo1['vout'] }], {self.nodes[2].getnewaddress(): 90}, 0, {"add_inputs": True})['psbt'] assert_equal(len(self.nodes[0].decodepsbt(psbtx1)['tx']['vin']), 2) # Inputs argument can be null self.nodes[0].walletcreatefundedpsbt( None, {self.nodes[2].getnewaddress(): 10}) # Node 1 should not be able to add anything to it but still return the psbtx same as before psbtx = self.nodes[1].walletprocesspsbt(psbtx1)['psbt'] assert_equal(psbtx1, psbtx) # Node 0 should not be able to sign the transaction with the wallet is locked self.nodes[0].encryptwallet("password") assert_raises_rpc_error( -13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].walletprocesspsbt, psbtx) # Node 0 should be able to process without signing though unsigned_tx = self.nodes[0].walletprocesspsbt(psbtx, False) assert_equal(unsigned_tx['complete'], False) self.nodes[0].walletpassphrase(passphrase="password", timeout=1000000) # Sign the transaction and send signed_tx = self.nodes[0].walletprocesspsbt(psbtx)['psbt'] final_tx = self.nodes[0].finalizepsbt(signed_tx)['hex'] self.nodes[0].sendrawtransaction(final_tx) # Manually selected inputs can be locked: assert_equal(len(self.nodes[0].listlockunspent()), 0) utxo1 = self.nodes[0].listunspent()[0] psbtx1 = self.nodes[0].walletcreatefundedpsbt( [{ "txid": utxo1['txid'], "vout": utxo1['vout'] }], {self.nodes[2].getnewaddress(): 1}, 0, {"lockUnspents": True})["psbt"] assert_equal(len(self.nodes[0].listlockunspent()), 1) # Locks are ignored for manually selected inputs self.nodes[0].walletcreatefundedpsbt([{ "txid": utxo1['txid'], "vout": utxo1['vout'] }], {self.nodes[2].getnewaddress(): 1}, 0) # Create p2sh, p2wpkh, and p2wsh addresses pubkey0 = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress())['pubkey'] pubkey1 = self.nodes[1].getaddressinfo( self.nodes[1].getnewaddress())['pubkey'] pubkey2 = self.nodes[2].getaddressinfo( self.nodes[2].getnewaddress())['pubkey'] # Setup watchonly wallets self.nodes[2].createwallet(wallet_name='wmulti', disable_private_keys=True) wmulti = self.nodes[2].get_wallet_rpc('wmulti') # Create all the addresses p2sh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "legacy")['address'] p2wsh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "bech32")['address'] p2sh_p2wsh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "p2sh-segwit")['address'] if not self.options.descriptors: wmulti.importaddress(p2sh) wmulti.importaddress(p2wsh) wmulti.importaddress(p2sh_p2wsh) p2wpkh = self.nodes[1].getnewaddress("", "bech32") p2pkh = self.nodes[1].getnewaddress("", "legacy") p2sh_p2wpkh = self.nodes[1].getnewaddress("", "p2sh-segwit") # fund those addresses rawtx = self.nodes[0].createrawtransaction( [], { p2sh: 10, p2wsh: 10, p2wpkh: 10, p2sh_p2wsh: 10, p2sh_p2wpkh: 10, p2pkh: 10 }) rawtx = self.nodes[0].fundrawtransaction(rawtx, {"changePosition": 3}) signed_tx = self.nodes[0].signrawtransactionwithwallet( rawtx['hex'])['hex'] txid = self.nodes[0].sendrawtransaction(signed_tx) 
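# Hedged recap (not from the original test) of the single-signer PSBT flow the
# section above just exercised, as a standalone helper; node, address and amount
# are illustrative.
def psbt_simple_send(node, address, amount):
    psbt = node.walletcreatefundedpsbt([], {address: amount})['psbt']
    signed = node.walletprocesspsbt(psbt)['psbt']
    final = node.finalizepsbt(signed)
    assert final['complete']
    return node.sendrawtransaction(final['hex'])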
self.generate(self.nodes[0], 6) # Find the output pos p2sh_pos = -1 p2wsh_pos = -1 p2wpkh_pos = -1 p2pkh_pos = -1 p2sh_p2wsh_pos = -1 p2sh_p2wpkh_pos = -1 decoded = self.nodes[0].decoderawtransaction(signed_tx) for out in decoded['vout']: if out['scriptPubKey']['address'] == p2sh: p2sh_pos = out['n'] elif out['scriptPubKey']['address'] == p2wsh: p2wsh_pos = out['n'] elif out['scriptPubKey']['address'] == p2wpkh: p2wpkh_pos = out['n'] elif out['scriptPubKey']['address'] == p2sh_p2wsh: p2sh_p2wsh_pos = out['n'] elif out['scriptPubKey']['address'] == p2sh_p2wpkh: p2sh_p2wpkh_pos = out['n'] elif out['scriptPubKey']['address'] == p2pkh: p2pkh_pos = out['n'] inputs = [{ "txid": txid, "vout": p2wpkh_pos }, { "txid": txid, "vout": p2sh_p2wpkh_pos }, { "txid": txid, "vout": p2pkh_pos }] outputs = [{self.nodes[1].getnewaddress(): 29.99}] # spend single key from node 1 created_psbt = self.nodes[1].walletcreatefundedpsbt(inputs, outputs) walletprocesspsbt_out = self.nodes[1].walletprocesspsbt( created_psbt['psbt']) # Make sure it has both types of UTXOs decoded = self.nodes[1].decodepsbt(walletprocesspsbt_out['psbt']) assert 'non_witness_utxo' in decoded['inputs'][0] assert 'witness_utxo' in decoded['inputs'][0] # Check decodepsbt fee calculation (input values shall only be counted once per UTXO) assert_equal(decoded['fee'], created_psbt['fee']) assert_equal(walletprocesspsbt_out['complete'], True) self.nodes[1].sendrawtransaction(self.nodes[1].finalizepsbt( walletprocesspsbt_out['psbt'])['hex']) self.log.info( "Test walletcreatefundedpsbt fee rate of 10000 sat/vB and 0.1 SYS/kvB produces a total fee at or slightly below -maxtxfee (~0.05290000)" ) res1 = self.nodes[1].walletcreatefundedpsbt(inputs, outputs, 0, { "fee_rate": 10000, "add_inputs": True }) assert_approx(res1["fee"], 0.055, 0.005) res2 = self.nodes[1].walletcreatefundedpsbt(inputs, outputs, 0, { "feeRate": "0.1", "add_inputs": True }) assert_approx(res2["fee"], 0.055, 0.005) self.log.info( "Test min fee rate checks with walletcreatefundedpsbt are bypassed, e.g. a fee_rate under 1 sat/vB is allowed" ) res3 = self.nodes[1].walletcreatefundedpsbt(inputs, outputs, 0, { "fee_rate": "0.999", "add_inputs": True }) assert_approx(res3["fee"], 0.00000381, 0.0000001) res4 = self.nodes[1].walletcreatefundedpsbt(inputs, outputs, 0, { "feeRate": 0.00000999, "add_inputs": True }) assert_approx(res4["fee"], 0.00000381, 0.0000001) self.log.info( "Test min fee rate checks with walletcreatefundedpsbt are bypassed and that funding non-standard 'zero-fee' transactions is valid" ) for param, zero_value in product( ["fee_rate", "feeRate"], [0, 0.000, 0.00000000, "0", "0.000", "0.00000000"]): assert_equal( 0, self.nodes[1].walletcreatefundedpsbt(inputs, outputs, 0, { param: zero_value, "add_inputs": True })["fee"]) self.log.info("Test invalid fee rate settings") for param, value in {("fee_rate", 100000), ("feeRate", 1)}: assert_raises_rpc_error( -4, "Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)", self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, { param: value, "add_inputs": True }) assert_raises_rpc_error(-3, "Amount out of range", self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, { param: -1, "add_inputs": True }) assert_raises_rpc_error(-3, "Amount is not a number or string", self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, { param: { "foo": "bar" }, "add_inputs": True }) # Test fee rate values that don't pass fixed-point parsing checks. 
for invalid_value in [ "", 0.000000001, 1e-09, 1.111111111, 1111111111111111, "31.999999999999999999999" ]: assert_raises_rpc_error(-3, "Invalid amount", self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, { param: invalid_value, "add_inputs": True }) # Test fee_rate values that cannot be represented in sat/vB. for invalid_value in [ 0.0001, 0.00000001, 0.00099999, 31.99999999, "0.0001", "0.00000001", "0.00099999", "31.99999999" ]: assert_raises_rpc_error(-3, "Invalid amount", self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, { "fee_rate": invalid_value, "add_inputs": True }) self.log.info( "- raises RPC error if both feeRate and fee_rate are passed") assert_raises_rpc_error( -8, "Cannot specify both fee_rate (sat/vB) and feeRate (SYS/kvB)", self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, { "fee_rate": 0.1, "feeRate": 0.1, "add_inputs": True }) self.log.info( "- raises RPC error if both feeRate and estimate_mode passed") assert_raises_rpc_error( -8, "Cannot specify both estimate_mode and feeRate", self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, { "estimate_mode": "economical", "feeRate": 0.1, "add_inputs": True }) for param in ["feeRate", "fee_rate"]: self.log.info( "- raises RPC error if both {} and conf_target are passed". format(param)) assert_raises_rpc_error( -8, "Cannot specify both conf_target and {}. Please provide either a confirmation " "target in blocks for automatic fee estimation, or an explicit fee rate." .format(param), self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, { param: 1, "conf_target": 1, "add_inputs": True }) self.log.info( "- raises RPC error if both fee_rate and estimate_mode are passed") assert_raises_rpc_error( -8, "Cannot specify both estimate_mode and fee_rate", self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, { "fee_rate": 1, "estimate_mode": "economical", "add_inputs": True }) self.log.info("- raises RPC error with invalid estimate_mode settings") for k, v in {"number": 42, "object": {"foo": "bar"}}.items(): assert_raises_rpc_error( -3, "Expected type string for estimate_mode, got {}".format(k), self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, { "estimate_mode": v, "conf_target": 0.1, "add_inputs": True }) for mode in ["", "foo", Decimal("3.141592")]: assert_raises_rpc_error( -8, 'Invalid estimate_mode parameter, must be one of: "unset", "economical", "conservative"', self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, { "estimate_mode": mode, "conf_target": 0.1, "add_inputs": True }) self.log.info("- raises RPC error with invalid conf_target settings") for mode in ["unset", "economical", "conservative"]: self.log.debug("{}".format(mode)) for k, v in {"string": "", "object": {"foo": "bar"}}.items(): assert_raises_rpc_error( -3, "Expected type number for conf_target, got {}".format(k), self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, { "estimate_mode": mode, "conf_target": v, "add_inputs": True }) for n in [-1, 0, 1009]: assert_raises_rpc_error( -8, "Invalid conf_target, must be between 1 and 1008", # max value of 1008 per src/policy/fees.h self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, { "estimate_mode": mode, "conf_target": n, "add_inputs": True }) self.log.info( "Test walletcreatefundedpsbt with too-high fee rate produces total fee well above -maxtxfee and raises RPC error" ) # previously this was silently capped at -maxtxfee for bool_add, outputs_array in { True: outputs, False: [{ self.nodes[1].getnewaddress(): 1 }] }.items(): msg = "Fee exceeds maximum configured by 
user (e.g. -maxtxfee, maxfeerate)" assert_raises_rpc_error(-4, msg, self.nodes[1].walletcreatefundedpsbt, inputs, outputs_array, 0, { "fee_rate": 1000000, "add_inputs": bool_add }) assert_raises_rpc_error(-4, msg, self.nodes[1].walletcreatefundedpsbt, inputs, outputs_array, 0, { "feeRate": 1, "add_inputs": bool_add }) self.log.info("Test various PSBT operations") # partially sign multisig things with node 1 psbtx = wmulti.walletcreatefundedpsbt( inputs=[{ "txid": txid, "vout": p2wsh_pos }, { "txid": txid, "vout": p2sh_pos }, { "txid": txid, "vout": p2sh_p2wsh_pos }], outputs={self.nodes[1].getnewaddress(): 29.99}, options={'changeAddress': self.nodes[1].getrawchangeaddress()})['psbt'] walletprocesspsbt_out = self.nodes[1].walletprocesspsbt(psbtx) psbtx = walletprocesspsbt_out['psbt'] assert_equal(walletprocesspsbt_out['complete'], False) # Unload wmulti, we don't need it anymore wmulti.unloadwallet() # partially sign with node 2. This should be complete and sendable walletprocesspsbt_out = self.nodes[2].walletprocesspsbt(psbtx) assert_equal(walletprocesspsbt_out['complete'], True) self.nodes[2].sendrawtransaction(self.nodes[2].finalizepsbt( walletprocesspsbt_out['psbt'])['hex']) # check that walletprocesspsbt fails to decode a non-psbt rawtx = self.nodes[1].createrawtransaction( [{ "txid": txid, "vout": p2wpkh_pos }], {self.nodes[1].getnewaddress(): 9.99}) assert_raises_rpc_error(-22, "TX decode failed", self.nodes[1].walletprocesspsbt, rawtx) # Convert a non-psbt to psbt and make sure we can decode it rawtx = self.nodes[0].createrawtransaction( [], {self.nodes[1].getnewaddress(): 10}) rawtx = self.nodes[0].fundrawtransaction(rawtx) new_psbt = self.nodes[0].converttopsbt(rawtx['hex']) self.nodes[0].decodepsbt(new_psbt) # Make sure that a non-psbt with signatures cannot be converted # Error could be either "TX decode failed" (segwit inputs causes parsing to fail) or "Inputs must not have scriptSigs and scriptWitnesses" # We must set iswitness=True because the serialized transaction has inputs and is therefore a witness transaction signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx['hex']) assert_raises_rpc_error(-22, "", self.nodes[0].converttopsbt, hexstring=signedtx['hex'], iswitness=True) assert_raises_rpc_error(-22, "", self.nodes[0].converttopsbt, hexstring=signedtx['hex'], permitsigdata=False, iswitness=True) # Unless we allow it to convert and strip signatures self.nodes[0].converttopsbt(signedtx['hex'], True) # Explicitly allow converting non-empty txs new_psbt = self.nodes[0].converttopsbt(rawtx['hex']) self.nodes[0].decodepsbt(new_psbt) # Create outputs to nodes 1 and 2 node1_addr = self.nodes[1].getnewaddress() node2_addr = self.nodes[2].getnewaddress() txid1 = self.nodes[0].sendtoaddress(node1_addr, 13) txid2 = self.nodes[0].sendtoaddress(node2_addr, 13) blockhash = self.generate(self.nodes[0], 6)[0] vout1 = find_output(self.nodes[1], txid1, 13, blockhash=blockhash) vout2 = find_output(self.nodes[2], txid2, 13, blockhash=blockhash) # Create a psbt spending outputs from nodes 1 and 2 psbt_orig = self.nodes[0].createpsbt( [{ "txid": txid1, "vout": vout1 }, { "txid": txid2, "vout": vout2 }], {self.nodes[0].getnewaddress(): 25.999}) # Update psbts, should only have data for one input and not the other psbt1 = self.nodes[1].walletprocesspsbt(psbt_orig, False, "ALL")['psbt'] psbt1_decoded = self.nodes[0].decodepsbt(psbt1) assert psbt1_decoded['inputs'][0] and not psbt1_decoded['inputs'][1] # Check that BIP32 path was added assert "bip32_derivs" in psbt1_decoded['inputs'][0] 
psbt2 = self.nodes[2].walletprocesspsbt(psbt_orig, False, "ALL", False)['psbt'] psbt2_decoded = self.nodes[0].decodepsbt(psbt2) assert not psbt2_decoded['inputs'][0] and psbt2_decoded['inputs'][1] # Check that BIP32 paths were not added assert "bip32_derivs" not in psbt2_decoded['inputs'][1] # Sign PSBTs (workaround issue #18039) psbt1 = self.nodes[1].walletprocesspsbt(psbt_orig)['psbt'] psbt2 = self.nodes[2].walletprocesspsbt(psbt_orig)['psbt'] # Combine, finalize, and send the psbts combined = self.nodes[0].combinepsbt([psbt1, psbt2]) finalized = self.nodes[0].finalizepsbt(combined)['hex'] self.nodes[0].sendrawtransaction(finalized) self.generate(self.nodes[0], 6) # Test additional args in walletcreatepsbt # Make sure both pre-included and funded inputs # have the correct sequence numbers based on # replaceable arg block_height = self.nodes[0].getblockcount() unspent = self.nodes[0].listunspent()[0] psbtx_info = self.nodes[0].walletcreatefundedpsbt( [{ "txid": unspent["txid"], "vout": unspent["vout"] }], [{ self.nodes[2].getnewaddress(): unspent["amount"] + 1 }], block_height + 2, { "replaceable": False, "add_inputs": True }, False) decoded_psbt = self.nodes[0].decodepsbt(psbtx_info["psbt"]) for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]): assert_greater_than(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE) assert "bip32_derivs" not in psbt_in assert_equal(decoded_psbt["tx"]["locktime"], block_height + 2) # Same construction with only locktime set and RBF explicitly enabled psbtx_info = self.nodes[0].walletcreatefundedpsbt( [{ "txid": unspent["txid"], "vout": unspent["vout"] }], [{ self.nodes[2].getnewaddress(): unspent["amount"] + 1 }], block_height, { "replaceable": True, "add_inputs": True }, True) decoded_psbt = self.nodes[0].decodepsbt(psbtx_info["psbt"]) for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]): assert_equal(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE) assert "bip32_derivs" in psbt_in assert_equal(decoded_psbt["tx"]["locktime"], block_height) # Same construction without optional arguments psbtx_info = self.nodes[0].walletcreatefundedpsbt( [], [{ self.nodes[2].getnewaddress(): unspent["amount"] + 1 }]) decoded_psbt = self.nodes[0].decodepsbt(psbtx_info["psbt"]) for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]): assert_equal(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE) assert "bip32_derivs" in psbt_in assert_equal(decoded_psbt["tx"]["locktime"], 0) # Same construction without optional arguments, for a node with -walletrbf=0 unspent1 = self.nodes[1].listunspent()[0] psbtx_info = self.nodes[1].walletcreatefundedpsbt( [{ "txid": unspent1["txid"], "vout": unspent1["vout"] }], [{ self.nodes[2].getnewaddress(): unspent1["amount"] + 1 }], block_height, {"add_inputs": True}) decoded_psbt = self.nodes[1].decodepsbt(psbtx_info["psbt"]) for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]): assert_greater_than(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE) assert "bip32_derivs" in psbt_in # Make sure change address wallet does not have P2SH innerscript access to results in success # when attempting BnB coin selection self.nodes[0].walletcreatefundedpsbt( [], [{ self.nodes[2].getnewaddress(): unspent["amount"] + 1 }], block_height + 2, {"changeAddress": self.nodes[1].getnewaddress()}, False) # Make sure the wallet's change type is respected by default small_output = {self.nodes[0].getnewaddress(): 0.1} psbtx_native = self.nodes[0].walletcreatefundedpsbt([], [small_output]) 
self.assert_change_type(psbtx_native, "witness_v0_keyhash") psbtx_legacy = self.nodes[1].walletcreatefundedpsbt([], [small_output]) self.assert_change_type(psbtx_legacy, "pubkeyhash") # Make sure the change type of the wallet can also be overwritten psbtx_np2wkh = self.nodes[1].walletcreatefundedpsbt( [], [small_output], 0, {"change_type": "p2sh-segwit"}) self.assert_change_type(psbtx_np2wkh, "scripthash") # Make sure the change type cannot be specified if a change address is given invalid_options = { "change_type": "legacy", "changeAddress": self.nodes[0].getnewaddress() } assert_raises_rpc_error( -8, "both change address and address type options", self.nodes[0].walletcreatefundedpsbt, [], [small_output], 0, invalid_options) # Regression test for 14473 (mishandling of already-signed witness transaction): psbtx_info = self.nodes[0].walletcreatefundedpsbt( [{ "txid": unspent["txid"], "vout": unspent["vout"] }], [{ self.nodes[2].getnewaddress(): unspent["amount"] + 1 }], 0, {"add_inputs": True}) complete_psbt = self.nodes[0].walletprocesspsbt(psbtx_info["psbt"]) double_processed_psbt = self.nodes[0].walletprocesspsbt( complete_psbt["psbt"]) assert_equal(complete_psbt, double_processed_psbt) # We don't care about the decode result, but decoding must succeed. self.nodes[0].decodepsbt(double_processed_psbt["psbt"]) # Make sure unsafe inputs are included if specified self.nodes[2].createwallet(wallet_name="unsafe") wunsafe = self.nodes[2].get_wallet_rpc("unsafe") self.nodes[0].sendtoaddress(wunsafe.getnewaddress(), 2) self.sync_mempools() assert_raises_rpc_error(-4, "Insufficient funds", wunsafe.walletcreatefundedpsbt, [], [{ self.nodes[0].getnewaddress(): 1 }]) wunsafe.walletcreatefundedpsbt([], [{ self.nodes[0].getnewaddress(): 1 }], 0, {"include_unsafe": True}) # BIP 174 Test Vectors # Check that unknown values are just passed through unknown_psbt = "cHNidP8BAD8CAAAAAf//////////////////////////////////////////AAAAAAD/////AQAAAAAAAAAAA2oBAAAAAAAACg8BAgMEBQYHCAkPAQIDBAUGBwgJCgsMDQ4PAAA=" unknown_out = self.nodes[0].walletprocesspsbt(unknown_psbt)['psbt'] assert_equal(unknown_psbt, unknown_out) # Open the data file with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data/rpc_psbt.json'), encoding='utf-8') as f: d = json.load(f) invalids = d['invalid'] valids = d['valid'] creators = d['creator'] signers = d['signer'] combiners = d['combiner'] finalizers = d['finalizer'] extractors = d['extractor'] # Invalid PSBTs for invalid in invalids: assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].decodepsbt, invalid) # Valid PSBTs for valid in valids: self.nodes[0].decodepsbt(valid) # Creator Tests for creator in creators: created_tx = self.nodes[0].createpsbt(creator['inputs'], creator['outputs']) assert_equal(created_tx, creator['result']) # Signer tests for i, signer in enumerate(signers): self.nodes[2].createwallet(wallet_name="wallet{}".format(i)) wrpc = self.nodes[2].get_wallet_rpc("wallet{}".format(i)) for key in signer['privkeys']: wrpc.importprivkey(key) signed_tx = wrpc.walletprocesspsbt(signer['psbt'])['psbt'] assert_equal(signed_tx, signer['result']) # Combiner test for combiner in combiners: combined = self.nodes[2].combinepsbt(combiner['combine']) assert_equal(combined, combiner['result']) # Empty combiner test assert_raises_rpc_error(-8, "Parameter 'txs' cannot be empty", self.nodes[0].combinepsbt, []) # Finalizer test for finalizer in finalizers: finalized = self.nodes[2].finalizepsbt(finalizer['finalize'], False)['psbt'] assert_equal(finalized, 
finalizer['result']) # Extractor test for extractor in extractors: extracted = self.nodes[2].finalizepsbt(extractor['extract'], True)['hex'] assert_equal(extracted, extractor['result']) # Unload extra wallets for i, signer in enumerate(signers): self.nodes[2].unloadwallet("wallet{}".format(i)) # TODO: Re-enable this for segwit v1 # self.test_utxo_conversion() # Test that psbts with p2pkh outputs are created properly p2pkh = self.nodes[0].getnewaddress(address_type='legacy') psbt = self.nodes[1].walletcreatefundedpsbt([], [{ p2pkh: 1 }], 0, {"includeWatching": True}, True) self.nodes[0].decodepsbt(psbt['psbt']) # Test decoding error: invalid base64 assert_raises_rpc_error(-22, "TX decode failed invalid base64", self.nodes[0].decodepsbt, ";definitely not base64;") # Send to all types of addresses addr1 = self.nodes[1].getnewaddress("", "bech32") txid1 = self.nodes[0].sendtoaddress(addr1, 11) vout1 = find_output(self.nodes[0], txid1, 11) addr2 = self.nodes[1].getnewaddress("", "legacy") txid2 = self.nodes[0].sendtoaddress(addr2, 11) vout2 = find_output(self.nodes[0], txid2, 11) addr3 = self.nodes[1].getnewaddress("", "p2sh-segwit") txid3 = self.nodes[0].sendtoaddress(addr3, 11) vout3 = find_output(self.nodes[0], txid3, 11) self.sync_all() def test_psbt_input_keys(psbt_input, keys): """Check that the psbt input has only the expected keys.""" assert_equal(set(keys), set(psbt_input.keys())) # Create a PSBT. None of the inputs are filled initially psbt = self.nodes[1].createpsbt([{ "txid": txid1, "vout": vout1 }, { "txid": txid2, "vout": vout2 }, { "txid": txid3, "vout": vout3 }], {self.nodes[0].getnewaddress(): 32.999}) decoded = self.nodes[1].decodepsbt(psbt) test_psbt_input_keys(decoded['inputs'][0], []) test_psbt_input_keys(decoded['inputs'][1], []) test_psbt_input_keys(decoded['inputs'][2], []) # Update a PSBT with UTXOs from the node # Bech32 inputs should be filled with witness UTXO. 
Other inputs should not be filled because they are non-witness updated = self.nodes[1].utxoupdatepsbt(psbt) decoded = self.nodes[1].decodepsbt(updated) test_psbt_input_keys(decoded['inputs'][0], ['witness_utxo']) test_psbt_input_keys(decoded['inputs'][1], []) test_psbt_input_keys(decoded['inputs'][2], []) # Try again, now while providing descriptors, making P2SH-segwit work, and causing bip32_derivs and redeem_script to be filled in descs = [ self.nodes[1].getaddressinfo(addr)['desc'] for addr in [addr1, addr2, addr3] ] updated = self.nodes[1].utxoupdatepsbt(psbt=psbt, descriptors=descs) decoded = self.nodes[1].decodepsbt(updated) test_psbt_input_keys(decoded['inputs'][0], ['witness_utxo', 'bip32_derivs']) test_psbt_input_keys(decoded['inputs'][1], []) test_psbt_input_keys(decoded['inputs'][2], ['witness_utxo', 'bip32_derivs', 'redeem_script']) # Two PSBTs with a common input should not be joinable psbt1 = self.nodes[1].createpsbt( [{ "txid": txid1, "vout": vout1 }], {self.nodes[0].getnewaddress(): Decimal('10.999')}) assert_raises_rpc_error(-8, "exists in multiple PSBTs", self.nodes[1].joinpsbts, [psbt1, updated]) # Join two distinct PSBTs addr4 = self.nodes[1].getnewaddress("", "p2sh-segwit") txid4 = self.nodes[0].sendtoaddress(addr4, 5) vout4 = find_output(self.nodes[0], txid4, 5) self.generate(self.nodes[0], 6) psbt2 = self.nodes[1].createpsbt( [{ "txid": txid4, "vout": vout4 }], {self.nodes[0].getnewaddress(): Decimal('4.999')}) psbt2 = self.nodes[1].walletprocesspsbt(psbt2)['psbt'] psbt2_decoded = self.nodes[0].decodepsbt(psbt2) assert "final_scriptwitness" in psbt2_decoded['inputs'][ 0] and "final_scriptSig" in psbt2_decoded['inputs'][0] joined = self.nodes[0].joinpsbts([psbt, psbt2]) joined_decoded = self.nodes[0].decodepsbt(joined) assert len(joined_decoded['inputs']) == 4 and len( joined_decoded['outputs'] ) == 2 and "final_scriptwitness" not in joined_decoded['inputs'][ 3] and "final_scriptSig" not in joined_decoded['inputs'][3] # Check that joining shuffles the inputs and outputs # 10 attempts should be enough to get a shuffled join shuffled = False for _ in range(10): shuffled_joined = self.nodes[0].joinpsbts([psbt, psbt2]) shuffled |= joined != shuffled_joined if shuffled: break assert shuffled # Newly created PSBT needs UTXOs and updating addr = self.nodes[1].getnewaddress("", "p2sh-segwit") txid = self.nodes[0].sendtoaddress(addr, 7) addrinfo = self.nodes[1].getaddressinfo(addr) blockhash = self.generate(self.nodes[0], 6)[0] vout = find_output(self.nodes[0], txid, 7, blockhash=blockhash) psbt = self.nodes[1].createpsbt( [{ "txid": txid, "vout": vout }], {self.nodes[0].getnewaddress("", "p2sh-segwit"): Decimal('6.999')}) analyzed = self.nodes[0].analyzepsbt(psbt) assert not analyzed['inputs'][0]['has_utxo'] and not analyzed[ 'inputs'][0]['is_final'] and analyzed['inputs'][0][ 'next'] == 'updater' and analyzed['next'] == 'updater' # After update with wallet, only needs signing updated = self.nodes[1].walletprocesspsbt(psbt, False, 'ALL', True)['psbt'] analyzed = self.nodes[0].analyzepsbt(updated) assert analyzed['inputs'][0][ 'has_utxo'] and not analyzed['inputs'][0]['is_final'] and analyzed[ 'inputs'][0]['next'] == 'signer' and analyzed[ 'next'] == 'signer' and analyzed['inputs'][0]['missing'][ 'signatures'][0] == addrinfo['embedded'][ 'witness_program'] # Check fee and size things assert analyzed['fee'] == Decimal( '0.001') and analyzed['estimated_vsize'] == 134 and analyzed[ 'estimated_feerate'] == Decimal('0.00746268') # After signing and finalizing, needs extracting 
signed = self.nodes[1].walletprocesspsbt(updated)['psbt'] analyzed = self.nodes[0].analyzepsbt(signed) assert analyzed['inputs'][0]['has_utxo'] and analyzed['inputs'][0][ 'is_final'] and analyzed['next'] == 'extractor' self.log.info( "PSBT spending unspendable outputs should have error message and Creator as next" ) analysis = self.nodes[0].analyzepsbt( 'cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWAEHYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFv8/wADXYP/7//////8JxOh0LR2HAI8AAAAAAAEBIADC6wsAAAAAF2oUt/X69ELjeX2nTof+fZ10l+OyAokDAQcJAwEHEAABAACAAAEBIADC6wsAAAAAF2oUt/X69ELjeX2nTof+fZ10l+OyAokDAQcJAwEHENkMak8AAAAA' ) assert_equal(analysis['next'], 'creator') assert_equal(analysis['error'], 'PSBT is not valid. Input 0 spends unspendable output') self.log.info( "PSBT with invalid values should have error message and Creator as next" ) analysis = self.nodes[0].analyzepsbt( 'cHNidP8BAHECAAAAAfA00BFgAm6tp86RowwH6BMImQNL5zXUcTT97XoLGz0BAAAAAAD/////AgD5ApUAAAAAFgAUKNw0x8HRctAgmvoevm4u1SbN7XL87QKVAAAAABYAFPck4gF7iL4NL4wtfRAKgQbghiTUAAAAAAABAR8AgIFq49AHABYAFJUDtxf2PHo641HEOBOAIvFMNTr2AAAA' ) # SYSCOIN TODO: update values so moneyrange fails in psbt 21m vs 888m #assert_equal(analysis['next'], 'creator') #assert_equal(analysis['error'], 'PSBT is not valid. Input 0 has invalid value') self.log.info( "PSBT with signed, but not finalized, inputs should have Finalizer as next" ) analysis = self.nodes[0].analyzepsbt( 'cHNidP8BAHECAAAAAZYezcxdnbXoQCmrD79t/LzDgtUo9ERqixk8wgioAobrAAAAAAD9////AlDDAAAAAAAAFgAUy/UxxZuzZswcmFnN/E9DGSiHLUsuGPUFAAAAABYAFLsH5o0R38wXx+X2cCosTMCZnQ4baAAAAAABAR8A4fUFAAAAABYAFOBI2h5thf3+Lflb2LGCsVSZwsltIgIC/i4dtVARCRWtROG0HHoGcaVklzJUcwo5homgGkSNAnJHMEQCIGx7zKcMIGr7cEES9BR4Kdt/pzPTK3fKWcGyCJXb7MVnAiALOBgqlMH4GbC1HDh/HmylmO54fyEy4lKde7/BT/PWxwEBAwQBAAAAIgYC/i4dtVARCRWtROG0HHoGcaVklzJUcwo5homgGkSNAnIYDwVpQ1QAAIABAACAAAAAgAAAAAAAAAAAAAAiAgL+CIiB59NSCssOJRGiMYQK1chahgAaaJpIXE41Cyir+xgPBWlDVAAAgAEAAIAAAACAAQAAAAAAAAAA' ) assert_equal(analysis['next'], 'finalizer') analysis = self.nodes[0].analyzepsbt( 'cHNidP8BAHECAAAAAfA00BFgAm6tp86RowwH6BMImQNL5zXUcTT97XoLGz0BAAAAAAD/////AgCAgWrj0AcAFgAUKNw0x8HRctAgmvoevm4u1SbN7XL87QKVAAAAABYAFPck4gF7iL4NL4wtfRAKgQbghiTUAAAAAAABAR8A8gUqAQAAABYAFJUDtxf2PHo641HEOBOAIvFMNTr2AAAA' ) #assert_equal(analysis['next'], 'creator') #assert_equal(analysis['error'], 'PSBT is not valid. Output amount invalid') analysis = self.nodes[0].analyzepsbt( 'cHNidP8BAJoCAAAAAkvEW8NnDtdNtDpsmze+Ht2LH35IJcKv00jKAlUs21RrAwAAAAD/////S8Rbw2cO1020OmybN74e3Ysffkglwq/TSMoCVSzbVGsBAAAAAP7///8CwLYClQAAAAAWABSNJKzjaUb3uOxixsvh1GGE3fW7zQD5ApUAAAAAFgAUKNw0x8HRctAgmvoevm4u1SbN7XIAAAAAAAEAnQIAAAACczMa321tVHuN4GKWKRncycI22aX3uXgwSFUKM2orjRsBAAAAAP7///9zMxrfbW1Ue43gYpYpGdzJwjbZpfe5eDBIVQozaiuNGwAAAAAA/v///wIA+QKVAAAAABl2qRT9zXUVA8Ls5iVqynLHe5/vSe1XyYisQM0ClQAAAAAWABRmWQUcjSjghQ8/uH4Bn/zkakwLtAAAAAAAAQEfQM0ClQAAAAAWABRmWQUcjSjghQ8/uH4Bn/zkakwLtAAAAA==' ) assert_equal(analysis['next'], 'creator') assert_equal(analysis['error'], 'PSBT is not valid. 
Input 0 specifies invalid prevout') assert_raises_rpc_error( -25, 'Inputs missing or spent', self.nodes[0].walletprocesspsbt, 'cHNidP8BAJoCAAAAAkvEW8NnDtdNtDpsmze+Ht2LH35IJcKv00jKAlUs21RrAwAAAAD/////S8Rbw2cO1020OmybN74e3Ysffkglwq/TSMoCVSzbVGsBAAAAAP7///8CwLYClQAAAAAWABSNJKzjaUb3uOxixsvh1GGE3fW7zQD5ApUAAAAAFgAUKNw0x8HRctAgmvoevm4u1SbN7XIAAAAAAAEAnQIAAAACczMa321tVHuN4GKWKRncycI22aX3uXgwSFUKM2orjRsBAAAAAP7///9zMxrfbW1Ue43gYpYpGdzJwjbZpfe5eDBIVQozaiuNGwAAAAAA/v///wIA+QKVAAAAABl2qRT9zXUVA8Ls5iVqynLHe5/vSe1XyYisQM0ClQAAAAAWABRmWQUcjSjghQ8/uH4Bn/zkakwLtAAAAAAAAQEfQM0ClQAAAAAWABRmWQUcjSjghQ8/uH4Bn/zkakwLtAAAAA==' ) # Test that we can fund psbts with external inputs specified eckey = ECKey() eckey.generate() privkey = bytes_to_wif(eckey.get_bytes()) # Make a weird but signable script. sh(pkh()) descriptor accomplishes this desc = descsum_create("sh(pkh({}))".format(privkey)) if self.options.descriptors: res = self.nodes[0].importdescriptors([{ "desc": desc, "timestamp": "now" }]) else: res = self.nodes[0].importmulti([{ "desc": desc, "timestamp": "now" }]) assert res[0]["success"] addr = self.nodes[0].deriveaddresses(desc)[0] addr_info = self.nodes[0].getaddressinfo(addr) self.nodes[0].sendtoaddress(addr, 10) self.generate(self.nodes[0], 6) ext_utxo = self.nodes[0].listunspent(addresses=[addr])[0] # An external input without solving data should result in an error assert_raises_rpc_error( -4, "Insufficient funds", self.nodes[1].walletcreatefundedpsbt, [ext_utxo], {self.nodes[0].getnewaddress(): 10 + ext_utxo['amount']}, 0, {'add_inputs': True}) # But funding should work when the solving data is provided psbt = self.nodes[1].walletcreatefundedpsbt( [ext_utxo], {self.nodes[0].getnewaddress(): 15}, 0, { 'add_inputs': True, "solving_data": { "pubkeys": [addr_info['pubkey']], "scripts": [addr_info["embedded"]["scriptPubKey"]] } }) signed = self.nodes[1].walletprocesspsbt(psbt['psbt']) assert not signed['complete'] signed = self.nodes[0].walletprocesspsbt(signed['psbt']) assert signed['complete'] self.nodes[0].finalizepsbt(signed['psbt']) psbt = self.nodes[1].walletcreatefundedpsbt( [ext_utxo], {self.nodes[0].getnewaddress(): 15}, 0, { 'add_inputs': True, "solving_data": { "descriptors": [desc] } }) signed = self.nodes[1].walletprocesspsbt(psbt['psbt']) assert not signed['complete'] signed = self.nodes[0].walletprocesspsbt(signed['psbt']) assert signed['complete'] self.nodes[0].finalizepsbt(signed['psbt'])
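# ----------------------------------------------------------------------
# Illustrative sketch (comments only, not executed by this test): the
# plain four-step PSBT round trip that the walletcreatefundedpsbt /
# walletprocesspsbt / finalizepsbt calls above build on. `node` and
# `dest` are placeholder names, not objects defined in this file.
#
#   funded = node.walletcreatefundedpsbt([], [{dest: 1}])   # Creator / Funder
#   signed = node.walletprocesspsbt(funded['psbt'])         # Updater / Signer
#   final = node.finalizepsbt(signed['psbt'])               # Finalizer / Extractor
#   if final['complete']:
#       node.sendrawtransaction(final['hex'])               # Broadcast
# ----------------------------------------------------------------------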
def prepare_tx_signed_with_sighash(self, address_type, sighash_rangeproof_aware): # Create a tx that is signed with a specific version of the sighash # method. # If `sighash_rangeproof_aware` is # true, the sighash will contain the rangeproofs if SIGHASH_RANGEPROOF is set # false, the sighash will NOT contain the rangeproofs if SIGHASH_RANGEPROOF is set addr = self.nodes[1].getnewaddress("", address_type) assert len(self.nodes[1].getaddressinfo(addr)["confidential_key"]) > 0 self.nodes[0].sendtoaddress(addr, 1.0) self.nodes[0].generate(1) self.sync_all() utxo = self.nodes[1].listunspent(1, 1, [addr])[0] utxo_tx = FromHex(CTransaction(), self.nodes[1].getrawtransaction(utxo["txid"])) utxo_spk = CScript(hex_str_to_bytes(utxo["scriptPubKey"])) utxo_value = utxo_tx.vout[utxo["vout"]].nValue assert len(utxo["amountblinder"]) > 0 sink_addr = self.nodes[2].getnewaddress() unsigned_hex = self.nodes[1].createrawtransaction([{ "txid": utxo["txid"], "vout": utxo["vout"] }], { sink_addr: 0.9, "fee": 0.1 }) blinded_hex = self.nodes[1].blindrawtransaction(unsigned_hex) blinded_tx = FromHex(CTransaction(), blinded_hex) signed_hex = self.nodes[1].signrawtransactionwithwallet( blinded_hex)["hex"] signed_tx = FromHex(CTransaction(), signed_hex) # Make sure that the tx the node produced is always valid. test_accept = self.nodes[0].testmempoolaccept([signed_hex])[0] assert test_accept["allowed"], "not accepted: {}".format( test_accept["reject-reason"]) # Prepare the keypair we need to re-sign the tx. wif = self.nodes[1].dumpprivkey(addr) (b, v) = base58_to_byte(wif) privkey = ECKey() privkey.set(b[0:32], len(b) == 33) pubkey = privkey.get_pubkey() # Now we need to replace the signature with an equivalent one with the new sighash set. hashtype = SIGHASH_ALL | SIGHASH_RANGEPROOF if address_type == "legacy": if sighash_rangeproof_aware: (sighash, _) = LegacySignatureHash(utxo_spk, blinded_tx, 0, hashtype) else: (sighash, _) = LegacySignatureHash(utxo_spk, blinded_tx, 0, hashtype, enable_sighash_rangeproof=False) signature = privkey.sign_ecdsa(sighash) + chr(hashtype).encode( 'latin-1') assert len(signature) <= 0xfc assert len(pubkey.get_bytes()) <= 0xfc signed_tx.vin[0].scriptSig = CScript( struct.pack("<B", len(signature)) + signature + struct.pack("<B", len(pubkey.get_bytes())) + pubkey.get_bytes()) elif address_type == "bech32" or address_type == "p2sh-segwit": assert signed_tx.wit.vtxinwit[0].scriptWitness.stack[ 1] == pubkey.get_bytes() pubkeyhash = hash160(pubkey.get_bytes()) script = get_p2pkh_script(pubkeyhash) if sighash_rangeproof_aware: sighash = SegwitV0SignatureHash(script, blinded_tx, 0, hashtype, utxo_value) else: sighash = SegwitV0SignatureHash( script, blinded_tx, 0, hashtype, utxo_value, enable_sighash_rangeproof=False) signature = privkey.sign_ecdsa(sighash) + chr(hashtype).encode( 'latin-1') signed_tx.wit.vtxinwit[0].scriptWitness.stack[0] = signature else: assert False signed_tx.rehash() return signed_tx
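# Illustrative usage sketch for the helper above (comments only; the node
# index and address type are placeholders). Whether the non-rangeproof-aware
# variant is accepted or rejected depends on whether SIGHASH_RANGEPROOF
# enforcement is active on the node under test.
#
#   tx = self.prepare_tx_signed_with_sighash("bech32", sighash_rangeproof_aware=True)
#   res = self.nodes[0].testmempoolaccept([tx.serialize().hex()])[0]
#   assert res["allowed"], res.get("reject-reason")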
class FullBlockSigOpsTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.setup_clean_chain = True self.extra_args = [['-noparkdeepreorg', '-maxreorgdepth=-1', '-phononactivationtime={}'.format(SIGOPS_DEACTIVATION_TIME)]] def run_test(self): self.bootstrap_p2p() # Add one p2p connection to the node self.block_heights = {} self.coinbase_key = ECKey() self.coinbase_key.generate() self.coinbase_pubkey = self.coinbase_key.get_pubkey().get_bytes() self.tip = None self.blocks = {} self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16) self.block_heights[self.genesis_hash] = 0 self.spendable_outputs = [] # Create a new block b0 = self.next_block(0) self.save_spendable_output() self.sync_blocks([b0]) # Allow the block to mature blocks = [] for i in range(129): blocks.append(self.next_block(5000 + i)) self.save_spendable_output() self.sync_blocks(blocks) # collect spendable outputs now to avoid cluttering the code later on out = [] for i in range(33): out.append(self.get_spendable_output()) # Start by building a block on top. # setup -> b13 (0) b13 = self.next_block(13, spend=out[0]) self.save_spendable_output() self.sync_blocks([b13]) # Add a block with MAX_BLOCK_SIGOPS_PER_MB and one with one more sigop # setup -> b13 (0) -> b15 (5) -> b16 (6) self.log.info("Accept a block with lots of checksigs") lots_of_checksigs = CScript( [OP_CHECKSIG] * (MAX_BLOCK_SIGOPS_PER_MB - 1)) self.move_tip(13) b15 = self.next_block(15, spend=out[5], script=lots_of_checksigs) self.save_spendable_output() self.sync_blocks([b15], True) self.log.info("Reject a block with too many checksigs") too_many_checksigs = CScript([OP_CHECKSIG] * (MAX_BLOCK_SIGOPS_PER_MB)) b16 = self.next_block(16, spend=out[6], script=too_many_checksigs) self.sync_blocks([b16], success=False, reject_reason='bad-blk-sigops', reconnect=True) self.move_tip(15) # ... skipped feature_block tests ... # b31 - b35 - check sigops of OP_CHECKMULTISIG / OP_CHECKMULTISIGVERIFY / OP_CHECKSIGVERIFY # # setup -> ... b15 (5) -> b31 (8) -> b33 (9) -> b35 (10) # \-> b36 (11) # \-> b34 (10) # \-> b32 (9) # # MULTISIG: each op code counts as 20 sigops. To create the edge case, # pack another 19 sigops at the end. 
self.log.info( "Accept a block with the max number of OP_CHECKMULTISIG sigops") lots_of_multisigs = CScript( [OP_CHECKMULTISIG] * ((MAX_BLOCK_SIGOPS_PER_MB - 1) // 20) + [OP_CHECKSIG] * 19) b31 = self.next_block(31, spend=out[8], script=lots_of_multisigs) assert_equal(get_legacy_sigopcount_block(b31), MAX_BLOCK_SIGOPS_PER_MB) self.sync_blocks([b31], True) self.save_spendable_output() # this goes over the limit because the coinbase has one sigop self.log.info("Reject a block with too many OP_CHECKMULTISIG sigops") too_many_multisigs = CScript( [OP_CHECKMULTISIG] * (MAX_BLOCK_SIGOPS_PER_MB // 20)) b32 = self.next_block(32, spend=out[9], script=too_many_multisigs) assert_equal(get_legacy_sigopcount_block( b32), MAX_BLOCK_SIGOPS_PER_MB + 1) self.sync_blocks([b32], success=False, reject_reason='bad-blk-sigops', reconnect=True) # CHECKMULTISIGVERIFY self.log.info( "Accept a block with the max number of OP_CHECKMULTISIGVERIFY sigops") self.move_tip(31) lots_of_multisigs = CScript( [OP_CHECKMULTISIGVERIFY] * ((MAX_BLOCK_SIGOPS_PER_MB - 1) // 20) + [OP_CHECKSIG] * 19) b33 = self.next_block(33, spend=out[9], script=lots_of_multisigs) self.sync_blocks([b33], True) self.save_spendable_output() self.log.info( "Reject a block with too many OP_CHECKMULTISIGVERIFY sigops") too_many_multisigs = CScript( [OP_CHECKMULTISIGVERIFY] * (MAX_BLOCK_SIGOPS_PER_MB // 20)) b34 = self.next_block(34, spend=out[10], script=too_many_multisigs) self.sync_blocks([b34], success=False, reject_reason='bad-blk-sigops', reconnect=True) # CHECKSIGVERIFY self.log.info( "Accept a block with the max number of OP_CHECKSIGVERIFY sigops") self.move_tip(33) lots_of_checksigs = CScript( [OP_CHECKSIGVERIFY] * (MAX_BLOCK_SIGOPS_PER_MB - 1)) b35 = self.next_block(35, spend=out[10], script=lots_of_checksigs) self.sync_blocks([b35], True) self.save_spendable_output() self.log.info("Reject a block with too many OP_CHECKSIGVERIFY sigops") too_many_checksigs = CScript( [OP_CHECKSIGVERIFY] * (MAX_BLOCK_SIGOPS_PER_MB)) b36 = self.next_block(36, spend=out[11], script=too_many_checksigs) self.sync_blocks([b36], success=False, reject_reason='bad-blk-sigops', reconnect=True) # ... skipped feature_block tests ... # Check P2SH SigOp counting # # # ... 
-> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b41 (12) # \-> b40 (12) # # b39 - create some P2SH outputs that will require 6 sigops to spend: # # redeem_script = COINBASE_PUBKEY, (OP_2DUP+OP_CHECKSIGVERIFY) * 5, OP_CHECKSIG # p2sh_script = OP_HASH160, ripemd160(sha256(script)), OP_EQUAL # self.log.info("Check P2SH SIGOPS are correctly counted") self.move_tip(35) b39 = self.next_block(39) b39_outputs = 0 b39_sigops_per_output = 6 # Build the redeem script, hash it, use hash to create the p2sh script redeem_script = CScript([self.coinbase_pubkey] + [ OP_2DUP, OP_CHECKSIGVERIFY] * 5 + [OP_CHECKSIG]) redeem_script_hash = hash160(redeem_script) p2sh_script = CScript([OP_HASH160, redeem_script_hash, OP_EQUAL]) # Create a transaction that spends one satoshi to the p2sh_script, the rest to OP_TRUE # This must be signed because it is spending a coinbase spend = out[11] tx = self.create_tx(spend, 0, 1, p2sh_script) tx.vout.append(CTxOut(spend.vout[0].nValue - 1, CScript([OP_TRUE]))) self.sign_tx(tx, spend) tx.rehash() b39 = self.update_block(39, [tx]) b39_outputs += 1 # Until block is full, add tx's with 1 satoshi to p2sh_script, the rest # to OP_TRUE tx_new = None tx_last = tx tx_last_n = len(tx.vout) - 1 total_size = len(b39.serialize()) while(total_size < LEGACY_MAX_BLOCK_SIZE): tx_new = self.create_tx(tx_last, tx_last_n, 1, p2sh_script) tx_new.vout.append( CTxOut(tx_last.vout[tx_last_n].nValue - 1, CScript([OP_TRUE]))) tx_new.rehash() total_size += len(tx_new.serialize()) if total_size >= LEGACY_MAX_BLOCK_SIZE: break b39.vtx.append(tx_new) # add tx to block tx_last = tx_new tx_last_n = len(tx_new.vout) - 1 b39_outputs += 1 b39 = self.update_block(39, []) self.sync_blocks([b39], True) self.save_spendable_output() # Test sigops in P2SH redeem scripts # # b40 creates 3333 tx's spending the 6-sigop P2SH outputs from b39 for a total of 19998 sigops. # The first tx has one sigop and then at the end we add 2 more to put us just over the max. # # b41 does the same, less one, so it has the maximum sigops permitted. 
# self.log.info("Reject a block with too many P2SH sigops") self.move_tip(39) b40 = self.next_block(40, spend=out[12]) sigops = get_legacy_sigopcount_block(b40) numTxs = (MAX_BLOCK_SIGOPS_PER_MB - sigops) // b39_sigops_per_output assert_equal(numTxs <= b39_outputs, True) lastOutpoint = COutPoint(b40.vtx[1].sha256, 0) lastAmount = b40.vtx[1].vout[0].nValue new_txs = [] for i in range(1, numTxs + 1): tx = CTransaction() tx.vout.append(CTxOut(1, CScript([OP_TRUE]))) tx.vin.append(CTxIn(lastOutpoint, b'')) # second input is corresponding P2SH output from b39 tx.vin.append(CTxIn(COutPoint(b39.vtx[i].sha256, 0), b'')) # Note: must pass the redeem_script (not p2sh_script) to the # signature hash function sighash = SignatureHashForkId( redeem_script, tx, 1, SIGHASH_ALL | SIGHASH_FORKID, lastAmount) sig = self.coinbase_key.sign_ecdsa( sighash) + bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID])) scriptSig = CScript([sig, redeem_script]) tx.vin[1].scriptSig = scriptSig pad_tx(tx) tx.rehash() new_txs.append(tx) lastOutpoint = COutPoint(tx.sha256, 0) lastAmount = tx.vout[0].nValue b40_sigops_to_fill = MAX_BLOCK_SIGOPS_PER_MB - \ (numTxs * b39_sigops_per_output + sigops) + 1 tx = CTransaction() tx.vin.append(CTxIn(lastOutpoint, b'')) tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b40_sigops_to_fill))) pad_tx(tx) tx.rehash() new_txs.append(tx) self.update_block(40, new_txs) self.sync_blocks([b40], success=False, reject_reason='bad-blk-sigops', reconnect=True) # same as b40, but one less sigop self.log.info("Accept a block with the max number of P2SH sigops") self.move_tip(39) b41 = self.next_block(41, spend=None) self.update_block(41, [b40tx for b40tx in b40.vtx[1:] if b40tx != tx]) b41_sigops_to_fill = b40_sigops_to_fill - 1 tx = CTransaction() tx.vin.append(CTxIn(lastOutpoint, b'')) tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b41_sigops_to_fill))) pad_tx(tx) self.update_block(41, [tx]) self.sync_blocks([b41], True) # ... skipped feature_block tests ... b72 = self.next_block(72) self.save_spendable_output() self.sync_blocks([b72]) # Test some invalid scripts and MAX_BLOCK_SIGOPS_PER_MB # # ..... -> b72 # \-> b** (22) # # b73 - tx with excessive sigops that are placed after an excessively large script element. # The purpose of the test is to make sure those sigops are counted. # # script is a bytearray of size 20,526 # # bytearray[0-19,998] : OP_CHECKSIG # bytearray[19,999] : OP_PUSHDATA4 # bytearray[20,000-20,003]: 521 (max_script_element_size+1, in little-endian format) # bytearray[20,004-20,525]: unread data (script_element) # bytearray[20,526] : OP_CHECKSIG (this puts us over the limit) self.log.info( "Reject a block containing too many sigops after a large script element") self.move_tip(72) b73 = self.next_block(73) size = MAX_BLOCK_SIGOPS_PER_MB - 1 + MAX_SCRIPT_ELEMENT_SIZE + 1 + 5 + 1 a = bytearray([OP_CHECKSIG] * size) a[MAX_BLOCK_SIGOPS_PER_MB - 1] = int("4e", 16) # OP_PUSHDATA4 element_size = MAX_SCRIPT_ELEMENT_SIZE + 1 a[MAX_BLOCK_SIGOPS_PER_MB] = element_size % 256 a[MAX_BLOCK_SIGOPS_PER_MB + 1] = element_size // 256 a[MAX_BLOCK_SIGOPS_PER_MB + 2] = 0 a[MAX_BLOCK_SIGOPS_PER_MB + 3] = 0 tx = self.create_and_sign_transaction(out[22], 1, CScript(a)) b73 = self.update_block(73, [tx]) assert_equal(get_legacy_sigopcount_block( b73), MAX_BLOCK_SIGOPS_PER_MB + 1) self.sync_blocks([b73], success=False, reject_reason='bad-blk-sigops', reconnect=True) # b74/75 - if we push an invalid script element, all prevous sigops are counted, # but sigops after the element are not counted. 
# # The invalid script element is that the push_data indicates that # there will be a large amount of data (0xffffff bytes), but we only # provide a much smaller number. These bytes are CHECKSIGS so they would # cause b75 to fail for excessive sigops, if those bytes were counted. # # b74 fails because we put MAX_BLOCK_SIGOPS_PER_MB+1 before the element # b75 succeeds because we put MAX_BLOCK_SIGOPS_PER_MB before the # element self.log.info( "Check sigops are counted correctly after an invalid script element") self.move_tip(72) b74 = self.next_block(74) size = MAX_BLOCK_SIGOPS_PER_MB - 1 + \ MAX_SCRIPT_ELEMENT_SIZE + 42 # total = 20,561 a = bytearray([OP_CHECKSIG] * size) a[MAX_BLOCK_SIGOPS_PER_MB] = 0x4e a[MAX_BLOCK_SIGOPS_PER_MB + 1] = 0xfe a[MAX_BLOCK_SIGOPS_PER_MB + 2] = 0xff a[MAX_BLOCK_SIGOPS_PER_MB + 3] = 0xff a[MAX_BLOCK_SIGOPS_PER_MB + 4] = 0xff tx = self.create_and_sign_transaction(out[22], 1, CScript(a)) b74 = self.update_block(74, [tx]) self.sync_blocks([b74], success=False, reject_reason='bad-blk-sigops', reconnect=True) self.move_tip(72) b75 = self.next_block(75) size = MAX_BLOCK_SIGOPS_PER_MB - 1 + MAX_SCRIPT_ELEMENT_SIZE + 42 a = bytearray([OP_CHECKSIG] * size) a[MAX_BLOCK_SIGOPS_PER_MB - 1] = 0x4e a[MAX_BLOCK_SIGOPS_PER_MB] = 0xff a[MAX_BLOCK_SIGOPS_PER_MB + 1] = 0xff a[MAX_BLOCK_SIGOPS_PER_MB + 2] = 0xff a[MAX_BLOCK_SIGOPS_PER_MB + 3] = 0xff tx = self.create_and_sign_transaction(out[22], 1, CScript(a)) b75 = self.update_block(75, [tx]) self.sync_blocks([b75], True) self.save_spendable_output() # Check that if we push an element filled with CHECKSIGs, they are not # counted self.move_tip(75) b76 = self.next_block(76) size = MAX_BLOCK_SIGOPS_PER_MB - 1 + MAX_SCRIPT_ELEMENT_SIZE + 1 + 5 a = bytearray([OP_CHECKSIG] * size) # PUSHDATA4, but leave the following bytes as just checksigs a[MAX_BLOCK_SIGOPS_PER_MB - 1] = 0x4e tx = self.create_and_sign_transaction(out[23], 1, CScript(a)) b76 = self.update_block(76, [tx]) self.sync_blocks([b76], True) self.save_spendable_output() # Helper methods ################ def add_transactions_to_block(self, block, tx_list): [tx.rehash() for tx in tx_list] block.vtx.extend(tx_list) # this is a little handier to use than the version in blocktools.py def create_tx(self, spend_tx, n, value, script=CScript([OP_TRUE])): return create_tx_with_script( spend_tx, n, amount=value, script_pub_key=script) # sign a transaction, using the key we know about # this signs input 0 in tx, which is assumed to be spending output n in # spend_tx def sign_tx(self, tx, spend_tx): scriptPubKey = bytearray(spend_tx.vout[0].scriptPubKey) if (scriptPubKey[0] == OP_TRUE): # an anyone-can-spend tx.vin[0].scriptSig = CScript() return sighash = SignatureHashForkId( spend_tx.vout[0].scriptPubKey, tx, 0, SIGHASH_ALL | SIGHASH_FORKID, spend_tx.vout[0].nValue) tx.vin[0].scriptSig = CScript( [self.coinbase_key.sign_ecdsa(sighash) + bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))]) def create_and_sign_transaction( self, spend_tx, value, script=CScript([OP_TRUE])): tx = self.create_tx(spend_tx, 0, value, script) self.sign_tx(tx, spend_tx) tx.rehash() return tx def next_block(self, number, spend=None, additional_coinbase_value=0, script=CScript([OP_TRUE]), solve=True): if self.tip is None: base_block_hash = self.genesis_hash block_time = int(time.time()) + 1 else: base_block_hash = self.tip.sha256 block_time = self.tip.nTime + 1 # First create the coinbase height = self.block_heights[base_block_hash] + 1 coinbase = create_coinbase(height, self.coinbase_pubkey) 
coinbase.vout[0].nValue += additional_coinbase_value coinbase.rehash() if spend is None: block = create_block(base_block_hash, coinbase, block_time) else: # all but one satoshi to fees coinbase.vout[0].nValue += spend.vout[0].nValue - 1 coinbase.rehash() block = create_block(base_block_hash, coinbase, block_time) # spend 1 satoshi tx = self.create_tx(spend, 0, 1, script) self.sign_tx(tx, spend) self.add_transactions_to_block(block, [tx]) block.hashMerkleRoot = block.calc_merkle_root() if solve: block.solve() self.tip = block self.block_heights[block.sha256] = height assert number not in self.blocks self.blocks[number] = block return block # save the current tip so it can be spent by a later block def save_spendable_output(self): self.log.debug("saving spendable output {}".format(self.tip.vtx[0])) self.spendable_outputs.append(self.tip) # get an output that we previously marked as spendable def get_spendable_output(self): self.log.debug("getting spendable output {}".format( self.spendable_outputs[0].vtx[0])) return self.spendable_outputs.pop(0).vtx[0] # move the tip back to a previous block def move_tip(self, number): self.tip = self.blocks[number] # adds transactions to the block and updates state def update_block(self, block_number, new_transactions, reorder=True): block = self.blocks[block_number] self.add_transactions_to_block(block, new_transactions) old_sha256 = block.sha256 if reorder: make_conform_to_ctor(block) block.hashMerkleRoot = block.calc_merkle_root() block.solve() # Update the internal state just like in next_block self.tip = block if block.sha256 != old_sha256: self.block_heights[block.sha256] = self.block_heights[old_sha256] del self.block_heights[old_sha256] self.blocks[block_number] = block return block def bootstrap_p2p(self): """Add a P2P connection to the node. Helper to connect and wait for version handshake.""" self.nodes[0].add_p2p_connection(P2PDataStore()) # We need to wait for the initial getheaders from the peer before we # start populating our blockstore. If we don't, then we may run ahead # to the next subtest before we receive the getheaders. We'd then send # an INV for the next block and receive two getheaders - one for the # IBD and one for the INV. We'd respond to both and could get # unexpectedly disconnected if the DoS score for that error is 50. self.nodes[0].p2p.wait_for_getheaders(timeout=5) def reconnect_p2p(self): """Tear down and bootstrap the P2P connection to the node. The node gets disconnected several times in this test. This helper method reconnects the p2p and restarts the network thread.""" self.nodes[0].disconnect_p2ps() self.bootstrap_p2p() def sync_blocks(self, blocks, success=True, reject_reason=None, request_block=True, reconnect=False, timeout=60): """Sends blocks to test node. Syncs and verifies that tip has advanced to most recent block. Call with success = False if the tip shouldn't advance to the most recent block.""" self.nodes[0].p2p.send_blocks_and_test(blocks, self.nodes[0], success=success, reject_reason=reject_reason, request_block=request_block, timeout=timeout, expect_disconnect=reconnect) if reconnect: self.reconnect_p2p()
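# Illustrative recap (comments only) of the P2SH pattern exercised by
# b39/b40/b41 above. Each redeem script costs 6 sigops when spent
# (5 x OP_CHECKSIGVERIFY + 1 x OP_CHECKSIG; OP_2DUP itself is not a sigop),
# while the scriptPubKey only commits to hash160(redeem_script):
#
#   redeem_script = CScript([coinbase_pubkey]
#                           + [OP_2DUP, OP_CHECKSIGVERIFY] * 5 + [OP_CHECKSIG])
#   p2sh_script = CScript([OP_HASH160, hash160(redeem_script), OP_EQUAL])
#
# Those 6 sigops are only charged once the output is spent and the redeem
# script is revealed in the scriptSig, which is why b40/b41 compute their
# sigop budget per spending input rather than counting anything in b39.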
def test_double_spend(self): ''' This tests the case where the same UTXO is spent twice on two separate blocks as part of a reorg. ab0 / \ aa1 [tx1] bb1 [tx2] | | aa2 bb2 | | aa3 bb3 | bb4 Problematic case: 1. User 1 receives BTC in tx1 from utxo1 in block aa1. 2. User 2 receives BTC in tx2 from utxo1 (same) in block bb1 3. User 1 sees 2 confirmations at block aa3. 4. Reorg into bb chain. 5. User 1 asks `listsinceblock aa3` and does not see that tx1 is now invalidated. Currently the solution to this is to detect that a reorg'd block is asked for in listsinceblock, and to iterate back over existing blocks up until the fork point, and to include all transactions that relate to the node wallet. ''' self.log.info("Test double spend") self.sync_all() # share utxo between nodes[1] and nodes[2] eckey = ECKey() eckey.generate() privkey = bytes_to_wif(eckey.get_bytes()) address = key_to_p2wpkh(eckey.get_pubkey().get_bytes()) self.nodes[2].sendtoaddress(address, 10) self.generate(self.nodes[2], 6) self.sync_all() self.nodes[2].importprivkey(privkey) utxos = self.nodes[2].listunspent() utxo = [u for u in utxos if u["address"] == address][0] self.nodes[1].importprivkey(privkey) # Split network into two self.split_network() # send from nodes[1] using utxo to nodes[0] change = '%.8f' % (float(utxo['amount']) - 1.0003) recipient_dict = { self.nodes[0].getnewaddress(): 1, self.nodes[1].getnewaddress(): change, } utxo_dicts = [{ 'txid': utxo['txid'], 'vout': utxo['vout'], }] txid1 = self.nodes[1].sendrawtransaction( self.nodes[1].signrawtransactionwithwallet( self.nodes[1].createrawtransaction(utxo_dicts, recipient_dict))['hex']) # send from nodes[2] using utxo to nodes[3] recipient_dict2 = { self.nodes[3].getnewaddress(): 1, self.nodes[2].getnewaddress(): change, } self.nodes[2].sendrawtransaction( self.nodes[2].signrawtransactionwithwallet( self.nodes[2].createrawtransaction(utxo_dicts, recipient_dict2))['hex']) # generate on both sides lastblockhash = self.generate(self.nodes[1], 3)[2] self.generate(self.nodes[2], 4) self.join_network() self.sync_all() # gettransaction should work for txid1 assert self.nodes[0].gettransaction(txid1)['txid'] == txid1, "gettransaction failed to find txid1" # listsinceblock(lastblockhash) should now include txid1, as seen from nodes[0] lsbres = self.nodes[0].listsinceblock(lastblockhash) assert any(tx['txid'] == txid1 for tx in lsbres['removed']) # but it should not include 'removed' if include_removed=false lsbres2 = self.nodes[0].listsinceblock(blockhash=lastblockhash, include_removed=False) assert 'removed' not in lsbres2
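# Illustrative polling sketch (comments only) of the pattern the docstring
# above motivates: remember the last block hash you processed and hand it
# back to listsinceblock with include_removed=True; transactions reorged out
# since that block appear under 'removed'. `mark_unconfirmed` is a
# hypothetical application hook, not part of this test.
#
#   lsb = node.listsinceblock(last_seen_hash, 1, False, True)
#   for tx in lsb['removed']:
#       mark_unconfirmed(tx['txid'])
#   last_seen_hash = lsb['lastblock']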
def run_test(self): # Turn off node 1 while node 0 mines blocks to generate stakes, # so that we can later try starting node 1 with an orphan proof. self.stop_node(1) node = self.nodes[0] addrkey0 = node.get_deterministic_priv_key() blockhashes = node.generatetoaddress(100, addrkey0.address) self.log.info( "Make build a valid proof and restart the node to use it") privkey = ECKey() privkey.set( bytes.fromhex( "12b004fff7f4b69ef8650e767f18f11ede158148b425660723b9f9a66e61f747" ), True) def get_hex_pubkey(privkey): return privkey.get_pubkey().get_bytes().hex() proof_master = get_hex_pubkey(privkey) proof_sequence = 11 proof_expiration = 12 stakes = create_coinbase_stakes(node, [blockhashes[0]], addrkey0.key) proof = node.buildavalancheproof(proof_sequence, proof_expiration, proof_master, stakes) self.log.info("Test decodeavalancheproof RPC") proofobj = FromHex(AvalancheProof(), proof) decodedproof = node.decodeavalancheproof(proof) limited_id_hex = f"{proofobj.limited_proofid:0{64}x}" assert_equal(decodedproof["sequence"], proof_sequence) assert_equal(decodedproof["expiration"], proof_expiration) assert_equal(decodedproof["master"], proof_master) assert_equal(decodedproof["proofid"], f"{proofobj.proofid:0{64}x}") assert_equal(decodedproof["limitedid"], limited_id_hex) assert_equal(decodedproof["stakes"][0]["txid"], stakes[0]["txid"]) assert_equal(decodedproof["stakes"][0]["vout"], stakes[0]["vout"]) assert_equal(decodedproof["stakes"][0]["height"], stakes[0]["height"]) assert_equal(decodedproof["stakes"][0]["iscoinbase"], stakes[0]["iscoinbase"]) assert_equal(decodedproof["stakes"][0]["signature"], base64.b64encode(proofobj.stakes[0].sig).decode("ascii")) # Invalid hex (odd number of hex digits) assert_raises_rpc_error(-22, "Proof must be an hexadecimal string", node.decodeavalancheproof, proof[:-1]) # Valid hex but invalid proof assert_raises_rpc_error(-22, "Proof has invalid format", node.decodeavalancheproof, proof[:-2]) # Restart the node with this proof self.restart_node( 0, self.extra_args[0] + [ "-avaproof={}".format(proof), "-avamasterkey=cND2ZvtabDbJ1gucx9GWH6XT9kgTAqfb6cotPt5Q5CyxVDhid2EN", ]) self.log.info("The proof is registered at first chaintip update") assert_equal(len(node.getavalanchepeerinfo()), 0) node.generate(1) wait_until(lambda: len(node.getavalanchepeerinfo()) == 1, timeout=5) # This case will occur for users building proofs with a third party # tool and then starting a new node that is not yet aware of the # transactions used for stakes. 
self.log.info("Start a node with an orphan proof") self.start_node( 1, self.extra_args[0] + [ "-avaproof={}".format(proof), "-avamasterkey=cND2ZvtabDbJ1gucx9GWH6XT9kgTAqfb6cotPt5Q5CyxVDhid2EN", ]) # Mine a block to trigger an attempt at registering the proof self.nodes[1].generate(1) wait_for_proof(self.nodes[1], f"{proofobj.proofid:0{64}x}", expect_orphan=True) self.log.info("Connect to an up-to-date node to unorphan the proof") connect_nodes(self.nodes[1], node) self.sync_all() wait_for_proof(self.nodes[1], f"{proofobj.proofid:0{64}x}", expect_orphan=False) self.log.info("Generate delegations for the proof") # Stack up a few delegation levels def gen_privkey(): pk = ECKey() pk.generate() return pk delegator_privkey = privkey delegation = None for _ in range(10): delegated_privkey = gen_privkey() delegation = node.delegateavalancheproof( limited_id_hex, bytes_to_wif(delegator_privkey.get_bytes()), get_hex_pubkey(delegated_privkey), delegation, ) delegator_privkey = delegated_privkey random_privkey = gen_privkey() random_pubkey = get_hex_pubkey(random_privkey) # Invalid proof no_stake = node.buildavalancheproof(proof_sequence, proof_expiration, proof_master, []) # Invalid privkey assert_raises_rpc_error( -5, "The private key is invalid", node.delegateavalancheproof, limited_id_hex, bytes_to_wif(bytes(32)), random_pubkey, ) # Invalid delegation bad_dg = AvalancheDelegation() assert_raises_rpc_error( -8, "The delegation does not match the proof", node.delegateavalancheproof, limited_id_hex, bytes_to_wif(privkey.get_bytes()), random_pubkey, bad_dg.serialize().hex(), ) # Still invalid, but with a matching proofid bad_dg.limited_proofid = proofobj.limited_proofid bad_dg.proof_master = proofobj.master bad_dg.levels = [AvalancheDelegationLevel()] assert_raises_rpc_error( -8, "The delegation is invalid", node.delegateavalancheproof, limited_id_hex, bytes_to_wif(privkey.get_bytes()), random_pubkey, bad_dg.serialize().hex(), ) # Wrong privkey, match the proof but does not match the delegation assert_raises_rpc_error( -5, "The private key does not match the delegation", node.delegateavalancheproof, limited_id_hex, bytes_to_wif(privkey.get_bytes()), random_pubkey, delegation, ) # Delegation not hex assert_raises_rpc_error( -22, "Delegation must be an hexadecimal string.", node.delegateavalancheproof, limited_id_hex, bytes_to_wif(privkey.get_bytes()), random_pubkey, "f00", ) # Delegation is hex but ill-formed assert_raises_rpc_error( -22, "Delegation has invalid format", node.delegateavalancheproof, limited_id_hex, bytes_to_wif(privkey.get_bytes()), random_pubkey, "dead", ) # Test invalid proofs dust = node.buildavalancheproof( proof_sequence, proof_expiration, proof_master, create_coinbase_stakes(node, [blockhashes[0]], addrkey0.key, amount="0")) dust_amount = Decimal(f"{PROOF_DUST_THRESHOLD * 0.9999:.4f}") dust2 = node.buildavalancheproof( proof_sequence, proof_expiration, proof_master, create_coinbase_stakes(node, [blockhashes[0]], addrkey0.key, amount=str(dust_amount))) duplicate_stake = node.buildavalancheproof( proof_sequence, proof_expiration, proof_master, create_coinbase_stakes(node, [blockhashes[0]] * 2, addrkey0.key)) missing_stake = node.buildavalancheproof( proof_sequence, proof_expiration, proof_master, [{ 'txid': '0' * 64, 'vout': 0, 'amount': 10000000, 'height': 42, 'iscoinbase': False, 'privatekey': addrkey0.key, }]) bad_sig = ( "0b000000000000000c0000000000000021030b4c866585dd868a9d62348" "a9cd008d6a312937048fff31670e7e920cfc7a7440105c5f72f5d6da3085" 
"583e75ee79340eb4eff208c89988e7ed0efb30b87298fa30000000000f20" "52a0100000003000000210227d85ba011276cf25b51df6a188b75e604b3" "8770a462b2d0e9fb2fc839ef5d3faf07f001dd38e9b4a43d07d5d449cc0" "f7d2888d96b82962b3ce516d1083c0e031773487fc3c4f2e38acd1db974" "1321b91a79b82d1c2cfd47793261e4ba003cf5") self.log.info( "Check the verifyavalancheproof and sendavalancheproof RPCs") if self.is_wallet_compiled(): self.log.info( "Check a proof with the maximum number of UTXO is valid") new_blocks = node.generate(AVALANCHE_MAX_PROOF_STAKES // 10 + 1) # confirm the coinbase UTXOs node.generate(101) too_many_stakes = create_stakes(node, new_blocks, AVALANCHE_MAX_PROOF_STAKES + 1) maximum_stakes = too_many_stakes[:-1] good_proof = node.buildavalancheproof(proof_sequence, proof_expiration, proof_master, maximum_stakes) too_many_utxos = node.buildavalancheproof(proof_sequence, proof_expiration, proof_master, too_many_stakes) assert node.verifyavalancheproof(good_proof) for rpc in [node.verifyavalancheproof, node.sendavalancheproof]: assert_raises_rpc_error(-22, "Proof must be an hexadecimal string", rpc, "f00") assert_raises_rpc_error(-22, "Proof has invalid format", rpc, "f00d") def check_rpc_failure(proof, message): assert_raises_rpc_error(-8, "The proof is invalid: " + message, rpc, proof) check_rpc_failure(no_stake, "no-stake") check_rpc_failure(dust, "amount-below-dust-threshold") check_rpc_failure(duplicate_stake, "duplicated-stake") check_rpc_failure(missing_stake, "utxo-missing-or-spent") check_rpc_failure(bad_sig, "invalid-signature") if self.is_wallet_compiled(): check_rpc_failure(too_many_utxos, "too-many-utxos") conflicting_utxo = node.buildavalancheproof(proof_sequence + 1, proof_expiration, proof_master, stakes) assert_raises_rpc_error( -8, "The proof has conflicting utxo with an existing proof", node.sendavalancheproof, conflicting_utxo) # Good proof assert node.verifyavalancheproof(proof) peer = node.add_p2p_connection(P2PInterface()) proofid = FromHex(AvalancheProof(), proof).proofid node.sendavalancheproof(proof) assert proofid in get_proof_ids(node) def inv_found(): with p2p_lock: return peer.last_message.get( "inv") and peer.last_message["inv"].inv[-1].hash == proofid wait_until(inv_found) self.log.info("Check the getrawproof RPC") raw_proof = node.getrawavalancheproof("{:064x}".format(proofid)) assert_equal(raw_proof['proof'], proof) assert_equal(raw_proof['orphan'], False) assert_raises_rpc_error(-8, "Proof not found", node.getrawavalancheproof, '0' * 64) # Orphan the proof by sending the stake raw_tx = node.createrawtransaction([{ "txid": stakes[-1]["txid"], "vout": 0 }], { ADDRESS_BCHREG_UNSPENDABLE: stakes[-1]["amount"] - Decimal('10000') }) signed_tx = node.signrawtransactionwithkey(raw_tx, [addrkey0.key]) node.sendrawtransaction(signed_tx["hex"]) node.generate(1) wait_until(lambda: proofid not in get_proof_ids(node)) raw_proof = node.getrawavalancheproof("{:064x}".format(proofid)) assert_equal(raw_proof['proof'], proof) assert_equal(raw_proof['orphan'], True) self.log.info("Bad proof should be rejected at startup") self.stop_node(0) node.assert_start_raises_init_error( self.extra_args[0] + [ "-avasessionkey=0", ], expected_msg="Error: The avalanche session key is invalid.", ) node.assert_start_raises_init_error( self.extra_args[0] + [ "-avaproof={}".format(proof), ], expected_msg= "Error: The avalanche master key is missing for the avalanche proof.", ) node.assert_start_raises_init_error( self.extra_args[0] + [ "-avaproof={}".format(proof), "-avamasterkey=0", ], expected_msg="Error: The 
avalanche master key is invalid.", ) def check_proof_init_error(proof, message): node.assert_start_raises_init_error( self.extra_args[0] + [ "-avaproof={}".format(proof), "-avamasterkey=cND2ZvtabDbJ1gucx9GWH6XT9kgTAqfb6cotPt5Q5CyxVDhid2EN", ], expected_msg="Error: " + message, ) check_proof_init_error(no_stake, "The avalanche proof has no stake.") check_proof_init_error(dust, "The avalanche proof stake is too low.") check_proof_init_error(dust2, "The avalanche proof stake is too low.") check_proof_init_error(duplicate_stake, "The avalanche proof has duplicated stake.") check_proof_init_error( bad_sig, "The avalanche proof has invalid stake signatures.") if self.is_wallet_compiled(): # The too many utxos case creates a proof which is that large that it # cannot fit on the command line append_config(node.datadir, ["avaproof={}".format(too_many_utxos)]) node.assert_start_raises_init_error( self.extra_args[0] + [ "-avamasterkey=cND2ZvtabDbJ1gucx9GWH6XT9kgTAqfb6cotPt5Q5CyxVDhid2EN", ], expected_msg="Error: The avalanche proof has too many utxos.", match=ErrorMatch.PARTIAL_REGEX, ) # Master private key mismatch random_privkey = ECKey() random_privkey.generate() node.assert_start_raises_init_error( self.extra_args[0] + [ "-avaproof={}".format(proof), "-avamasterkey={}".format( bytes_to_wif(random_privkey.get_bytes())), ], expected_msg= "Error: The master key does not match the proof public key.", ) self.log.info("Bad delegation should be rejected at startup") def check_delegation_init_error(delegation, message): node.assert_start_raises_init_error( self.extra_args[0] + [ "-avadelegation={}".format(delegation), "-avaproof={}".format(proof), "-avamasterkey={}".format( bytes_to_wif(delegated_privkey.get_bytes())), ], expected_msg="Error: " + message, ) check_delegation_init_error( AvalancheDelegation().serialize().hex(), "The delegation does not match the proof.") bad_level_sig = FromHex(AvalancheDelegation(), delegation) # Tweak some key to cause the signature to mismatch bad_level_sig.levels[-2].pubkey = bytes.fromhex(proof_master) check_delegation_init_error( bad_level_sig.serialize().hex(), "The avalanche delegation has invalid signatures.") node.assert_start_raises_init_error( self.extra_args[0] + [ "-avadelegation={}".format(delegation), "-avaproof={}".format(proof), "-avamasterkey={}".format( bytes_to_wif(random_privkey.get_bytes())), ], expected_msg= "Error: The master key does not match the delegation public key.", )
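# Illustrative recap (comments only) of the startup configurations the init
# error checks above exercise; the angle-bracket values are placeholders.
# A node operating its own proof passes the proof plus the proof master key,
# while a delegated node also passes the delegation and uses the last
# delegated key as its master key:
#
#   bitcoind -avaproof=<proof hex> -avamasterkey=<WIF of proof master key>
#   bitcoind -avaproof=<proof hex> -avadelegation=<delegation hex> \
#            -avamasterkey=<WIF of last delegated key>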
def run_test(self): p2p0 = self.nodes[0].add_p2p_connection(BaseNode()) # Build the blockchain self.tip = int(self.nodes[0].getbestblockhash(), 16) self.block_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time'] + 1 self.blocks = [] # Get a pubkey for the coinbase TXO coinbase_key = ECKey() coinbase_key.generate() coinbase_pubkey = coinbase_key.get_pubkey().get_bytes() # Create the first block with a coinbase output to our key height = 1 block = create_block(self.tip, create_coinbase(height, coinbase_pubkey), self.block_time) self.blocks.append(block) self.block_time += 1 block.solve() # Save the coinbase for later self.block1 = block self.tip = block.sha256 height += 1 # Bury the block 100 deep so the coinbase output is spendable for i in range(100): block = create_block(self.tip, create_coinbase(height), self.block_time) block.solve() self.blocks.append(block) self.tip = block.sha256 self.block_time += 1 height += 1 # Create a transaction spending the coinbase output with an invalid (null) signature tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.block1.vtx[0].sha256, 0), scriptSig=b"")) tx.vout.append(CTxOut(49 * 100000000, CScript([OP_TRUE]))) tx.calc_sha256() block102 = create_block(self.tip, create_coinbase(height), self.block_time) self.block_time += 1 block102.vtx.extend([tx]) block102.hashMerkleRoot = block102.calc_merkle_root() block102.rehash() block102.solve() self.blocks.append(block102) self.tip = block102.sha256 self.block_time += 1 height += 1 # Bury the assumed valid block 2100 deep for i in range(2100): block = create_block(self.tip, create_coinbase(height), self.block_time) block.set_base_version(4) block.solve() self.blocks.append(block) self.tip = block.sha256 self.block_time += 1 height += 1 self.nodes[0].disconnect_p2ps() # Start node1 and node2 with assumevalid so they accept a block with a bad signature. self.start_node(1, extra_args=["-assumevalid=" + hex(block102.sha256)]) self.start_node(2, extra_args=["-assumevalid=" + hex(block102.sha256)]) p2p0 = self.nodes[0].add_p2p_connection(BaseNode()) p2p1 = self.nodes[1].add_p2p_connection(BaseNode()) p2p2 = self.nodes[2].add_p2p_connection(BaseNode()) # send header lists to all three nodes p2p0.send_header_for_blocks(self.blocks[0:2000]) p2p0.send_header_for_blocks(self.blocks[2000:]) p2p1.send_header_for_blocks(self.blocks[0:2000]) p2p1.send_header_for_blocks(self.blocks[2000:]) p2p2.send_header_for_blocks(self.blocks[0:200]) # Send blocks to node0. Block 102 will be rejected. self.send_blocks_until_disconnected(p2p0) self.assert_blockchain_height(self.nodes[0], 101) # Send all blocks to node1. All blocks will be accepted. for i in range(2202): p2p1.send_message(msg_block(self.blocks[i])) # Syncing 2200 blocks can take a while on slow systems. Give it plenty of time to sync. p2p1.sync_with_ping(200) assert_equal(self.nodes[1].getblock(self.nodes[1].getbestblockhash())['height'], 2202) # Send blocks to node2. Block 102 will be rejected. self.send_blocks_until_disconnected(p2p2) self.assert_blockchain_height(self.nodes[2], 101)
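# Descriptive note on the -assumevalid behaviour exercised above: the flag
# takes the hex hash of a block whose scripts are assumed valid, so signature
# checks are skipped for that block and its ancestors while all other
# consensus rules still apply. node0 runs without the flag and rejects block
# 102; node1 received the full header chain, so block 102 is buried under
# enough work below its best header (2100 blocks here) for script validation
# to be skipped and the whole chain is accepted; node2 was only given 200
# headers, so the burial is insufficient, validation is not skipped, and
# block 102 is rejected there as well.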
def run_test(self): self.log.info("Mining blocks...") self.generate(self.nodes[0], COINBASE_MATURITY + 1) # address address1 = self.nodes[0].getnewaddress() # pubkey address2 = self.nodes[0].getnewaddress() # privkey eckey = ECKey() eckey.generate() address3_privkey = bytes_to_wif(eckey.get_bytes()) address3 = key_to_p2wpkh(eckey.get_pubkey().get_bytes()) self.nodes[0].importprivkey(address3_privkey) # Check only one address address_info = self.nodes[0].getaddressinfo(address1) assert_equal(address_info['ismine'], True) self.sync_all() # Node 1 sync test assert_equal(self.nodes[1].getblockcount(), COINBASE_MATURITY + 1) # Address Test - before import address_info = self.nodes[1].getaddressinfo(address1) assert_equal(address_info['iswatchonly'], False) assert_equal(address_info['ismine'], False) address_info = self.nodes[1].getaddressinfo(address2) assert_equal(address_info['iswatchonly'], False) assert_equal(address_info['ismine'], False) address_info = self.nodes[1].getaddressinfo(address3) assert_equal(address_info['iswatchonly'], False) assert_equal(address_info['ismine'], False) # Send funds to self txnid1 = self.nodes[0].sendtoaddress(address1, 0.1) self.generate(self.nodes[0], 1) rawtxn1 = self.nodes[0].gettransaction(txnid1)['hex'] proof1 = self.nodes[0].gettxoutproof([txnid1]) txnid2 = self.nodes[0].sendtoaddress(address2, 0.05) self.generate(self.nodes[0], 1) rawtxn2 = self.nodes[0].gettransaction(txnid2)['hex'] proof2 = self.nodes[0].gettxoutproof([txnid2]) txnid3 = self.nodes[0].sendtoaddress(address3, 0.025) self.generate(self.nodes[0], 1) rawtxn3 = self.nodes[0].gettransaction(txnid3)['hex'] proof3 = self.nodes[0].gettxoutproof([txnid3]) self.sync_all() # Import with no affiliated address assert_raises_rpc_error(-5, "No addresses", self.nodes[1].importprunedfunds, rawtxn1, proof1) balance1 = self.nodes[1].getbalance() assert_equal(balance1, Decimal(0)) # Import with affiliated address with no rescan self.nodes[1].createwallet('wwatch', disable_private_keys=True) wwatch = self.nodes[1].get_wallet_rpc('wwatch') wwatch.importaddress(address=address2, rescan=False) wwatch.importprunedfunds(rawtransaction=rawtxn2, txoutproof=proof2) assert [ tx for tx in wwatch.listtransactions(include_watchonly=True) if tx['txid'] == txnid2 ] # Import with private key with no rescan w1 = self.nodes[1].get_wallet_rpc(self.default_wallet_name) w1.importprivkey(privkey=address3_privkey, rescan=False) w1.importprunedfunds(rawtxn3, proof3) assert [tx for tx in w1.listtransactions() if tx['txid'] == txnid3] balance3 = w1.getbalance() assert_equal(balance3, Decimal('0.025')) # Addresses Test - after import address_info = w1.getaddressinfo(address1) assert_equal(address_info['iswatchonly'], False) assert_equal(address_info['ismine'], False) address_info = wwatch.getaddressinfo(address2) if self.options.descriptors: assert_equal(address_info['iswatchonly'], False) assert_equal(address_info['ismine'], True) else: assert_equal(address_info['iswatchonly'], True) assert_equal(address_info['ismine'], False) address_info = w1.getaddressinfo(address3) assert_equal(address_info['iswatchonly'], False) assert_equal(address_info['ismine'], True) # Remove transactions assert_raises_rpc_error(-8, "Transaction does not exist in wallet.", w1.removeprunedfunds, txnid1) assert not [ tx for tx in w1.listtransactions(include_watchonly=True) if tx['txid'] == txnid1 ] wwatch.removeprunedfunds(txnid2) assert not [ tx for tx in wwatch.listtransactions(include_watchonly=True) if tx['txid'] == txnid2 ] w1.removeprunedfunds(txnid3) 
assert not [ tx for tx in w1.listtransactions(include_watchonly=True) if tx['txid'] == txnid3 ] # Check various RPC parameter validation errors assert_raises_rpc_error(-22, "TX decode failed", w1.importprunedfunds, b'invalid tx'.hex(), proof1) assert_raises_rpc_error(-5, "Transaction given doesn't exist in proof", w1.importprunedfunds, rawtxn2, proof1) mb = from_hex(CMerkleBlock(), proof1) mb.header.hashMerkleRoot = 0xdeadbeef # cause mismatch between merkle root and merkle block assert_raises_rpc_error(-5, "Something wrong with merkleblock", w1.importprunedfunds, rawtxn1, mb.serialize().hex()) mb = from_hex(CMerkleBlock(), proof1) mb.header.nTime += 1 # modify arbitrary block header field to change block hash assert_raises_rpc_error(-5, "Block not found in chain", w1.importprunedfunds, rawtxn1, mb.serialize().hex())
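# Illustrative sketch (comments only, not executed here) of the operational
# flow importprunedfunds is meant for: on a node that still has the block
# data, export the transaction and its merkle proof, then feed both to a
# pruned wallet node that already watches the address. Names below are
# placeholders.
#
#   raw = source_node.gettransaction(txid)['hex']
#   proof = source_node.gettxoutproof([txid])
#   pruned_wallet.importaddress(address, "", False)   # watch-only, no rescan
#   pruned_wallet.importprunedfunds(raw, proof)
#   # later, if the transaction is no longer relevant to the wallet:
#   pruned_wallet.removeprunedfunds(txid)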