def p2pkh_address_to_script(self, v):
    pubkey = hex_str_to_bytes(v['pubkey'])
    p2wpkh = CScript([OP_0, hash160(pubkey)])
    p2sh_p2wpkh = CScript([OP_HASH160, hash160(p2wpkh), OP_EQUAL])
    p2pk = CScript([pubkey, OP_CHECKSIG])
    p2pkh = CScript(hex_str_to_bytes(v['scriptPubKey']))
    p2sh_p2pk = CScript([OP_HASH160, hash160(p2pk), OP_EQUAL])
    p2sh_p2pkh = CScript([OP_HASH160, hash160(p2pkh), OP_EQUAL])
    p2wsh_p2pk = CScript([OP_0, sha256(p2pk)])
    p2wsh_p2pkh = CScript([OP_0, sha256(p2pkh)])
    p2sh_p2wsh_p2pk = CScript([OP_HASH160, hash160(p2wsh_p2pk), OP_EQUAL])
    p2sh_p2wsh_p2pkh = CScript([OP_HASH160, hash160(p2wsh_p2pkh), OP_EQUAL])
    return [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh]
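# Illustrative sketch (an assumption, not part of the original tests): callers
# unpack the list returned above positionally, in the same order as the return
# statement. `test` stands for the test object providing the helper and `node`
# for any test node.
def example_unpack_p2pkh_scripts(test, node):
    v = node.getaddressinfo(node.getnewaddress())
    [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk,
     p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = test.p2pkh_address_to_script(v)
    return p2wpkh, p2sh_p2wpkh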
def assert_tx_format_also_signed(self, utxo, segwit):
    raw = self.nodes[0].createrawtransaction(
        [{"txid": utxo["txid"], "vout": utxo["vout"]}],
        [{self.unknown_addr: "49.9"}, {"fee": "0.1"}]
    )

    unsigned_decoded = self.nodes[0].decoderawtransaction(raw)
    assert_equal(len(unsigned_decoded["vin"]), 1)
    assert('txinwitness' not in unsigned_decoded["vin"][0])

    # Cross-check python serialization
    tx = CTransaction()
    tx.deserialize(BytesIO(hex_str_to_bytes(raw)))
    assert_equal(tx.vin[0].prevout.hash, int("0x" + utxo["txid"], 0))
    assert_equal(len(tx.vin), len(unsigned_decoded["vin"]))
    assert_equal(len(tx.vout), len(unsigned_decoded["vout"]))

    # assert re-encoding
    serialized = bytes_to_hex_str(tx.serialize())
    assert_equal(serialized, raw)

    # Now sign and repeat tests
    signed_raw = self.nodes[0].signrawtransactionwithwallet(raw)["hex"]
    signed_decoded = self.nodes[0].decoderawtransaction(signed_raw)
    assert_equal(len(signed_decoded["vin"]), 1)
    assert(("txinwitness" in signed_decoded["vin"][0]) == segwit)

    # Cross-check python serialization
    tx = CTransaction()
    tx.deserialize(BytesIO(hex_str_to_bytes(signed_raw)))
    assert_equal(tx.vin[0].prevout.hash, int("0x" + utxo["txid"], 0))
    assert_equal(bytes_to_hex_str(tx.vin[0].scriptSig), signed_decoded["vin"][0]["scriptSig"]["hex"])

    # test witness
    if segwit:
        wit_decoded = signed_decoded["vin"][0]["txinwitness"]
        for i in range(len(wit_decoded)):
            assert_equal(bytes_to_hex_str(tx.wit.vtxinwit[0].scriptWitness.stack[i]), wit_decoded[i])

    # assert re-encoding
    serialized = bytes_to_hex_str(tx.serialize())
    assert_equal(serialized, signed_raw)

    txid = self.nodes[0].sendrawtransaction(serialized)
    nodetx = self.nodes[0].getrawtransaction(txid, 1)
    assert_equal(nodetx["txid"], tx.rehash())
    # cross-check wtxid report from node
    wtxid = bytes_to_hex_str(ser_uint256(tx.calc_sha256(True))[::-1])
    assert_equal(nodetx["wtxid"], wtxid)
    assert_equal(nodetx["hash"], wtxid)
    # witness hash stuff
    assert_equal(nodetx["withash"], tx.calc_witness_hash())
    return (txid, wtxid)
def buildDummySegwitNameUpdate (self, name, value, addr):
    """
    Builds a transaction that updates the given name to the given value and
    address.  We assume that the name is at a native segwit script.  The
    witness of the transaction will be set to two dummy stack elements so
    that the program itself is "well-formed" even if it won't execute
    successfully.
    """

    data = self.node.name_show (name)
    u = self.findUnspent (Decimal ('0.01'))
    ins = [data, u]
    outs = {addr: Decimal ('0.01')}

    txHex = self.node.createrawtransaction (ins, outs)
    nameOp = {"op": "name_update", "name": name, "value": value}
    txHex = self.node.namerawtransaction (txHex, 0, nameOp)['hex']
    txHex = self.node.signrawtransactionwithwallet (txHex)['hex']

    tx = CTransaction ()
    tx.deserialize (io.BytesIO (hex_str_to_bytes (txHex)))
    tx.wit = CTxWitness ()
    tx.wit.vtxinwit.append (CTxInWitness ())
    tx.wit.vtxinwit[0].scriptWitness = CScriptWitness ()
    tx.wit.vtxinwit[0].scriptWitness.stack = [b"dummy"] * 2
    txHex = tx.serialize ().hex ()

    return txHex
def sign_transaction(node, unsignedtx):
    rawtx = ToHex(unsignedtx)
    signresult = node.signrawtransactionwithwallet(rawtx)
    tx = CTransaction()
    f = BytesIO(hex_str_to_bytes(signresult['hex']))
    tx.deserialize(f)
    return tx
def create_transaction(node, txid, to_address, amount):
    inputs = [{"txid": txid, "vout": 0}]
    outputs = {to_address: amount}
    rawtx = node.createrawtransaction(inputs, outputs)
    tx = CTransaction()
    f = BytesIO(hex_str_to_bytes(rawtx))
    tx.deserialize(f)
    return tx
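# A minimal usage sketch, not part of the original helpers: chain
# create_transaction() and sign_transaction() above to build, sign and
# broadcast a spend of vout 0 of a funding transaction. The function name and
# its arguments are illustrative assumptions.
def example_spend_vout0(node, funding_txid, dest_addr, amount):
    spend_tx = create_transaction(node, funding_txid, dest_addr, amount)
    signed_tx = sign_transaction(node, spend_tx)
    # ToHex() is the same serialization helper already used by sign_transaction()
    return node.sendrawtransaction(ToHex(signed_tx))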
def setScriptSigOps (self, txHex, ind, scriptSigOps):
    """
    Update the given hex transaction by setting the scriptSig for the
    input with the given index.
    """

    tx = CTransaction ()
    tx.deserialize (io.BytesIO (hex_str_to_bytes (txHex)))
    tx.vin[ind].scriptSig = CScript (scriptSigOps)

    return tx.serialize ().hex ()
def addAuxpow (self, block, blkHash, ok):
    """
    Fills in the auxpow for the given block message.  It is either
    chosen to be valid (ok = True) or invalid (ok = False).
    """

    target = b"%064x" % uint256_from_compact (block.nBits)
    auxpowHex = computeAuxpow (blkHash, target, ok)
    block.auxpow = CAuxPow ()
    block.auxpow.deserialize (BytesIO (hex_str_to_bytes (auxpowHex)))

    return block
def make_utxo(node, amount, confirmed=True, scriptPubKey=CScript([1])):
    """Create a txout with a given amount and scriptPubKey

    Mines coins as needed.

    confirmed - txouts created will be confirmed in the blockchain;
                unconfirmed otherwise.
    """
    fee = 1 * COIN
    while node.getbalance()['bitcoin'] < satoshi_round((amount + fee) / COIN):
        node.generate(100)

    new_addr = node.getnewaddress()
    unblinded_addr = node.validateaddress(new_addr)["unconfidential"]
    txidstr = node.sendtoaddress(new_addr, satoshi_round((amount + fee) / COIN))
    tx1 = node.getrawtransaction(txidstr, 1)
    txid = int(txidstr, 16)
    i = None

    for i, txout in enumerate(tx1['vout']):
        if txout['scriptPubKey']['type'] == "fee":
            continue  # skip fee outputs
        if txout['scriptPubKey']['addresses'] == [unblinded_addr]:
            break
    assert i is not None

    tx2 = CTransaction()
    tx2.vin = [CTxIn(COutPoint(txid, i))]
    tx1raw = CTransaction()
    tx1raw.deserialize(BytesIO(hex_str_to_bytes(node.getrawtransaction(txidstr))))
    feeout = CTxOut(CTxOutValue(tx1raw.vout[i].nValue.getAmount() - amount))
    tx2.vout = [CTxOut(amount, scriptPubKey), feeout]
    tx2.rehash()

    signed_tx = node.signrawtransactionwithwallet(txToHex(tx2))

    txid = node.sendrawtransaction(signed_tx['hex'], True)

    # If requested, ensure txouts are confirmed.
    if confirmed:
        mempool_size = len(node.getrawmempool())
        while mempool_size > 0:
            node.generate(1)
            new_size = len(node.getrawmempool())
            # Error out if we have something stuck in the mempool, as this
            # would likely be a bug.
            assert(new_size < mempool_size)
            mempool_size = new_size

    return COutPoint(int(txid, 16), 0)
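# Hedged usage sketch (illustrative, not taken from the original test): amounts
# are passed to make_utxo() in satoshis and the helper returns a COutPoint that
# later transactions can spend; the 1.1 * COIN figure is arbitrary.
def example_make_utxos(node):
    confirmed_outpoint = make_utxo(node, int(1.1 * COIN))
    unconfirmed_outpoint = make_utxo(node, int(1.1 * COIN), confirmed=False)
    return confirmed_outpoint, unconfirmed_outpoint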
def submit_block_with_tx(node, tx):
    ctx = CTransaction()
    ctx.deserialize(io.BytesIO(hex_str_to_bytes(tx)))

    tip = node.getbestblockhash()
    height = node.getblockcount() + 1
    block_time = node.getblockheader(tip)["mediantime"] + 1
    block = create_block(int(tip, 16), create_coinbase(height), block_time)
    block.vtx.append(ctx)
    block.rehash()
    block.hashMerkleRoot = block.calc_merkle_root()
    add_witness_commitment(block)
    block.solve()
    node.submitblock(block.serialize(True).hex())
    return block
def create_and_mine_tx_from_txids(self, txids, success=True):
    tx = CTransaction()
    for i in txids:
        txtmp = CTransaction()
        txraw = self.nodes[0].getrawtransaction(i)
        f = BytesIO(hex_str_to_bytes(txraw))
        txtmp.deserialize(f)
        for j in range(len(txtmp.vout)):
            tx.vin.append(CTxIn(COutPoint(int('0x' + i, 0), j)))
    tx.vout.append(CTxOut(0, CScript()))
    tx.rehash()

    signresults = self.nodes[0].signrawtransactionwithwallet(bytes_to_hex_str(tx.serialize_without_witness()))['hex']
    self.nodes[0].sendrawtransaction(signresults, True)
    self.nodes[0].generate(1)
    sync_blocks(self.nodes)
def cltv_validate(node, tx, height):
    '''Modify the signature in vin 0 of the tx to pass CLTV

    Prepends <height> CLTV DROP in the scriptSig, and sets
    the locktime to height'''
    tx.vin[0].nSequence = 0
    tx.nLockTime = height

    # Need to re-sign, since nSequence and nLockTime changed
    signed_result = node.signrawtransactionwithwallet(ToHex(tx))
    new_tx = CTransaction()
    new_tx.deserialize(BytesIO(hex_str_to_bytes(signed_result['hex'])))

    new_tx.vin[0].scriptSig = CScript([CScriptNum(height), OP_CHECKLOCKTIMEVERIFY, OP_DROP] +
                                      list(CScript(new_tx.vin[0].scriptSig)))
    return new_tx
def get_key(node):
    """Generate a fresh key on node

    Returns a named tuple of privkey, pubkey and all address and scripts."""
    addr = node.getnewaddress()
    pubkey = node.getaddressinfo(addr)['pubkey']
    pkh = hash160(hex_str_to_bytes(pubkey))
    return Key(privkey=node.dumpprivkey(addr),
               pubkey=pubkey,
               p2pkh_script=CScript([OP_DUP, OP_HASH160, pkh, OP_EQUALVERIFY, OP_CHECKSIG]).hex(),
               p2pkh_addr=key_to_p2pkh(pubkey),
               p2wpkh_script=CScript([OP_0, pkh]).hex(),
               p2wpkh_addr=key_to_p2wpkh(pubkey),
               p2sh_p2wpkh_script=CScript([OP_HASH160, hash160(CScript([OP_0, pkh])), OP_EQUAL]).hex(),
               p2sh_p2wpkh_redeem_script=CScript([OP_0, pkh]).hex(),
               p2sh_p2wpkh_addr=key_to_p2sh_p2wpkh(pubkey))
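# Illustrative sketch only (an assumption, not part of the original suite): the
# Key named tuple returned above can be used, for example, to import the
# derived P2PKH address as watch-only on a second node and hand the pubkey
# back for multisig construction.
def example_watch_key(source_node, watch_node):
    key = get_key(source_node)
    watch_node.importaddress(key.p2pkh_addr)
    return key.pubkey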
def get_key(self):
    """Generate a fresh key on node0

    Returns a named tuple of privkey, pubkey and all address and scripts."""
    addr = self.nodes[0].getnewaddress()
    pubkey = self.nodes[0].getaddressinfo(addr)['pubkey']
    pkh = hash160(hex_str_to_bytes(pubkey))
    return Key(self.nodes[0].dumpprivkey(addr),
               pubkey,
               CScript([OP_DUP, OP_HASH160, pkh, OP_EQUALVERIFY, OP_CHECKSIG]).hex(),  # p2pkh
               key_to_p2pkh(pubkey),  # p2pkh addr
               CScript([OP_0, pkh]).hex(),  # p2wpkh
               key_to_p2wpkh(pubkey),  # p2wpkh addr
               CScript([OP_HASH160, hash160(CScript([OP_0, pkh])), OP_EQUAL]).hex(),  # p2sh-p2wpkh
               CScript([OP_0, pkh]).hex(),  # p2sh-p2wpkh redeem script
               key_to_p2sh_p2wpkh(pubkey))  # p2sh-p2wpkh addr
def run_test (self):
    node = self.nodes[0]
    p2pStore = node.add_p2p_connection (P2PDataStore ())
    p2pGetter = node.add_p2p_connection (P2PBlockGetter ())

    self.log.info ("Adding a block with non-zero hash in the auxpow...")
    blk, blkHash = self.createBlock ()
    blk.auxpow.hashBlock = 12345678
    blkHex = blk.serialize ().hex ()
    assert_equal (node.submitblock (blkHex), None)
    assert_equal (node.getbestblockhash (), blkHash)

    self.log.info ("Retrieving block through RPC...")
    gotHex = node.getblock (blkHash, 0)
    assert gotHex != blkHex
    gotBlk = CBlock ()
    gotBlk.deserialize (BytesIO (hex_str_to_bytes (gotHex)))
    assert_equal (gotBlk.auxpow.hashBlock, 0)

    self.log.info ("Retrieving block through P2P...")
    gotBlk = p2pGetter.getBlock (blkHash)
    assert_equal (gotBlk.auxpow.hashBlock, 0)

    self.log.info ("Sending zero-hash auxpow through RPC...")
    blk, blkHash = self.createBlock ()
    blk.auxpow.hashBlock = 0
    assert_equal (node.submitblock (blk.serialize ().hex ()), None)
    assert_equal (node.getbestblockhash (), blkHash)

    self.log.info ("Sending zero-hash auxpow through P2P...")
    blk, blkHash = self.createBlock ()
    blk.auxpow.hashBlock = 0
    p2pStore.send_blocks_and_test ([blk], node, success=True)
    assert_equal (node.getbestblockhash (), blkHash)

    self.log.info ("Sending non-zero nIndex auxpow through RPC...")
    blk, blkHash = self.createBlock ()
    blk.auxpow.nIndex = 42
    assert_equal (node.submitblock (blk.serialize ().hex ()), None)
    assert_equal (node.getbestblockhash (), blkHash)

    self.log.info ("Sending non-zero nIndex auxpow through P2P...")
    blk, blkHash = self.createBlock ()
    blk.auxpow.nIndex = 42
    p2pStore.send_blocks_and_test ([blk], node, success=True)
    assert_equal (node.getbestblockhash (), blkHash)
def get_multisig(node):
    """Generate a fresh 2-of-3 multisig on node

    Returns a named tuple of privkeys, pubkeys and all address and scripts."""
    addrs = []
    pubkeys = []
    for _ in range(3):
        addr = node.getaddressinfo(node.getnewaddress())
        addrs.append(addr['address'])
        pubkeys.append(addr['pubkey'])
    script_code = CScript([OP_2] + [hex_str_to_bytes(pubkey) for pubkey in pubkeys] + [OP_3, OP_CHECKMULTISIG])
    witness_script = CScript([OP_0, sha256(script_code)])
    return Multisig(privkeys=[node.dumpprivkey(addr) for addr in addrs],
                    pubkeys=pubkeys,
                    p2sh_script=CScript([OP_HASH160, hash160(script_code), OP_EQUAL]).hex(),
                    p2sh_addr=script_to_p2sh(script_code),
                    redeem_script=script_code.hex(),
                    p2wsh_script=witness_script.hex(),
                    p2wsh_addr=script_to_p2wsh(script_code),
                    p2sh_p2wsh_script=CScript([OP_HASH160, witness_script, OP_EQUAL]).hex(),
                    p2sh_p2wsh_addr=script_to_p2sh_p2wsh(script_code))
def get_multisig(self):
    """Generate a fresh multisig on node0

    Returns a named tuple of privkeys, pubkeys and all address and scripts."""
    addrs = []
    pubkeys = []
    for _ in range(3):
        addr = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
        addrs.append(addr['address'])
        pubkeys.append(addr['pubkey'])
    script_code = CScript([OP_2] + [hex_str_to_bytes(pubkey) for pubkey in pubkeys] + [OP_3, OP_CHECKMULTISIG])
    witness_script = CScript([OP_0, sha256(script_code)])
    return Multisig([self.nodes[0].dumpprivkey(addr) for addr in addrs],
                    pubkeys,
                    CScript([OP_HASH160, hash160(script_code), OP_EQUAL]).hex(),  # p2sh
                    script_to_p2sh(script_code),  # p2sh addr
                    script_code.hex(),  # redeem script
                    witness_script.hex(),  # p2wsh
                    script_to_p2wsh(script_code),  # p2wsh addr
                    CScript([OP_HASH160, witness_script, OP_EQUAL]).hex(),  # p2sh-p2wsh
                    script_to_p2sh_p2wsh(script_code))  # p2sh-p2wsh addr
def createBlock (self):
    """
    Creates and mines a new block with auxpow.
    """

    bestHash = self.nodes[0].getbestblockhash ()
    bestBlock = self.nodes[0].getblock (bestHash)
    tip = int (bestHash, 16)
    height = bestBlock["height"] + 1
    time = bestBlock["time"] + 1

    block = create_block (tip, create_coinbase (height), time)
    block.mark_auxpow ()
    block.rehash ()
    newHash = "%064x" % block.sha256

    target = b"%064x" % uint256_from_compact (block.nBits)
    auxpowHex = computeAuxpow (newHash, target, True)
    block.auxpow = CAuxPow ()
    block.auxpow.deserialize (BytesIO (hex_str_to_bytes (auxpowHex)))

    return block, newHash
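# Minimal illustration (not part of the original test): the typical pattern in
# the run_test above is to build an auxpow block with this helper, submit it
# over RPC and check that it became the new tip, e.g.
#
#   blk, blkHash = self.createBlock ()
#   assert_equal (self.nodes[0].submitblock (blk.serialize ().hex ()), None)
#   assert_equal (self.nodes[0].getbestblockhash (), blkHash)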
def test_transaction_serialization(self):
    legacy_addr = self.nodes[0].getnewaddress("", "legacy")
    p2sh_addr = self.nodes[0].getnewaddress("", "p2sh-segwit")
    bech32_addr = self.nodes[0].getnewaddress("", "bech32")
    self.unknown_addr = self.nodes[1].getnewaddress()

    # directly seed types of utxos required
    self.nodes[0].generatetoaddress(1, legacy_addr)
    self.nodes[0].generatetoaddress(1, p2sh_addr)
    self.nodes[0].generatetoaddress(1, bech32_addr)
    self.nodes[0].generatetoaddress(101, self.unknown_addr)

    # grab utxos filtering by age
    legacy_utxo = self.nodes[0].listunspent(104, 104)[0]
    p2sh_utxo = self.nodes[0].listunspent(103, 103)[0]
    bech32_utxo = self.nodes[0].listunspent(102, 102)[0]

    submitted_txids = []
    self.log.info("Testing legacy UTXO")
    submitted_txids.append(self.assert_tx_format_also_signed(legacy_utxo, segwit=False))
    self.log.info("Testing p2sh UTXO")
    submitted_txids.append(self.assert_tx_format_also_signed(p2sh_utxo, segwit=True))
    self.log.info("Testing bech32 UTXO")
    submitted_txids.append(self.assert_tx_format_also_signed(bech32_utxo, segwit=True))

    blockhash = self.nodes[0].generate(1)[0]
    hexblock = self.nodes[0].getblock(blockhash, 0)
    block_details = self.nodes[0].getblock(blockhash, 2)
    block = CBlock()
    block.deserialize(BytesIO(hex_str_to_bytes(hexblock)))
    assert(len(block.vtx) == len(submitted_txids) + 1)
    assert_equal(len(block_details["tx"]), len(block.vtx))
    for tx1, tx2 in zip(block.vtx[1:], block_details["tx"][1:]):
        # no tuple wildcard, just re-used tx2 on first one
        assert((tx1.rehash(), tx2["wtxid"]) in submitted_txids)
        assert((tx2["txid"], tx2["hash"]) in submitted_txids)
        assert((tx2["txid"], tx2["wtxid"]) in submitted_txids)
    block.rehash()
    assert_equal(block.hash, self.nodes[0].getbestblockhash())
def witness_script_test(self):
    # Now test signing transaction to P2SH-P2WSH addresses without wallet
    # Create a new P2SH-P2WSH 1-of-1 multisig address:
    embedded_address = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress())
    embedded_privkey = self.nodes[1].dumpprivkey(embedded_address["address"])
    p2sh_p2wsh_address = self.nodes[1].addmultisigaddress(1, [embedded_address["pubkey"]], "", "p2sh-segwit")
    # send transaction to P2SH-P2WSH 1-of-1 multisig address
    self.nodes[0].generate(101)
    self.nodes[0].sendtoaddress(p2sh_p2wsh_address["address"], 49.999)
    self.nodes[0].generate(1)
    self.sync_all()
    # Find the UTXO for the transaction node[1] should have received, check witnessScript matches
    unspent_output = self.nodes[1].listunspent(0, 999999, [p2sh_p2wsh_address["address"]])[0]
    assert_equal(unspent_output["witnessScript"], p2sh_p2wsh_address["redeemScript"])
    p2sh_redeemScript = CScript([OP_0, sha256(hex_str_to_bytes(p2sh_p2wsh_address["redeemScript"]))])
    assert_equal(unspent_output["redeemScript"], p2sh_redeemScript.hex())
    # Now create and sign a transaction spending that output on node[0], which doesn't know the scripts or keys
    spending_tx = self.nodes[0].createrawtransaction([unspent_output], {self.nodes[1].getnewaddress(): Decimal("49.998")})
    spending_tx_signed = self.nodes[0].signrawtransactionwithkey(spending_tx, [embedded_privkey], [unspent_output])
    # Check the signing completed successfully
    assert 'complete' in spending_tx_signed
    assert_equal(spending_tx_signed['complete'], True)
def sign_stake_tx(self, block, stake_in_value, fZPoS=False):
    ''' signs a coinstake transaction
    :param   block:           (CBlock) block with stake to sign
             stake_in_value:  (int) staked amount
             fZPoS:           (bool) zerocoin stake
    :return: stake_tx_signed: (CTransaction) signed tx
    '''
    self.block_sig_key = CECKey()

    if fZPoS:
        self.log.info("Signing zPoS stake...")
        # Create raw zerocoin stake TX (signed)
        raw_stake = self.node.createrawzerocoinstake(block.prevoutStake)
        stake_tx_signed_raw_hex = raw_stake["hex"]
        # Get stake TX private key to sign the block with
        stake_pkey = raw_stake["private-key"]
        self.block_sig_key.set_compressed(True)
        self.block_sig_key.set_secretbytes(bytes.fromhex(stake_pkey))

    else:
        # Create a new private key and get the corresponding public key
        self.block_sig_key.set_secretbytes(hash256(pack('<I', 0xffff)))
        pubkey = self.block_sig_key.get_pubkey()
        # Create the raw stake TX (unsigned)
        scriptPubKey = CScript([pubkey, OP_CHECKSIG])
        outNValue = int(stake_in_value + 2 * COIN)
        stake_tx_unsigned = CTransaction()
        stake_tx_unsigned.nTime = block.nTime
        stake_tx_unsigned.vin.append(CTxIn(block.prevoutStake))
        stake_tx_unsigned.vin[0].nSequence = 0xffffffff
        stake_tx_unsigned.vout.append(CTxOut())
        stake_tx_unsigned.vout.append(CTxOut(outNValue, scriptPubKey))
        # Sign the stake TX
        stake_tx_signed_raw_hex = self.node.signrawtransaction(bytes_to_hex_str(stake_tx_unsigned.serialize()))['hex']

    # Deserialize the signed raw tx into a CTransaction object and return it
    stake_tx_signed = CTransaction()
    stake_tx_signed.deserialize(BytesIO(hex_str_to_bytes(stake_tx_signed_raw_hex)))
    return stake_tx_signed
def tryUpdateInBlock (self, name, value, addr, withWitness):
    """
    Tries to update the given name with a dummy witness directly in a block
    (to bypass any checks done on the mempool).
    """

    txHex = self.buildDummySegwitNameUpdate (name, value, addr)
    tx = CTransaction ()
    tx.deserialize (io.BytesIO (hex_str_to_bytes (txHex)))

    tip = self.node.getbestblockhash ()
    height = self.node.getblockcount () + 1
    nTime = self.node.getblockheader (tip)["mediantime"] + 1
    block = create_block (int (tip, 16), create_coinbase (height), nTime, version=4)

    block.vtx.append (tx)
    add_witness_commitment (block, 0)
    block.solve ()

    blkHex = block.serialize (withWitness).hex ()
    return self.node.submitblock (blkHex)
def generate_small_transactions(self, node, count, utxo_list):
    FEE = 1000  # TODO: replace this with node relay fee based calculation
    num_transactions = 0
    random.shuffle(utxo_list)
    while len(utxo_list) >= 2 and num_transactions < count:
        tx = CTransaction()
        input_amount = 0
        for i in range(2):
            utxo = utxo_list.pop()
            tx.vin.append(CTxIn(COutPoint(int(utxo['txid'], 16), utxo['vout'])))
            input_amount += int(utxo['amount'] * COIN)
        output_amount = (input_amount - FEE) // 3

        if output_amount <= 0:
            # Sanity check -- if we chose inputs that are too small, skip
            continue

        for i in range(3):
            tx.vout.append(CTxOut(output_amount, hex_str_to_bytes(utxo['scriptPubKey'])))

        # Sign and send the transaction to get into the mempool
        tx_signed_hex = node.signrawtransactionwithwallet(ToHex(tx))['hex']
        node.sendrawtransaction(tx_signed_hex)
        num_transactions += 1
def run_test(self):
    self.log.info('prepare some coins for multiple *rawtransaction commands')
    self.nodes[2].generate(1)
    self.sync_all()
    self.nodes[0].generate(101)
    self.sync_all()
    self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.5)
    self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0)
    self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5.0)
    self.sync_all()
    self.nodes[0].generate(5)
    self.sync_all()

    self.log.info('Test getrawtransaction on genesis block coinbase returns an error')
    block = self.nodes[0].getblock(self.nodes[0].getblockhash(0))
    assert_raises_rpc_error(-5, "The genesis block coinbase is not considered an ordinary transaction", self.nodes[0].getrawtransaction, block['merkleroot'])

    self.log.info('Check parameter types and required parameters of createrawtransaction')
    # Test `createrawtransaction` required parameters
    assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction)
    assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, [])

    # Test `createrawtransaction` invalid extra parameters
    assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, [], {}, 0, False, 'foo')

    # Test `createrawtransaction` invalid `inputs`
    txid = '1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000'
    assert_raises_rpc_error(-3, "Expected type array", self.nodes[0].createrawtransaction, 'foo', {})
    assert_raises_rpc_error(-1, "JSON value is not an object as expected", self.nodes[0].createrawtransaction, ['foo'], {})
    assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].createrawtransaction, [{}], {})
    assert_raises_rpc_error(-8, "txid must be of length 64 (not 3, for 'foo')", self.nodes[0].createrawtransaction, [{'txid': 'foo'}], {})
    assert_raises_rpc_error(-8, "txid must be hexadecimal string (not 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844')", self.nodes[0].createrawtransaction, [{'txid': 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844'}], {})
    assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid}], {})
    assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 'foo'}], {})
    assert_raises_rpc_error(-8, "Invalid parameter, vout must be positive", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': -1}], {})
    assert_raises_rpc_error(-8, "Invalid parameter, sequence number is out of range", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 0, 'sequence': -1}], {})

    # Test `createrawtransaction` invalid `outputs`
    address = self.nodes[0].getnewaddress()
    address2 = self.nodes[0].getnewaddress()
    assert_raises_rpc_error(-1, "JSON value is not an array as expected", self.nodes[0].createrawtransaction, [], 'foo')
    self.nodes[0].createrawtransaction(inputs=[], outputs={})  # Should not throw for backwards compatibility
    self.nodes[0].createrawtransaction(inputs=[], outputs=[])
    assert_raises_rpc_error(-8, "Data must be hexadecimal string", self.nodes[0].createrawtransaction, [], {'data': 'foo'})
    assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].createrawtransaction, [], {'foo': 0})
    assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].createrawtransaction, [], {address: 'foo'})
    assert_raises_rpc_error(-3, "Amount out of range", self.nodes[0].createrawtransaction, [], {address: -1})
    assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], multidict([(address, 1), (address, 1)]))
    assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], [{address: 1}, {address: 1}])
    assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], [{"data": 'aa'}, {"data": "bb"}])
    assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], multidict([("data", 'aa'), ("data", "bb")]))
    assert_raises_rpc_error(-8, "Invalid parameter, key-value pair must contain exactly one key", self.nodes[0].createrawtransaction, [], [{'a': 1, 'b': 2}])
    assert_raises_rpc_error(-8, "Invalid parameter, key-value pair not an object as expected", self.nodes[0].createrawtransaction, [], [['key-value pair1'], ['2']])

    # Test `createrawtransaction` invalid `locktime`
    assert_raises_rpc_error(-3, "Expected type number", self.nodes[0].createrawtransaction, [], {}, 'foo')
    assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, -1)
    assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, 4294967296)

    # Test `createrawtransaction` invalid `replaceable`
    assert_raises_rpc_error(-3, "Expected type bool", self.nodes[0].createrawtransaction, [], {}, 0, 'foo')

    self.log.info('Check that createrawtransaction accepts an array and object as outputs')
    tx = CTransaction()
    # One output
    tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs={address: 99}))))
    assert_equal(len(tx.vout), 1)
    assert_equal(
        bytes_to_hex_str(tx.serialize()),
        self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}]),
    )
    # Two outputs
    tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=OrderedDict([(address, 99), (address2, 99)])))))
    assert_equal(len(tx.vout), 2)
    assert_equal(
        bytes_to_hex_str(tx.serialize()),
        self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}]),
    )
    # Multiple mixed outputs
    tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=multidict([(address, 99), (address2, 99), ('data', '99')])))))
    assert_equal(len(tx.vout), 3)
    assert_equal(
        bytes_to_hex_str(tx.serialize()),
        self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}, {'data': '99'}]),
    )

    for type in ["bech32", "p2sh-segwit", "legacy"]:
        addr = self.nodes[0].getnewaddress("", type)
        addrinfo = self.nodes[0].getaddressinfo(addr)
        pubkey = addrinfo["scriptPubKey"]

        self.log.info('sendrawtransaction with missing prevtx info (%s)' % (type))

        # Test `signrawtransactionwithwallet` invalid `prevtxs`
        inputs = [{'txid': txid, 'vout': 3, 'sequence': 1000}]
        outputs = {self.nodes[0].getnewaddress(): 1}
        rawtx = self.nodes[0].createrawtransaction(inputs, outputs)

        prevtx = dict(txid=txid, scriptPubKey=pubkey, vout=3, amount=1)
        succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx])
        assert succ["complete"]
        if type == "legacy":
            del prevtx["amount"]
            succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx])
            assert succ["complete"]

        if type != "legacy":
            assert_raises_rpc_error(-3, "Missing amount", self.nodes[0].signrawtransactionwithwallet, rawtx, [
                {
                    "txid": txid,
                    "scriptPubKey": pubkey,
                    "vout": 3,
                }
            ])

        assert_raises_rpc_error(-3, "Missing vout", self.nodes[0].signrawtransactionwithwallet, rawtx, [
            {
                "txid": txid,
                "scriptPubKey": pubkey,
                "amount": 1,
            }
        ])
        assert_raises_rpc_error(-3, "Missing txid", self.nodes[0].signrawtransactionwithwallet, rawtx, [
            {
                "scriptPubKey": pubkey,
                "vout": 3,
                "amount": 1,
            }
        ])
        assert_raises_rpc_error(-3, "Missing scriptPubKey", self.nodes[0].signrawtransactionwithwallet, rawtx, [
            {
                "txid": txid,
                "vout": 3,
                "amount": 1
            }
        ])

    #########################################
    # sendrawtransaction with missing input #
    #########################################

    self.log.info('sendrawtransaction with missing input')
    inputs = [{'txid': "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout': 1}]  # won't exist
    outputs = {self.nodes[0].getnewaddress(): 4.998}
    rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
    rawtx = self.nodes[2].signrawtransactionwithwallet(rawtx)

    # This will raise an exception since there are missing inputs
    assert_raises_rpc_error(-25, "Missing inputs", self.nodes[2].sendrawtransaction, rawtx['hex'])

    #####################################
    # getrawtransaction with block hash #
    #####################################

    # make a tx by sending then generate 2 blocks; block1 has the tx in it
    tx = self.nodes[2].sendtoaddress(self.nodes[1].getnewaddress(), 1)
    block1, block2 = self.nodes[2].generate(2)
    self.sync_all()
    # We should be able to get the raw transaction by providing the correct block
    gottx = self.nodes[0].getrawtransaction(tx, True, block1)
    assert_equal(gottx['txid'], tx)
    assert_equal(gottx['in_active_chain'], True)
    # We should not have the 'in_active_chain' flag when we don't provide a block
    gottx = self.nodes[0].getrawtransaction(tx, True)
    assert_equal(gottx['txid'], tx)
    assert 'in_active_chain' not in gottx
    # We should not get the tx if we provide an unrelated block
    assert_raises_rpc_error(-5, "No such transaction found", self.nodes[0].getrawtransaction, tx, True, block2)
    # An invalid block hash should raise the correct errors
    assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].getrawtransaction, tx, True, True)
    assert_raises_rpc_error(-8, "parameter 3 must be of length 64 (not 6, for 'foobar')", self.nodes[0].getrawtransaction, tx, True, "foobar")
    assert_raises_rpc_error(-8, "parameter 3 must be of length 64 (not 8, for 'abcd1234')", self.nodes[0].getrawtransaction, tx, True, "abcd1234")
    assert_raises_rpc_error(-8, "parameter 3 must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].getrawtransaction, tx, True, "ZZZ0000000000000000000000000000000000000000000000000000000000000")
    assert_raises_rpc_error(-5, "Block hash not found", self.nodes[0].getrawtransaction, tx, True, "0000000000000000000000000000000000000000000000000000000000000000")
    # Undo the blocks and check in_active_chain
    self.nodes[0].invalidateblock(block1)
    gottx = self.nodes[0].getrawtransaction(txid=tx, verbose=True, blockhash=block1)
    assert_equal(gottx['in_active_chain'], False)
    self.nodes[0].reconsiderblock(block1)
    assert_equal(self.nodes[0].getbestblockhash(), block2)

    #########################
    # RAW TX MULTISIG TESTS #
    #########################
    # 2of2 test
    addr1 = self.nodes[2].getnewaddress()
    addr2 = self.nodes[2].getnewaddress()

    addr1Obj = self.nodes[2].getaddressinfo(addr1)
    addr2Obj = self.nodes[2].getaddressinfo(addr2)

    # Tests for createmultisig and addmultisigaddress
    assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, ["01020304"])
    self.nodes[0].createmultisig(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])  # createmultisig can only take public keys
    assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 2, [addr1Obj['pubkey'], addr1])  # addmultisigaddress can take both pubkeys and addresses so long as they are in the wallet, which is tested here.

    mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr1])['address']

    # use balance deltas instead of absolute values
    bal = self.nodes[2].getbalance()

    # send 1.2 BTC to msig adr
    txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()
    assert_equal(self.nodes[2].getbalance(), bal + Decimal('1.20000000'))  # node2 has both keys of the 2of2 ms addr., tx should affect the balance

    # 2of3 test from different nodes
    bal = self.nodes[2].getbalance()
    addr1 = self.nodes[1].getnewaddress()
    addr2 = self.nodes[2].getnewaddress()
    addr3 = self.nodes[2].getnewaddress()

    addr1Obj = self.nodes[1].getaddressinfo(addr1)
    addr2Obj = self.nodes[2].getaddressinfo(addr2)
    addr3Obj = self.nodes[2].getaddressinfo(addr3)

    mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])['address']

    txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
    decTx = self.nodes[0].gettransaction(txId)
    rawTx = self.nodes[0].decoderawtransaction(decTx['hex'])
    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()

    # THIS IS AN INCOMPLETE FEATURE
    # NODE2 HAS TWO OF THREE KEY AND THE FUNDS SHOULD BE SPENDABLE AND COUNT AT BALANCE CALCULATION
    assert_equal(self.nodes[2].getbalance(), bal)  # for now, assume the funds of a 2of3 multisig tx are not marked as spendable

    txDetails = self.nodes[0].gettransaction(txId, True)
    rawTx = self.nodes[0].decoderawtransaction(txDetails['hex'])
    vout = False
    for outpoint in rawTx['vout']:
        if outpoint['value'] == Decimal('2.20000000'):
            vout = outpoint
            break

    bal = self.nodes[0].getbalance()
    inputs = [{"txid": txId, "vout": vout['n'], "scriptPubKey": vout['scriptPubKey']['hex'], "amount": vout['value']}]
    outputs = {self.nodes[0].getnewaddress(): 2.19}
    rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
    rawTxPartialSigned = self.nodes[1].signrawtransactionwithwallet(rawTx, inputs)
    assert_equal(rawTxPartialSigned['complete'], False)  # node1 only has one key, can't comp. sign the tx

    rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx, inputs)
    assert_equal(rawTxSigned['complete'], True)  # node2 can sign the tx compl., own two of three keys
    self.nodes[2].sendrawtransaction(rawTxSigned['hex'])
    rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex'])
    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()
    assert_equal(self.nodes[0].getbalance(), bal + Decimal('50.00000000') + Decimal('2.19000000'))  # block reward + tx

    # 2of2 test for combining transactions
    bal = self.nodes[2].getbalance()
    addr1 = self.nodes[1].getnewaddress()
    addr2 = self.nodes[2].getnewaddress()

    addr1Obj = self.nodes[1].getaddressinfo(addr1)
    addr2Obj = self.nodes[2].getaddressinfo(addr2)

    self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
    mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
    mSigObjValid = self.nodes[2].getaddressinfo(mSigObj)

    txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
    decTx = self.nodes[0].gettransaction(txId)
    rawTx2 = self.nodes[0].decoderawtransaction(decTx['hex'])
    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()

    assert_equal(self.nodes[2].getbalance(), bal)  # the funds of a 2of2 multisig tx should not be marked as spendable

    txDetails = self.nodes[0].gettransaction(txId, True)
    rawTx2 = self.nodes[0].decoderawtransaction(txDetails['hex'])
    vout = False
    for outpoint in rawTx2['vout']:
        if outpoint['value'] == Decimal('2.20000000'):
            vout = outpoint
            break

    bal = self.nodes[0].getbalance()
    inputs = [{"txid": txId, "vout": vout['n'], "scriptPubKey": vout['scriptPubKey']['hex'], "redeemScript": mSigObjValid['hex'], "amount": vout['value']}]
    outputs = {self.nodes[0].getnewaddress(): 2.19}
    rawTx2 = self.nodes[2].createrawtransaction(inputs, outputs)
    rawTxPartialSigned1 = self.nodes[1].signrawtransactionwithwallet(rawTx2, inputs)
    self.log.debug(rawTxPartialSigned1)
    assert_equal(rawTxPartialSigned1['complete'], False)  # node1 only has one key, can't comp. sign the tx

    rawTxPartialSigned2 = self.nodes[2].signrawtransactionwithwallet(rawTx2, inputs)
    self.log.debug(rawTxPartialSigned2)
    assert_equal(rawTxPartialSigned2['complete'], False)  # node2 only has one key, can't comp. sign the tx
    rawTxComb = self.nodes[2].combinerawtransaction([rawTxPartialSigned1['hex'], rawTxPartialSigned2['hex']])
    self.log.debug(rawTxComb)
    self.nodes[2].sendrawtransaction(rawTxComb)
    rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb)
    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()
    assert_equal(self.nodes[0].getbalance(), bal + Decimal('50.00000000') + Decimal('2.19000000'))  # block reward + tx

    # decoderawtransaction tests
    # witness transaction
    encrawtx = "010000000001010000000000000072c1a6a246ae63f74f931e8365e15a089c68d61900000000000000000000ffffffff0100e1f50500000000000102616100000000"
    decrawtx = self.nodes[0].decoderawtransaction(encrawtx, True)  # decode as witness transaction
    assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000'))
    assert_raises_rpc_error(-22, 'TX decode failed', self.nodes[0].decoderawtransaction, encrawtx, False)  # force decode as non-witness transaction
    # non-witness transaction
    encrawtx = "01000000010000000000000072c1a6a246ae63f74f931e8365e15a089c68d61900000000000000000000ffffffff0100e1f505000000000000000000"
    decrawtx = self.nodes[0].decoderawtransaction(encrawtx, False)  # decode as non-witness transaction
    assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000'))

    # getrawtransaction tests
    # 1. valid parameters - only supply txid
    txHash = rawTx["hash"]
    assert_equal(self.nodes[0].getrawtransaction(txHash), rawTxSigned['hex'])

    # 2. valid parameters - supply txid and 0 for non-verbose
    assert_equal(self.nodes[0].getrawtransaction(txHash, 0), rawTxSigned['hex'])

    # 3. valid parameters - supply txid and False for non-verbose
    assert_equal(self.nodes[0].getrawtransaction(txHash, False), rawTxSigned['hex'])

    # 4. valid parameters - supply txid and 1 for verbose.
    # We only check the "hex" field of the output so we don't need to update this test every time the output format changes.
    assert_equal(self.nodes[0].getrawtransaction(txHash, 1)["hex"], rawTxSigned['hex'])

    # 5. valid parameters - supply txid and True for verbose
    assert_equal(self.nodes[0].getrawtransaction(txHash, True)["hex"], rawTxSigned['hex'])

    # 6. invalid parameters - supply txid and string "Flase"
    assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txHash, "Flase")

    # 7. invalid parameters - supply txid and empty array
    assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txHash, [])

    # 8. invalid parameters - supply txid and empty dict
    assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txHash, {})

    inputs = [{'txid': "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout': 1, 'sequence': 1000}]
    outputs = {self.nodes[0].getnewaddress(): 1}
    rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
    decrawtx = self.nodes[0].decoderawtransaction(rawtx)
    assert_equal(decrawtx['vin'][0]['sequence'], 1000)

    # 9. invalid parameters - sequence number out of range
    inputs = [{'txid': "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout': 1, 'sequence': -1}]
    outputs = {self.nodes[0].getnewaddress(): 1}
    assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs)

    # 10. invalid parameters - sequence number out of range
    inputs = [{'txid': "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout': 1, 'sequence': 4294967296}]
    outputs = {self.nodes[0].getnewaddress(): 1}
    assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs)

    inputs = [{'txid': "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout': 1, 'sequence': 4294967294}]
    outputs = {self.nodes[0].getnewaddress(): 1}
    rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
    decrawtx = self.nodes[0].decoderawtransaction(rawtx)
    assert_equal(decrawtx['vin'][0]['sequence'], 4294967294)

    ####################################
    # TRANSACTION VERSION NUMBER TESTS #
    ####################################

    # Test the minimum transaction version number that fits in a signed 32-bit integer.
    tx = CTransaction()
    tx.nVersion = -0x80000000
    rawtx = ToHex(tx)
    decrawtx = self.nodes[0].decoderawtransaction(rawtx)
    assert_equal(decrawtx['version'], -0x80000000)

    # Test the maximum transaction version number that fits in a signed 32-bit integer.
    tx = CTransaction()
    tx.nVersion = 0x7fffffff
    rawtx = ToHex(tx)
    decrawtx = self.nodes[0].decoderawtransaction(rawtx)
    assert_equal(decrawtx['version'], 0x7fffffff)
def def_utxo(height):
    hex_id = hex_str_to_bytes('0' * 64)
    uint256 = uint256_from_str(hex_id)
    return UTXO(height, TxType.REGULAR, COutPoint(uint256, 0), CTxOut(0, b""))
def FromHex(obj, hex_string):
    obj.deserialize(BytesIO(hex_str_to_bytes(hex_string)))
    return obj
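# Minimal usage sketch (illustrative assumption, not part of the original
# framework code): round-trip a raw transaction fetched from a node through
# FromHex() and back to hex. This assumes serialize() uses the same (witness)
# encoding the node used for getrawtransaction.
def example_fromhex_roundtrip(node, some_txid):
    raw = node.getrawtransaction(some_txid)
    tx = FromHex(CTransaction(), raw)
    assert_equal(tx.serialize().hex(), raw)
    return tx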
def test_addrv2_no_addresses(self):
    self.test_addrv2('no addresses',
        [
            'received: addrv2 (1 bytes)',
        ],
        hex_str_to_bytes('00'))
def decodescript_script_pub_key(self):
    public_key = '03b0da749730dc9b4b1f4a14d6902877a92541f5368778853d9c4a0cb7802dcfb2'
    push_public_key = '21' + public_key
    public_key_hash = '5dd1d3a048119c27b28293056724d9522f26d945'
    push_public_key_hash = '14' + public_key_hash
    uncompressed_public_key = '04b0da749730dc9b4b1f4a14d6902877a92541f5368778853d9c4a0cb7802dcfb25e01fc8fde47c96c98a4f3a8123e33a38a50cf9025cc8c4494a518f991792bb7'
    push_uncompressed_public_key = '41' + uncompressed_public_key
    p2wsh_p2pk_script_hash = 'd8590cf8ea0674cf3d49fd7ca249b85ef7485dea62c138468bddeb20cd6519f7'

    # below are test cases for all of the standard transaction types

    # 1) P2PK scriptPubKey
    # <pubkey> OP_CHECKSIG
    rpc_result = self.nodes[0].decodescript(push_public_key + 'ac')
    assert_equal(public_key + ' OP_CHECKSIG', rpc_result['asm'])
    # P2PK is translated to P2WPKH
    assert_equal('0 ' + public_key_hash, rpc_result['segwit']['asm'])

    # 2) P2PKH scriptPubKey
    # OP_DUP OP_HASH160 <PubKeyHash> OP_EQUALVERIFY OP_CHECKSIG
    rpc_result = self.nodes[0].decodescript('76a9' + push_public_key_hash + '88ac')
    assert_equal('OP_DUP OP_HASH160 ' + public_key_hash + ' OP_EQUALVERIFY OP_CHECKSIG', rpc_result['asm'])
    # P2PKH is translated to P2WPKH
    assert_equal('0 ' + public_key_hash, rpc_result['segwit']['asm'])

    # 3) multisig scriptPubKey
    # <m> <A pubkey> <B pubkey> <C pubkey> <n> OP_CHECKMULTISIG
    # just imagine that the pub keys used below are different.
    # for our purposes here it does not matter that they are the same even though it is unrealistic.
    multisig_script = '52' + push_public_key + push_public_key + push_public_key + '53ae'
    rpc_result = self.nodes[0].decodescript(multisig_script)
    assert_equal('2 ' + public_key + ' ' + public_key + ' ' + public_key + ' 3 OP_CHECKMULTISIG', rpc_result['asm'])
    # multisig in P2WSH
    multisig_script_hash = bytes_to_hex_str(sha256(hex_str_to_bytes(multisig_script)))
    assert_equal('0 ' + multisig_script_hash, rpc_result['segwit']['asm'])

    # 4) P2SH scriptPubKey
    # OP_HASH160 <Hash160(redeemScript)> OP_EQUAL.
    # push_public_key_hash here should actually be the hash of a redeem script.
    # but this works the same for purposes of this test.
    rpc_result = self.nodes[0].decodescript('a9' + push_public_key_hash + '87')
    assert_equal('OP_HASH160 ' + public_key_hash + ' OP_EQUAL', rpc_result['asm'])
    # P2SH does not work in segwit scripts. decodescript should not return a result for it.
    assert 'segwit' not in rpc_result

    # 5) null data scriptPubKey
    # use a signature look-alike here to make sure that we do not decode random data as a signature.
    # this matters if/when signature sighash decoding comes along.
    # would want to make sure that no such decoding takes place in this case.
    signature_imposter = '48304502207fa7a6d1e0ee81132a269ad84e68d695483745cde8b541e3bf630749894e342a022100c1f7ab20e13e22fb95281a870f3dcf38d782e53023ee313d741ad0cfbc0c509001'
    # OP_RETURN <data>
    rpc_result = self.nodes[0].decodescript('6a' + signature_imposter)
    assert_equal('OP_RETURN ' + signature_imposter[2:], rpc_result['asm'])

    # 6) a CLTV redeem script. redeem scripts are in-effect scriptPubKey scripts, so adding a test here.
    # OP_NOP2 is also known as OP_CHECKLOCKTIMEVERIFY.
    # just imagine that the pub keys used below are different.
    # for our purposes here it does not matter that they are the same even though it is unrealistic.
    #
    # OP_IF
    #   <receiver-pubkey> OP_CHECKSIGVERIFY
    # OP_ELSE
    #   <lock-until> OP_CHECKLOCKTIMEVERIFY OP_DROP
    # OP_ENDIF
    # <sender-pubkey> OP_CHECKSIG
    #
    # lock until block 500,000
    cltv_script = '63' + push_public_key + 'ad670320a107b17568' + push_public_key + 'ac'
    rpc_result = self.nodes[0].decodescript(cltv_script)
    assert_equal('OP_IF ' + public_key + ' OP_CHECKSIGVERIFY OP_ELSE 500000 OP_CHECKLOCKTIMEVERIFY OP_DROP OP_ENDIF ' + public_key + ' OP_CHECKSIG', rpc_result['asm'])
    # CLTV script in P2WSH
    cltv_script_hash = bytes_to_hex_str(sha256(hex_str_to_bytes(cltv_script)))
    assert_equal('0 ' + cltv_script_hash, rpc_result['segwit']['asm'])

    # 7) P2PK scriptPubKey
    # <pubkey> OP_CHECKSIG
    rpc_result = self.nodes[0].decodescript(push_uncompressed_public_key + 'ac')
    assert_equal(uncompressed_public_key + ' OP_CHECKSIG', rpc_result['asm'])
    # uncompressed pubkeys are invalid for checksigs in segwit scripts.
    # decodescript should not return a P2WPKH equivalent.
    assert 'segwit' not in rpc_result

    # 8) multisig scriptPubKey with an uncompressed pubkey
    # <m> <A pubkey> <B pubkey> <n> OP_CHECKMULTISIG
    # just imagine that the pub keys used below are different.
    # the purpose of this test is to check that a segwit script is not returned for bare multisig scripts
    # with an uncompressed pubkey in them.
    rpc_result = self.nodes[0].decodescript('52' + push_public_key + push_uncompressed_public_key + '52ae')
    assert_equal('2 ' + public_key + ' ' + uncompressed_public_key + ' 2 OP_CHECKMULTISIG', rpc_result['asm'])
    # uncompressed pubkeys are invalid for checksigs in segwit scripts.
    # decodescript should not return a P2WPKH equivalent.
    assert 'segwit' not in rpc_result

    # 9) P2WPKH scriptpubkey
    # 0 <PubKeyHash>
    rpc_result = self.nodes[0].decodescript('00' + push_public_key_hash)
    assert_equal('0 ' + public_key_hash, rpc_result['asm'])
    # segwit scripts do not work nested into each other.
    # a nested segwit script should not be returned in the results.
    assert 'segwit' not in rpc_result

    # 10) P2WSH scriptpubkey
    # 0 <ScriptHash>
    # even though this hash is of a P2PK script which is better used as bare P2WPKH, it should not matter
    # for the purpose of this test.
    rpc_result = self.nodes[0].decodescript('0020' + p2wsh_p2pk_script_hash)
    assert_equal('0 ' + p2wsh_p2pk_script_hash, rpc_result['asm'])
    # segwit scripts do not work nested into each other.
    # a nested segwit script should not be returned in the results.
    assert 'segwit' not in rpc_result
def run_test(self):
    blocks = []
    self.bl_count = 0
    blocks.append(self.nodes[0].getblockhash(0))

    small_target_h = 3

    self.mark_logs("Node0 generates %d blocks" % (CBH_DELTA_HEIGHT + small_target_h - 1))
    blocks.extend(self.nodes[0].generate(CBH_DELTA_HEIGHT + small_target_h - 1))
    self.sync_all()

    amount_node1 = Decimal("1002.0")
    self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), amount_node1)
    self.sync_all()

    self.mark_logs("Node0 generates 1 blocks")
    blocks.extend(self.nodes[0].generate(1))
    self.sync_all()

    self.mark_logs("Trying to send a tx with a scriptPubKey referencing a block too recent...")

    # Create a tx having in its scriptPubKey a custom referenced block in the CHECKBLOCKATHEIGHT part

    # select necessary utxos for doing the PAYMENT
    usp = self.nodes[0].listunspent()

    payment = Decimal('1.0')
    fee = Decimal('0.00005')

    amount = Decimal('0')
    inputs = []
    for x in usp:
        amount += Decimal(x['amount'])
        inputs.append({"txid": x['txid'], "vout": x['vout']})
        if amount >= payment + fee:
            break

    outputs = {
        self.nodes[0].getnewaddress(): (Decimal(amount) - payment - fee),
        self.nodes[2].getnewaddress(): payment
    }
    rawTx = self.nodes[0].createrawtransaction(inputs, outputs)

    # build an object from the raw tx in order to be able to modify it
    tx_01 = CTransaction()
    f = cStringIO.StringIO(unhexlify(rawTx))
    tx_01.deserialize(f)

    decodedScriptOrig = self.nodes[0].decodescript(binascii.hexlify(tx_01.vout[1].scriptPubKey))
    scriptOrigAsm = decodedScriptOrig['asm']

    # store the hashed script, it is reused
    params = scriptOrigAsm.split()
    hash160 = hex_str_to_bytes(params[2])

    # new referenced block height
    modTargetHeigth = CBH_DELTA_HEIGHT + small_target_h - FINALITY_MIN_AGE + 5

    # new referenced block hash
    modTargetHash = hex_str_to_bytes(swap_bytes(blocks[modTargetHeigth]))

    # build modified script
    modScriptPubKey = CScript([
        OP_DUP, OP_HASH160, hash160, OP_EQUALVERIFY, OP_CHECKSIG,
        modTargetHash, modTargetHeigth, OP_CHECKBLOCKATHEIGHT
    ])

    tx_01.vout[1].scriptPubKey = modScriptPubKey
    tx_01.rehash()

    decodedScriptMod = self.nodes[0].decodescript(binascii.hexlify(tx_01.vout[1].scriptPubKey))
    print "  Modified scriptPubKey in tx 1: ", decodedScriptMod['asm']

    signedRawTx = self.nodes[0].signrawtransaction(ToHex(tx_01))

    h = self.nodes[0].getblockcount()
    assert_greater_than(FINALITY_MIN_AGE, h - modTargetHeigth)

    print "  Node0 sends %f coins to Node2" % payment

    try:
        txid = self.nodes[0].sendrawtransaction(signedRawTx['hex'])
        print "  Tx sent: ", txid
        # should fail, therefore force test failure
        assert_equal(True, False)
    except JSONRPCException, e:
        print "  ==> tx has been rejected as expected:"
        print "      referenced block height=%d, chainActive.height=%d, minimumAge=%d" % (modTargetHeigth, h, FINALITY_MIN_AGE)
        print
def run_test(self): self.nodes[0].generate(161, self.signblockprivkey) #block 161 self.log.info("Verify sigops are counted in GBT with pre-BIP141 rules") txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1) tmpl = self.nodes[0].getblocktemplate({}) assert (tmpl['sizelimit'] == 1000000) assert ('weightlimit' not in tmpl) assert (tmpl['sigoplimit'] == 20000) assert (tmpl['transactions'][0]['txid'] == txid) assert (tmpl['transactions'][0]['sigops'] == 2) balance_presetup = self.nodes[0].getbalance() self.pubkey = [] p2sh_ids = [ ] # p2sh_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE embedded in p2sh wit_ids = [ ] # wit_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE via bare witness for i in range(3): newaddress = self.nodes[i].getnewaddress() self.pubkey.append( self.nodes[i].getaddressinfo(newaddress)["pubkey"]) multiscript = CScript([ OP_1, hex_str_to_bytes(self.pubkey[-1]), OP_1, OP_CHECKMULTISIG ]) p2sh_ms_addr = self.nodes[i].addmultisigaddress( 1, [self.pubkey[-1]], '', 'legacy')['address'] #p2sh-ms address is not p2sh-p2wsh-ms in tapyrus assert_equal(p2sh_ms_addr, script_to_p2sh(multiscript)) p2sh_ids.append([]) wit_ids.append([]) for v in range(2): p2sh_ids[i].append([]) wit_ids[i].append([]) for i in range(5): for n in range(3): for v in range(2): wit_ids[n][v].append( send_to_witness(v, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[n], False, Decimal("49.999"))) p2sh_ids[n][v].append( send_to_witness(v, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[n], True, Decimal("49.999"))) self.nodes[0].generate(1, self.signblockprivkey) #block 163 sync_blocks(self.nodes) # Make sure all nodes recognize the transactions as theirs #assert_equal(self.nodes[0].getbalance(), balance_presetup - 60*50 + 20*Decimal("49.999") + 50) assert_equal(self.nodes[1].getbalance(), 0) assert_equal(self.nodes[2].getbalance(), 0) self.nodes[0].generate(260, self.signblockprivkey) #block 423 sync_blocks(self.nodes) self.log.info("Verify that only P2SH endoced witness txs are accepted") self.success_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][0], True) #block 424 self.success_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][0], True) #block 426 self.log.info( "Verify unsigned p2sh witness txs without a redeem script are invalid" ) self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V1][0], False) self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V1][0], False) self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V0][1], False) self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V1][1], False) self.nodes[2].generate(4, self.signblockprivkey) # blocks 428-431 self.log.info( "Verify previous witness txs skipped for mining can now be mined") assert_equal(len(self.nodes[2].getrawmempool()), 0) block = self.nodes[2].generate( 1, self.signblockprivkey ) #block 432 (first block with new rules; 432 = 144 * 3) sync_blocks(self.nodes) assert_equal(len(self.nodes[2].getrawmempool()), 0) self.log.info( "Verify default node can't accept txs with missing witness") # unsigned, no scriptsig self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V0][0], False) self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V1][0], False) # unsigned with redeem script 
self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V0][0], False, witness_script(False, self.pubkey[0])) self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V1][0], False, witness_script(True, self.pubkey[0])) self.log.info( "Verify block and transaction serialization rpcs return differing serializations depending on rpc serialization flag" ) assert (self.nodes[2].getblock(block[0], False) == self.nodes[0].getblock( block[0], False)) assert (self.nodes[1].getblock(block[0], False) == self.nodes[2].getblock( block[0], False)) self.log.info("Verify witness txs without witness data are invalid") self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", wit_ids[NODE_2][WIT_V0][2], False) self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", wit_ids[NODE_2][WIT_V1][2], False) self.success_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][2], sign=False, redeem_script=witness_script(False, self.pubkey[2])) self.success_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][2], sign=False, redeem_script=witness_script(True, self.pubkey[2])) self.log.info("Verify sigops are counted in GBT with BIP141 rules") txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1) self.nodes[0].generate( 1, self.signblockprivkey) # Mine a block to clear the gbt cache self.log.info( "Non-segwit miners are able to use GBT response after activation.") # Create a 3-tx chain: tx1 (non-segwit input, paying to a segwit output) -> # tx2 (segwit input, paying to a non-segwit output) -> # tx3 (non-segwit input, paying to a non-segwit output). # tx1 is allowed to appear in the block, but no others. txid1 = send_to_witness(1, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[0], False, Decimal("49.996")) hex_tx = self.nodes[0].gettransaction(txid)['hex'] tx = FromHex(CTransaction(), hex_tx) assert (tx.wit.is_null()) # This should not be a segwit input assert (txid1 in self.nodes[0].getrawmempool()) # Now create tx2, which will spend from txid1. tx = CTransaction() tx.vin.append(CTxIn(COutPoint(int(txid1, 16), 0), b'')) tx.vout.append( CTxOut(int(49.99 * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))) tx2_hex = self.nodes[0].signrawtransactionwithwallet( ToHex(tx), [], "ALL", self.options.scheme)['hex'] assert_raises_rpc_error(-26, "mandatory-script-verify-flag-failed", self.nodes[0].sendrawtransaction, tx2_hex) tx = FromHex(CTransaction(), tx2_hex) assert (tx.wit.is_null()) # Now create tx3, which will spend from txid2 tx = CTransaction() tx.vin.append(CTxIn(COutPoint(int(txid1, 16), 0), b"")) tx.vout.append( CTxOut(int(49.95 * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))) # Huge fee tx.calc_sha256() assert_raises_rpc_error(-26, "mandatory-script-verify-flag-failed", self.nodes[0].sendrawtransaction, ToHex(tx)) assert (tx.wit.is_null()) # Now try calling getblocktemplate() without segwit support. template = self.nodes[0].getblocktemplate() # Check that tx1 is the only transaction of the 3 in the template. template_txids = [t['txid'] for t in template['transactions']] assert (txid1 in template_txids) # Check that running with segwit support results in all 3 being included. template = self.nodes[0].getblocktemplate({"rules": ["segwit"]}) template_txids = [t['txid'] for t in template['transactions']] assert (txid1 in template_txids) # Mine a block to clear the gbt cache again. 
self.nodes[0].generate(1, self.signblockprivkey) self.log.info( "Verify behaviour of importaddress, addwitnessaddress and listunspent" ) # Some public keys to be used later pubkeys = [ "0363D44AABD0F1699138239DF2F042C3282C0671CC7A76826A55C8203D90E39242", # cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb "02D3E626B3E616FC8662B489C123349FECBFC611E778E5BE739B257EAE4721E5BF", # cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97 "04A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538A62F5BD8EC85C2477F39650BD391EA6250207065B2A81DA8B009FC891E898F0E", # 91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV "02A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538", # cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd "036722F784214129FEB9E8129D626324F3F6716555B603FFE8300BBCB882151228", # cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66 "0266A8396EE936BF6D99D17920DB21C6C7B1AB14C639D5CD72B300297E416FD2EC", # cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K "0450A38BD7F0AC212FEBA77354A9B036A32E0F7C81FC4E0C5ADCA7C549C4505D2522458C2D9AE3CEFD684E039194B72C8A10F9CB9D4764AB26FCC2718D421D3B84", # 92h2XPssjBpsJN5CqSP7v9a7cf2kgDunBC6PDFwJHMACM1rrVBJ ] # Import a compressed key and an uncompressed key, generate some multisig addresses self.nodes[0].importprivkey( "92e6XLo5jVAVwrQKPNTs93oQco8f8sDNBcpv73Dsrs397fQtFQn") uncompressed_spendable_address = ["mvozP4UwyGD2mGZU4D2eMvMLPB9WkMmMQu"] self.nodes[0].importprivkey( "cNC8eQ5dg3mFAVePDX4ddmPYpPbw41r9bm2jd1nLJT77e6RrzTRR") compressed_spendable_address = ["mmWQubrDomqpgSYekvsU7HWEVjLFHAakLe"] assert ((self.nodes[0].getaddressinfo( uncompressed_spendable_address[0])['iscompressed'] == False)) assert ((self.nodes[0].getaddressinfo( compressed_spendable_address[0])['iscompressed'] == True)) self.nodes[0].importpubkey(pubkeys[0]) compressed_solvable_address = [key_to_p2pkh(pubkeys[0])] self.nodes[0].importpubkey(pubkeys[1]) compressed_solvable_address.append(key_to_p2pkh(pubkeys[1])) self.nodes[0].importpubkey(pubkeys[2]) uncompressed_solvable_address = [key_to_p2pkh(pubkeys[2])] spendable_anytime = [ ] # These outputs should be seen anytime after importprivkey and addmultisigaddress spendable_after_importaddress = [ ] # These outputs should be seen after importaddress solvable_after_importaddress = [ ] # These outputs should be seen after importaddress but not spendable unsolvable_after_importaddress = [ ] # These outputs should be unsolvable after importaddress solvable_anytime = [ ] # These outputs should be solvable after importpubkey unseen_anytime = [] # These outputs should never be seen uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress( 2, [ uncompressed_spendable_address[0], compressed_spendable_address[0] ])['address']) uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress( 2, [ uncompressed_spendable_address[0], uncompressed_spendable_address[0] ])['address']) compressed_spendable_address.append(self.nodes[0].addmultisigaddress( 2, [compressed_spendable_address[0], compressed_spendable_address[0] ])['address']) uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress( 2, [ compressed_spendable_address[0], uncompressed_solvable_address[0] ])['address']) compressed_solvable_address.append(self.nodes[0].addmultisigaddress( 2, [compressed_spendable_address[0], compressed_solvable_address[0] ])['address']) compressed_solvable_address.append(self.nodes[0].addmultisigaddress( 2, [compressed_solvable_address[0], compressed_solvable_address[1] ])['address']) unknown_address = [ 
"mtKKyoHabkk6e4ppT7NaM7THqPUt7AzPrT", "2NDP3jLWAFT8NDAiUa9qiE6oBt2awmMq7Dx" ] # Test multisig_without_privkey # We have 2 public keys without private keys, use addmultisigaddress to add to wallet. # Money sent to P2SH of multisig of this should only be seen after importaddress with the BASE58 P2SH address. multisig_without_privkey_address = self.nodes[0].addmultisigaddress( 2, [pubkeys[3], pubkeys[4]])['address'] script = CScript([ OP_2, hex_str_to_bytes(pubkeys[3]), hex_str_to_bytes(pubkeys[4]), OP_2, OP_CHECKMULTISIG ]) solvable_after_importaddress.append( CScript([OP_HASH160, hash160(script), OP_EQUAL])) for i in compressed_spendable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) # p2sh multisig with compressed keys should always be spendable spendable_anytime.extend([p2sh]) # bare multisig can be watched and signed, but is not treated as ours solvable_after_importaddress.extend([bare]) # P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after direct importaddress unseen_anytime.extend([p2wsh, p2sh_p2wsh]) else: [ p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh ] = self.p2pkh_address_to_script(v) # normal P2PKH and P2PK with compressed keys should always be spendable spendable_anytime.extend([p2pkh, p2pk]) # P2SH_P2PK, P2SH_P2PKH with compressed keys are spendable after direct importaddress spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh]) unseen_anytime.extend([ p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh, p2wpkh, p2sh_p2wpkh ]) for i in uncompressed_spendable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) # p2sh multisig with uncompressed keys should always be spendable spendable_anytime.extend([p2sh]) # bare multisig can be watched and signed, but is not treated as ours solvable_after_importaddress.extend([bare]) # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen unseen_anytime.extend([p2wsh, p2sh_p2wsh]) else: [ p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh ] = self.p2pkh_address_to_script(v) # normal P2PKH and P2PK with uncompressed keys should always be spendable spendable_anytime.extend([p2pkh, p2pk]) # P2SH_P2PK and P2SH_P2PKH are spendable after direct importaddress spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh]) # Witness output types with uncompressed keys are never seen unseen_anytime.extend([ p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh ]) for i in compressed_solvable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): # Multisig without private is not seen after addmultisigaddress, but seen after importaddress [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) solvable_after_importaddress.extend( [bare, p2sh, p2wsh, p2sh_p2wsh]) else: [ p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh ] = self.p2pkh_address_to_script(v) # normal P2PKH, P2PK, P2WPKH and P2SH_P2WPKH with compressed keys should always be seen solvable_anytime.extend([p2pkh, p2pk, p2wpkh, p2sh_p2wpkh]) # P2SH_P2PK, P2SH_P2PKH with compressed keys are seen after direct importaddress solvable_after_importaddress.extend([ p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh ]) for i 
in uncompressed_solvable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) # Base uncompressed multisig without private is not seen after addmultisigaddress, but seen after importaddress solvable_after_importaddress.extend([bare, p2sh]) # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen unseen_anytime.extend([p2wsh, p2sh_p2wsh]) else: [ p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh ] = self.p2pkh_address_to_script(v) # normal P2PKH and P2PK with uncompressed keys should always be seen solvable_anytime.extend([p2pkh, p2pk]) # P2SH_P2PK, P2SH_P2PKH with uncompressed keys are seen after direct importaddress solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh]) # Witness output types with uncompressed keys are never seen unseen_anytime.extend([ p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh ]) op1 = CScript([OP_1]) op0 = CScript([OP_0]) # 2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe is the P2SH(P2PKH) version of mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V unsolvable_address = [ "mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V", "2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe", script_to_p2sh(op1), script_to_p2sh(op0) ] unsolvable_address_key = hex_str_to_bytes( "02341AEC7587A51CDE5279E0630A531AEA2615A9F80B17E8D9376327BAEAA59E3D" ) unsolvablep2pkh = CScript([ OP_DUP, OP_HASH160, hash160(unsolvable_address_key), OP_EQUALVERIFY, OP_CHECKSIG ]) unsolvablep2wshp2pkh = CScript([OP_0, sha256(unsolvablep2pkh)]) p2shop0 = CScript([OP_HASH160, hash160(op0), OP_EQUAL]) p2wshop1 = CScript([OP_0, sha256(op1)]) unsolvable_after_importaddress.append(unsolvablep2pkh) unsolvable_after_importaddress.append(unsolvablep2wshp2pkh) unsolvable_after_importaddress.append( op1) # OP_1 will be imported as script unsolvable_after_importaddress.append(p2wshop1) unseen_anytime.append( op0 ) # OP_0 will be imported as P2SH address with no script provided unsolvable_after_importaddress.append(p2shop0) spendable_txid = [] solvable_txid = [] spendable_txid.append( self.mine_and_test_listunspent(spendable_anytime, 2)) solvable_txid.append( self.mine_and_test_listunspent(solvable_anytime, 1)) self.mine_and_test_listunspent( spendable_after_importaddress + solvable_after_importaddress + unseen_anytime + unsolvable_after_importaddress, 0) importlist = [] for i in compressed_spendable_address + uncompressed_spendable_address + compressed_solvable_address + uncompressed_solvable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): bare = hex_str_to_bytes(v['hex']) importlist.append(bytes_to_hex_str(bare)) importlist.append( bytes_to_hex_str(CScript([OP_0, sha256(bare)]))) else: pubkey = hex_str_to_bytes(v['pubkey']) p2pk = CScript([pubkey, OP_CHECKSIG]) p2pkh = CScript([ OP_DUP, OP_HASH160, hash160(pubkey), OP_EQUALVERIFY, OP_CHECKSIG ]) importlist.append(bytes_to_hex_str(p2pk)) importlist.append(bytes_to_hex_str(p2pkh)) importlist.append( bytes_to_hex_str(CScript([OP_0, hash160(pubkey)]))) importlist.append( bytes_to_hex_str(CScript([OP_0, sha256(p2pk)]))) importlist.append( bytes_to_hex_str(CScript([OP_0, sha256(p2pkh)]))) importlist.append(bytes_to_hex_str(unsolvablep2pkh)) importlist.append(bytes_to_hex_str(unsolvablep2wshp2pkh)) importlist.append(bytes_to_hex_str(op1)) importlist.append(bytes_to_hex_str(p2wshop1)) for i in importlist: # import all generated addresses. 
The wallet already has the private keys for some of these, so catch JSON RPC # exceptions and continue. try_rpc( -4, "The wallet already contains the private key for this address or script", self.nodes[0].importaddress, i, "", False, True) self.nodes[0].importaddress( script_to_p2sh(op0)) # import OP_0 as address only self.nodes[0].importaddress( multisig_without_privkey_address) # Test multisig_without_privkey spendable_txid.append( self.mine_and_test_listunspent( spendable_anytime + spendable_after_importaddress, 2)) solvable_txid.append( self.mine_and_test_listunspent( solvable_anytime + solvable_after_importaddress, 1)) self.mine_and_test_listunspent(unsolvable_after_importaddress, 1) #self.mine_and_test_listunspent(unseen_anytime, 0) # addwitnessaddress should refuse to return a witness address if an uncompressed key is used # note that no witness address should be returned by unsolvable addresses for i in uncompressed_spendable_address + uncompressed_solvable_address + unknown_address + unsolvable_address: assert_raises_rpc_error( -4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i) for i in compressed_spendable_address + compressed_solvable_address: assert_raises_rpc_error( -4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i) spendable_txid.append( self.mine_and_test_listunspent( spendable_anytime + spendable_after_importaddress, 2)) solvable_txid.append( self.mine_and_test_listunspent( solvable_anytime + solvable_after_importaddress, 1)) self.mine_and_test_listunspent(unsolvable_after_importaddress, 1) #self.mine_and_test_listunspent(unseen_anytime, 0) # Repeat some tests. This time we don't add witness scripts with importaddress # Import a compressed key and an uncompressed key, generate some multisig addresses self.nodes[0].importprivkey( "927pw6RW8ZekycnXqBQ2JS5nPyo1yRfGNN8oq74HeddWSpafDJH") uncompressed_spendable_address = ["mguN2vNSCEUh6rJaXoAVwY3YZwZvEmf5xi"] self.nodes[0].importprivkey( "cMcrXaaUC48ZKpcyydfFo8PxHAjpsYLhdsp6nmtB3E2ER9UUHWnw") compressed_spendable_address = ["n1UNmpmbVUJ9ytXYXiurmGPQ3TRrXqPWKL"] self.nodes[0].importpubkey(pubkeys[5]) compressed_solvable_address = [key_to_p2pkh(pubkeys[5])] self.nodes[0].importpubkey(pubkeys[6]) uncompressed_solvable_address = [key_to_p2pkh(pubkeys[6])] spendable_after_addwitnessaddress = [ ] # These outputs should be seen after addwitnessaddress solvable_after_addwitnessaddress = [ ] # These outputs should be seen after addwitnessaddress but not spendable unseen_anytime = [] # These outputs should never be seen solvable_anytime = [ ] # These outputs should be solvable after importpubkey uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress( 2, [ uncompressed_spendable_address[0], compressed_spendable_address[0] ])['address']) uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress( 2, [ uncompressed_spendable_address[0], uncompressed_spendable_address[0] ])['address']) compressed_spendable_address.append(self.nodes[0].addmultisigaddress( 2, [compressed_spendable_address[0], compressed_spendable_address[0] ])['address']) uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress( 2, [compressed_solvable_address[0], uncompressed_solvable_address[0] ])['address']) compressed_solvable_address.append(self.nodes[0].addmultisigaddress( 2, [compressed_spendable_address[0], compressed_solvable_address[0]
])['address']) premature_witaddress = [] for i in compressed_spendable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) # P2WSH and P2SH(P2WSH) multisig with compressed keys are never seen here unseen_anytime.extend([p2wsh, p2sh_p2wsh]) premature_witaddress.append(script_to_p2sh(p2wsh)) else: [ p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh ] = self.p2pkh_address_to_script(v) # P2WPKH and P2SH_P2WPKH are never seen here unseen_anytime.extend([p2wpkh, p2sh_p2wpkh]) for i in uncompressed_spendable_address + uncompressed_solvable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen unseen_anytime.extend([p2wsh, p2sh_p2wsh]) else: [ p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh ] = self.p2pkh_address_to_script(v) # P2WPKH, P2SH_P2WPKH with uncompressed keys are never seen unseen_anytime.extend([p2wpkh, p2sh_p2wpkh]) for i in compressed_solvable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): # P2WSH multisig without private key is never seen here [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) unseen_anytime.extend([p2wsh, p2sh_p2wsh]) premature_witaddress.append(script_to_p2sh(p2wsh)) else: [ p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh ] = self.p2pkh_address_to_script(v) # P2WPKH and P2SH_P2WPKH with compressed keys are always solvable solvable_anytime.extend([p2wpkh, p2sh_p2wpkh]) self.mine_and_test_listunspent(spendable_anytime, 2) self.mine_and_test_listunspent(solvable_anytime, 1) self.mine_and_test_listunspent( spendable_after_addwitnessaddress + solvable_after_addwitnessaddress + unseen_anytime, 0) # addwitnessaddress should refuse to return a witness address if an uncompressed key is used # note that a multisig address returned by addmultisigaddress is not solvable until it is added with importaddress # premature_witaddress are not accepted until the script is added with addwitnessaddress first for i in uncompressed_spendable_address + uncompressed_solvable_address: assert_raises_rpc_error( -4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i) for i in premature_witaddress: assert_raises_rpc_error( -4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i) # even after importaddress, addwitnessaddress is still refused here v = self.nodes[0].getaddressinfo(compressed_solvable_address[1]) self.nodes[0].importaddress(v['hex'], "", False, True) for i in compressed_spendable_address + compressed_solvable_address + premature_witaddress: assert_raises_rpc_error( -4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i) spendable_txid.append( self.mine_and_test_listunspent( spendable_after_addwitnessaddress + spendable_anytime, 2)) solvable_txid.append( self.mine_and_test_listunspent( solvable_after_addwitnessaddress + solvable_anytime, 1)) self.mine_and_test_listunspent(unseen_anytime, 0) # Check that createrawtransaction with a non-v0 Bech32 address is rejected self.log.info("Verify bech32 addresses are not supported") v1_addr
= program_to_witness(1, [3, 5]) assert_raises_rpc_error(-5, "Invalid Tapyrus address", self.nodes[0].createrawtransaction, [getutxo(spendable_txid[0])], {v1_addr: 1}) # Check that spendable outputs are really spendable self.create_and_mine_tx_from_txids(spendable_txid) # import all the private keys so solvable addresses become spendable self.nodes[0].importprivkey( "cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb") self.nodes[0].importprivkey( "cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97") self.nodes[0].importprivkey( "91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV") self.nodes[0].importprivkey( "cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd") self.nodes[0].importprivkey( "cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66") self.nodes[0].importprivkey( "cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K") self.create_and_mine_tx_from_txids(solvable_txid) # Test that importing native P2WPKH/P2WSH scripts is not supported for use_p2wsh in [False, True]: if use_p2wsh: scriptPubKey = "00203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a" transaction = "0100000000e1f505000000002200203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a00000000" else: scriptPubKey = "a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d87" transaction = "0100000000e1f5050000000017a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d8700000000" self.nodes[1].importaddress(scriptPubKey, "", False) assert_raises_rpc_error(-22, "TX decode failed", self.nodes[1].fundrawtransaction, transaction)
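# --- Editor's sketch (stdlib only, not part of the original test) -----------------------------
# The raw scriptPubKey hex strings used just above are a P2SH output and a native P2WSH output.
# Assuming only the BIP16/BIP141 byte layout, they can be picked apart like this:
P2SH_SPK = "a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d87"
P2WSH_SPK = "00203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a"

def describe_script_pub_key(spk_hex):
    spk = bytes.fromhex(spk_hex)
    if len(spk) == 23 and spk[0] == 0xA9 and spk[1] == 0x14 and spk[-1] == 0x87:
        return ("p2sh", spk[2:22].hex())      # OP_HASH160 <20-byte script hash> OP_EQUAL
    if len(spk) == 22 and spk[0] == 0x00 and spk[1] == 0x14:
        return ("p2wpkh", spk[2:22].hex())    # OP_0 <20-byte key hash>
    if len(spk) == 34 and spk[0] == 0x00 and spk[1] == 0x20:
        return ("p2wsh", spk[2:34].hex())     # OP_0 <32-byte script hash>
    return ("unknown", spk.hex())

assert describe_script_pub_key(P2SH_SPK)[0] == "p2sh"
assert describe_script_pub_key(P2WSH_SPK)[0] == "p2wsh"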
def from_hex(self, hexstring): f = BytesIO(hex_str_to_bytes(hexstring)) self.deserialize(f)
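# Editor's note: in the test framework these snippets come from, hex_str_to_bytes() and
# bytes_to_hex_str() are essentially thin wrappers around binascii, so from_hex() boils down to
# a hex -> bytes -> BytesIO -> deserialize() pipeline. A minimal stdlib sketch of that round trip
# (the hex string below is a hypothetical placeholder, not a real transaction):
from io import BytesIO

raw_hex = "00112233445566778899aabbccddeeff"
raw_bytes = bytes.fromhex(raw_hex)     # roughly what hex_str_to_bytes(raw_hex) returns
assert raw_bytes.hex() == raw_hex      # roughly what bytes_to_hex_str(raw_bytes) returns
stream = BytesIO(raw_bytes)            # from_hex() hands a stream like this to self.deserialize()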
def p2sh_address_to_script(self, v): bare = CScript(hex_str_to_bytes(v['hex'])) p2sh = CScript(hex_str_to_bytes(v['scriptPubKey'])) p2wsh = CScript([OP_0, sha256(bare)]) p2sh_p2wsh = CScript([OP_HASH160, hash160(p2wsh), OP_EQUAL]) return ([bare, p2sh, p2wsh, p2sh_p2wsh])
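# Editor's sketch (stdlib only, not part of the original test): the nesting performed by
# p2sh_address_to_script() follows BIP16/BIP141 -- P2SH commits to hash160(script), P2WSH commits
# to sha256(script), and P2SH-P2WSH is simply the P2SH wrapping of the P2WSH output script.
# Note that ripemd160 availability depends on the local hashlib/OpenSSL build.
import hashlib

def _sha256(b):
    return hashlib.sha256(b).digest()

def _hash160(b):
    return hashlib.new("ripemd160", _sha256(b)).digest()

def p2sh_spk(script):
    return bytes([0xA9, 0x14]) + _hash160(script) + bytes([0x87])   # OP_HASH160 <20> OP_EQUAL

def p2wsh_spk(script):
    return bytes([0x00, 0x20]) + _sha256(script)                    # OP_0 <32>

bare = bytes.fromhex("51")       # hypothetical one-byte redeem script (OP_TRUE), stand-in for v['hex']
p2sh = p2sh_spk(bare)            # corresponds to the helper's p2sh output script
p2wsh = p2wsh_spk(bare)          # corresponds to the helper's p2wsh output script
p2sh_p2wsh = p2sh_spk(p2wsh)     # corresponds to the helper's p2sh_p2wsh output script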
def tx_from_hex(hexstring): tx = CTransaction() f = BytesIO(hex_str_to_bytes(hexstring)) tx.deserialize(f) return tx
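# Editor's note: this module-level helper does the same job as the from_hex() method above and as
# the FromHex(CTransaction(), hex_string) pattern used further down in these tests: hex string ->
# bytes -> BytesIO -> CTransaction.deserialize(). A hedged usage sketch (tx_hex is a placeholder):
#
#   tx = tx_from_hex(tx_hex)                             # comparable to FromHex(CTransaction(), tx_hex)
#   assert bytes_to_hex_str(tx.serialize()) == tx_hex    # serialization should round-trip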
def run_test(self): self.nodes[0].generate(161) # block 161 self.log.info( "Verify sigops are counted in GBT with pre-BIP141 rules before the fork" ) txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1) tmpl = self.nodes[0].getblocktemplate({'rules': ['segwit']}) assert tmpl['sizelimit'] == 1000000 assert 'weightlimit' not in tmpl assert tmpl['sigoplimit'] == 20000 assert tmpl['transactions'][0]['hash'] == txid assert tmpl['transactions'][0]['sigops'] == 2 assert '!segwit' not in tmpl['rules'] self.nodes[0].generate(1) # block 162 balance_presetup = self.nodes[0].getbalance() self.pubkey = [] p2sh_ids = [ ] # p2sh_ids[NODE][TYPE] is an array of txids that spend to P2WPKH (TYPE=0) or P2WSH (TYPE=1) scripts to an address for NODE embedded in p2sh wit_ids = [ ] # wit_ids[NODE][TYPE] is an array of txids that spend to P2WPKH (TYPE=0) or P2WSH (TYPE=1) scripts to an address for NODE via bare witness for i in range(3): newaddress = self.nodes[i].getnewaddress() self.pubkey.append( self.nodes[i].getaddressinfo(newaddress)["pubkey"]) multiscript = CScript([ OP_1, hex_str_to_bytes(self.pubkey[-1]), OP_1, OP_CHECKMULTISIG ]) p2sh_ms_addr = self.nodes[i].addmultisigaddress( 1, [self.pubkey[-1]], '', 'p2sh-segwit')['address'] bip173_ms_addr = self.nodes[i].addmultisigaddress( 1, [self.pubkey[-1]], '', 'bech32')['address'] assert_equal(p2sh_ms_addr, script_to_p2sh_p2wsh(multiscript)) assert_equal(bip173_ms_addr, script_to_p2wsh(multiscript)) p2sh_ids.append([]) wit_ids.append([]) for _ in range(2): p2sh_ids[i].append([]) wit_ids[i].append([]) for _ in range(5): for n in range(3): for v in range(2): wit_ids[n][v].append( send_to_witness(v, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[n], False, Decimal("49.999"))) p2sh_ids[n][v].append( send_to_witness(v, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[n], True, Decimal("49.999"))) self.nodes[0].generate(1) # block 163 self.sync_blocks() # Make sure all nodes recognize the transactions as theirs assert_equal(self.nodes[0].getbalance(), balance_presetup - 60 * 50 + 20 * Decimal("49.999") + 50) assert_equal(self.nodes[1].getbalance(), 20 * Decimal("49.999")) assert_equal(self.nodes[2].getbalance(), 20 * Decimal("49.999")) self.nodes[0].generate(260) # block 423 self.sync_blocks() self.log.info( "Verify witness txs are skipped for mining before the fork") self.skip_mine(self.nodes[2], wit_ids[NODE_2][P2WPKH][0], True) # block 424 self.skip_mine(self.nodes[2], wit_ids[NODE_2][P2WSH][0], True) # block 425 self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][P2WPKH][0], True) # block 426 self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][P2WSH][0], True) # block 427 self.log.info( "Verify unsigned p2sh witness txs without a redeem script are invalid" ) self.fail_accept( self.nodes[2], "mandatory-script-verify-flag-failed (Operation not valid with the current stack size)", p2sh_ids[NODE_2][P2WPKH][1], sign=False) self.fail_accept( self.nodes[2], "mandatory-script-verify-flag-failed (Operation not valid with the current stack size)", p2sh_ids[NODE_2][P2WSH][1], sign=False) self.nodes[2].generate(4) # blocks 428-431 self.log.info( "Verify previous witness txs skipped for mining can now be mined") assert_equal(len(self.nodes[2].getrawmempool()), 4) blockhash = self.nodes[2].generate(1)[ 0] # block 432 (first block with new rules; 432 = 144 * 3) self.sync_blocks() assert_equal(len(self.nodes[2].getrawmempool()), 0) segwit_tx_list = self.nodes[2].getblock(blockhash)["tx"] assert_equal(len(segwit_tx_list), 5) 
self.log.info( "Verify default node can't accept txs with missing witness") # unsigned, no scriptsig self.fail_accept( self.nodes[0], "non-mandatory-script-verify-flag (Witness program hash mismatch)", wit_ids[NODE_0][P2WPKH][0], sign=False) self.fail_accept( self.nodes[0], "non-mandatory-script-verify-flag (Witness program was passed an empty witness)", wit_ids[NODE_0][P2WSH][0], sign=False) self.fail_accept( self.nodes[0], "mandatory-script-verify-flag-failed (Operation not valid with the current stack size)", p2sh_ids[NODE_0][P2WPKH][0], sign=False) self.fail_accept( self.nodes[0], "mandatory-script-verify-flag-failed (Operation not valid with the current stack size)", p2sh_ids[NODE_0][P2WSH][0], sign=False) # unsigned with redeem script self.fail_accept( self.nodes[0], "non-mandatory-script-verify-flag (Witness program hash mismatch)", p2sh_ids[NODE_0][P2WPKH][0], sign=False, redeem_script=witness_script(False, self.pubkey[0])) self.fail_accept( self.nodes[0], "non-mandatory-script-verify-flag (Witness program was passed an empty witness)", p2sh_ids[NODE_0][P2WSH][0], sign=False, redeem_script=witness_script(True, self.pubkey[0])) self.log.info( "Verify block and transaction serialization rpcs return differing serializations depending on rpc serialization flag" ) assert self.nodes[2].getblock( blockhash, False) != self.nodes[0].getblock(blockhash, False) assert self.nodes[1].getblock(blockhash, False) == self.nodes[2].getblock( blockhash, False) for tx_id in segwit_tx_list: tx = FromHex(CTransaction(), self.nodes[2].gettransaction(tx_id)["hex"]) assert self.nodes[2].getrawtransaction( tx_id, False, blockhash) != self.nodes[0].getrawtransaction( tx_id, False, blockhash) assert self.nodes[1].getrawtransaction( tx_id, False, blockhash) == self.nodes[2].getrawtransaction( tx_id, False, blockhash) assert self.nodes[0].getrawtransaction( tx_id, False, blockhash) != self.nodes[2].gettransaction(tx_id)["hex"] assert self.nodes[1].getrawtransaction( tx_id, False, blockhash) == self.nodes[2].gettransaction(tx_id)["hex"] assert self.nodes[0].getrawtransaction( tx_id, False, blockhash) == tx.serialize_without_witness().hex() # Coinbase contains the witness commitment nonce, check that RPC shows us coinbase_txid = self.nodes[2].getblock(blockhash)['tx'][0] coinbase_tx = self.nodes[2].gettransaction(txid=coinbase_txid, verbose=True) witnesses = coinbase_tx["decoded"]["vin"][0]["txinwitness"] assert_equal(len(witnesses), 1) assert_is_hex_string(witnesses[0]) assert_equal(witnesses[0], '00' * 32) self.log.info( "Verify witness txs without witness data are invalid after the fork" ) self.fail_accept( self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch)', wit_ids[NODE_2][P2WPKH][2], sign=False) self.fail_accept( self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness)', wit_ids[NODE_2][P2WSH][2], sign=False) self.fail_accept( self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch)', p2sh_ids[NODE_2][P2WPKH][2], sign=False, redeem_script=witness_script(False, self.pubkey[2])) self.fail_accept( self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness)', p2sh_ids[NODE_2][P2WSH][2], sign=False, redeem_script=witness_script(True, self.pubkey[2])) self.log.info("Verify default node can now use witness txs") self.success_mine(self.nodes[0], wit_ids[NODE_0][P2WPKH][0], True) # block 432 self.success_mine(self.nodes[0], wit_ids[NODE_0][P2WSH][0], True) # block 433 
self.success_mine(self.nodes[0], p2sh_ids[NODE_0][P2WPKH][0], True) # block 434 self.success_mine(self.nodes[0], p2sh_ids[NODE_0][P2WSH][0], True) # block 435 self.log.info( "Verify sigops are counted in GBT with BIP141 rules after the fork" ) txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1) tmpl = self.nodes[0].getblocktemplate({'rules': ['segwit']}) assert tmpl[ 'sizelimit'] >= 3999577 # actual maximum size is lower due to minimum mandatory non-witness data assert tmpl['weightlimit'] == 4000000 assert tmpl['sigoplimit'] == 80000 assert tmpl['transactions'][0]['txid'] == txid assert tmpl['transactions'][0]['sigops'] == 8 assert '!segwit' in tmpl['rules'] self.nodes[0].generate(1) # Mine a block to clear the gbt cache self.log.info( "Non-segwit miners are able to use GBT response after activation.") # Create a 3-tx chain: tx1 (non-segwit input, paying to a segwit output) -> # tx2 (segwit input, paying to a non-segwit output) -> # tx3 (non-segwit input, paying to a non-segwit output). # tx1 is allowed to appear in the block, but no others. txid1 = send_to_witness(1, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[0], False, Decimal("49.996")) hex_tx = self.nodes[0].gettransaction(txid)['hex'] tx = FromHex(CTransaction(), hex_tx) assert tx.wit.is_null() # This should not be a segwit input assert txid1 in self.nodes[0].getrawmempool() tx1_hex = self.nodes[0].gettransaction(txid1)['hex'] tx1 = FromHex(CTransaction(), tx1_hex) # Check that hash and wtxid are properly reported in mempool entry (txid1) assert_equal(int(self.nodes[0].getmempoolentry(txid1)["hash"], 16), tx1.calc_sha256(True)) assert_equal(int(self.nodes[0].getmempoolentry(txid1)["wtxid"], 16), tx1.calc_sha256(True)) # Check that weight and vsize are properly reported in mempool entry (txid1) assert_equal(self.nodes[0].getmempoolentry(txid1)["vsize"], (self.nodes[0].getmempoolentry(txid1)["weight"] + 3) // 4) assert_equal( self.nodes[0].getmempoolentry(txid1)["weight"], len(tx1.serialize_without_witness()) * 3 + len(tx1.serialize_with_witness())) # Now create tx2, which will spend from txid1. tx = CTransaction() tx.vin.append(CTxIn(COutPoint(int(txid1, 16), 0), b'')) tx.vout.append( CTxOut(int(49.99 * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))) tx2_hex = self.nodes[0].signrawtransactionwithwallet(ToHex(tx))['hex'] txid2 = self.nodes[0].sendrawtransaction(tx2_hex) tx = FromHex(CTransaction(), tx2_hex) assert not tx.wit.is_null() # Check that hash and wtxid are properly reported in mempool entry (txid2) assert_equal(int(self.nodes[0].getmempoolentry(txid2)["hash"], 16), tx.calc_sha256(True)) assert_equal(int(self.nodes[0].getmempoolentry(txid2)["wtxid"], 16), tx.calc_sha256(True)) # Check that weight and vsize are properly reported in mempool entry (txid2) assert_equal(self.nodes[0].getmempoolentry(txid2)["vsize"], (self.nodes[0].getmempoolentry(txid2)["weight"] + 3) // 4) assert_equal( self.nodes[0].getmempoolentry(txid2)["weight"], len(tx.serialize_without_witness()) * 3 + len(tx.serialize_with_witness())) # Now create tx3, which will spend from txid2 tx = CTransaction() tx.vin.append(CTxIn(COutPoint(int(txid2, 16), 0), b"")) tx.vout.append( CTxOut(int(49.95 * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))) # Huge fee tx.calc_sha256() txid3 = self.nodes[0].sendrawtransaction(hexstring=ToHex(tx), maxfeerate=0) assert tx.wit.is_null() assert txid3 in self.nodes[0].getrawmempool() # Check that getblocktemplate includes all transactions. 
template = self.nodes[0].getblocktemplate({"rules": ["segwit"]}) template_txids = [t['txid'] for t in template['transactions']] assert txid1 in template_txids assert txid2 in template_txids assert txid3 in template_txids # Check that hash and wtxid are properly reported in mempool entry (txid3) assert_equal(int(self.nodes[0].getmempoolentry(txid3)["hash"], 16), tx.calc_sha256(True)) assert_equal(int(self.nodes[0].getmempoolentry(txid3)["wtxid"], 16), tx.calc_sha256(True)) # Check that weight and vsize are properly reported in mempool entry (txid3) assert_equal(self.nodes[0].getmempoolentry(txid3)["vsize"], (self.nodes[0].getmempoolentry(txid3)["weight"] + 3) // 4) assert_equal( self.nodes[0].getmempoolentry(txid3)["weight"], len(tx.serialize_without_witness()) * 3 + len(tx.serialize_with_witness())) # Mine a block to clear the gbt cache again. self.nodes[0].generate(1) self.log.info("Signing with all-segwit inputs reveals fee rate") addr = self.nodes[0].getnewaddress(address_type='p2sh-segwit') txid = self.nodes[0].sendtoaddress(addr, 1) tx = self.nodes[0].getrawtransaction(txid, True) n = -1 value = -1 for o in tx["vout"]: if o["scriptPubKey"]["addresses"][0] == addr: n = o["n"] value = Decimal(o["value"]) break assert n > -1 # failure means we could not find the address in the outputs despite sending to it assert_equal( value, 1 ) # failure means we got an unexpected amount of coins, despite trying to send 1 fee = Decimal("0.00010000") value_out = value - fee self.nodes[0].generatetoaddress(1, self.nodes[0].getnewaddress()) raw = self.nodes[0].createrawtransaction([{ "txid": txid, "vout": n }], [{ self.nodes[0].getnewaddress(): value_out }]) signed = self.nodes[0].signrawtransactionwithwallet(raw) assert_equal(signed["complete"], True) txsize = self.nodes[0].decoderawtransaction(signed['hex'])['vsize'] exp_feerate = 1000 * fee / Decimal(txsize) assert_approx(signed["feerate"], exp_feerate, Decimal("0.00000010")) # discrepancy = 100000000 * (exp_feerate - signed["feerate"]) # assert -10 < discrepancy < 10 assert_equal(Decimal(signed["fee"]), fee) self.log.info("Verify behaviour of importaddress and listunspent") # Some public keys to be used later pubkeys = [ "0363D44AABD0F1699138239DF2F042C3282C0671CC7A76826A55C8203D90E39242", # cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb "02D3E626B3E616FC8662B489C123349FECBFC611E778E5BE739B257EAE4721E5BF", # cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97 "04A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538A62F5BD8EC85C2477F39650BD391EA6250207065B2A81DA8B009FC891E898F0E", # 91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV "02A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538", # cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd "036722F784214129FEB9E8129D626324F3F6716555B603FFE8300BBCB882151228", # cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66 "0266A8396EE936BF6D99D17920DB21C6C7B1AB14C639D5CD72B300297E416FD2EC", # cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K "0450A38BD7F0AC212FEBA77354A9B036A32E0F7C81FC4E0C5ADCA7C549C4505D2522458C2D9AE3CEFD684E039194B72C8A10F9CB9D4764AB26FCC2718D421D3B84", # 92h2XPssjBpsJN5CqSP7v9a7cf2kgDunBC6PDFwJHMACM1rrVBJ ] # Import a compressed key and an uncompressed key, generate some multisig addresses self.nodes[0].importprivkey( "92e6XLo5jVAVwrQKPNTs93oQco8f8sDNBcpv73Dsrs397fQtFQn") uncompressed_spendable_address = ["mvozP4UwyGD2mGZU4D2eMvMLPB9WkMmMQu"] self.nodes[0].importprivkey( "cNC8eQ5dg3mFAVePDX4ddmPYpPbw41r9bm2jd1nLJT77e6RrzTRR") 
compressed_spendable_address = ["mmWQubrDomqpgSYekvsU7HWEVjLFHAakLe"] assert not self.nodes[0].getaddressinfo( uncompressed_spendable_address[0])['iscompressed'] assert self.nodes[0].getaddressinfo( compressed_spendable_address[0])['iscompressed'] self.nodes[0].importpubkey(pubkeys[0]) compressed_solvable_address = [key_to_p2pkh(pubkeys[0])] self.nodes[0].importpubkey(pubkeys[1]) compressed_solvable_address.append(key_to_p2pkh(pubkeys[1])) self.nodes[0].importpubkey(pubkeys[2]) uncompressed_solvable_address = [key_to_p2pkh(pubkeys[2])] spendable_anytime = [ ] # These outputs should be seen anytime after importprivkey and addmultisigaddress spendable_after_importaddress = [ ] # These outputs should be seen after importaddress solvable_after_importaddress = [ ] # These outputs should be seen after importaddress but not spendable unsolvable_after_importaddress = [ ] # These outputs should be unsolvable after importaddress solvable_anytime = [ ] # These outputs should be solvable after importpubkey unseen_anytime = [] # These outputs should never be seen uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress( 2, [ uncompressed_spendable_address[0], compressed_spendable_address[0] ])['address']) uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress( 2, [ uncompressed_spendable_address[0], uncompressed_spendable_address[0] ])['address']) compressed_spendable_address.append(self.nodes[0].addmultisigaddress( 2, [compressed_spendable_address[0], compressed_spendable_address[0] ])['address']) uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress( 2, [ compressed_spendable_address[0], uncompressed_solvable_address[0] ])['address']) compressed_solvable_address.append(self.nodes[0].addmultisigaddress( 2, [compressed_spendable_address[0], compressed_solvable_address[0] ])['address']) compressed_solvable_address.append(self.nodes[0].addmultisigaddress( 2, [compressed_solvable_address[0], compressed_solvable_address[1] ])['address']) # Test multisig_without_privkey # We have 2 public keys without private keys, use addmultisigaddress to add to wallet. # Money sent to P2SH of multisig of this should only be seen after importaddress with the BASE58 P2SH address. 
multisig_without_privkey_address = self.nodes[0].addmultisigaddress( 2, [pubkeys[3], pubkeys[4]])['address'] script = CScript([ OP_2, hex_str_to_bytes(pubkeys[3]), hex_str_to_bytes(pubkeys[4]), OP_2, OP_CHECKMULTISIG ]) solvable_after_importaddress.append( CScript([OP_HASH160, hash160(script), OP_EQUAL])) for i in compressed_spendable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) # p2sh multisig with compressed keys should always be spendable spendable_anytime.extend([p2sh]) # bare multisig can be watched and signed, but is not treated as ours solvable_after_importaddress.extend([bare]) # P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after direct importaddress spendable_after_importaddress.extend([p2wsh, p2sh_p2wsh]) else: [ p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh ] = self.p2pkh_address_to_script(v) # normal P2PKH and P2PK with compressed keys should always be spendable spendable_anytime.extend([p2pkh, p2pk]) # P2SH_P2PK, P2SH_P2PKH with compressed keys are spendable after direct importaddress spendable_after_importaddress.extend([ p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh ]) # P2WPKH and P2SH_P2WPKH with compressed keys should always be spendable spendable_anytime.extend([p2wpkh, p2sh_p2wpkh]) for i in uncompressed_spendable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) # p2sh multisig with uncompressed keys should always be spendable spendable_anytime.extend([p2sh]) # bare multisig can be watched and signed, but is not treated as ours solvable_after_importaddress.extend([bare]) # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen unseen_anytime.extend([p2wsh, p2sh_p2wsh]) else: [ p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh ] = self.p2pkh_address_to_script(v) # normal P2PKH and P2PK with uncompressed keys should always be spendable spendable_anytime.extend([p2pkh, p2pk]) # P2SH_P2PK and P2SH_P2PKH are spendable after direct importaddress spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh]) # Witness output types with uncompressed keys are never seen unseen_anytime.extend([ p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh ]) for i in compressed_solvable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): # Multisig without private is not seen after addmultisigaddress, but seen after importaddress [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) solvable_after_importaddress.extend( [bare, p2sh, p2wsh, p2sh_p2wsh]) else: [ p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh ] = self.p2pkh_address_to_script(v) # normal P2PKH, P2PK, P2WPKH and P2SH_P2WPKH with compressed keys should always be seen solvable_anytime.extend([p2pkh, p2pk, p2wpkh, p2sh_p2wpkh]) # P2SH_P2PK, P2SH_P2PKH with compressed keys are seen after direct importaddress solvable_after_importaddress.extend([ p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh ]) for i in uncompressed_solvable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) # Base uncompressed multisig without private is not seen after 
addmultisigaddress, but seen after importaddress solvable_after_importaddress.extend([bare, p2sh]) # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen unseen_anytime.extend([p2wsh, p2sh_p2wsh]) else: [ p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh ] = self.p2pkh_address_to_script(v) # normal P2PKH and P2PK with uncompressed keys should always be seen solvable_anytime.extend([p2pkh, p2pk]) # P2SH_P2PK, P2SH_P2PKH with uncompressed keys are seen after direct importaddress solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh]) # Witness output types with uncompressed keys are never seen unseen_anytime.extend([ p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh ]) op1 = CScript([OP_1]) op0 = CScript([OP_0]) # 2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe is the P2SH(P2PKH) version of mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V unsolvable_address_key = hex_str_to_bytes( "02341AEC7587A51CDE5279E0630A531AEA2615A9F80B17E8D9376327BAEAA59E3D" ) unsolvablep2pkh = CScript([ OP_DUP, OP_HASH160, hash160(unsolvable_address_key), OP_EQUALVERIFY, OP_CHECKSIG ]) unsolvablep2wshp2pkh = CScript([OP_0, sha256(unsolvablep2pkh)]) p2shop0 = CScript([OP_HASH160, hash160(op0), OP_EQUAL]) p2wshop1 = CScript([OP_0, sha256(op1)]) unsolvable_after_importaddress.append(unsolvablep2pkh) unsolvable_after_importaddress.append(unsolvablep2wshp2pkh) unsolvable_after_importaddress.append( op1) # OP_1 will be imported as script unsolvable_after_importaddress.append(p2wshop1) unseen_anytime.append( op0 ) # OP_0 will be imported as P2SH address with no script provided unsolvable_after_importaddress.append(p2shop0) spendable_txid = [] solvable_txid = [] spendable_txid.append( self.mine_and_test_listunspent(spendable_anytime, 2)) solvable_txid.append( self.mine_and_test_listunspent(solvable_anytime, 1)) self.mine_and_test_listunspent( spendable_after_importaddress + solvable_after_importaddress + unseen_anytime + unsolvable_after_importaddress, 0) importlist = [] for i in compressed_spendable_address + uncompressed_spendable_address + compressed_solvable_address + uncompressed_solvable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): bare = hex_str_to_bytes(v['hex']) importlist.append(bare.hex()) importlist.append(CScript([OP_0, sha256(bare)]).hex()) else: pubkey = hex_str_to_bytes(v['pubkey']) p2pk = CScript([pubkey, OP_CHECKSIG]) p2pkh = CScript([ OP_DUP, OP_HASH160, hash160(pubkey), OP_EQUALVERIFY, OP_CHECKSIG ]) importlist.append(p2pk.hex()) importlist.append(p2pkh.hex()) importlist.append(CScript([OP_0, hash160(pubkey)]).hex()) importlist.append(CScript([OP_0, sha256(p2pk)]).hex()) importlist.append(CScript([OP_0, sha256(p2pkh)]).hex()) importlist.append(unsolvablep2pkh.hex()) importlist.append(unsolvablep2wshp2pkh.hex()) importlist.append(op1.hex()) importlist.append(p2wshop1.hex()) for i in importlist: # import all generated addresses. The wallet already has the private keys for some of these, so catch JSON RPC # exceptions and continue. 
try_rpc( -4, "The wallet already contains the private key for this address or script", self.nodes[0].importaddress, i, "", False, True) self.nodes[0].importaddress( script_to_p2sh(op0)) # import OP_0 as address only self.nodes[0].importaddress( multisig_without_privkey_address) # Test multisig_without_privkey spendable_txid.append( self.mine_and_test_listunspent( spendable_anytime + spendable_after_importaddress, 2)) solvable_txid.append( self.mine_and_test_listunspent( solvable_anytime + solvable_after_importaddress, 1)) self.mine_and_test_listunspent(unsolvable_after_importaddress, 1) self.mine_and_test_listunspent(unseen_anytime, 0) spendable_txid.append( self.mine_and_test_listunspent( spendable_anytime + spendable_after_importaddress, 2)) solvable_txid.append( self.mine_and_test_listunspent( solvable_anytime + solvable_after_importaddress, 1)) self.mine_and_test_listunspent(unsolvable_after_importaddress, 1) self.mine_and_test_listunspent(unseen_anytime, 0) # Repeat some tests. This time we don't add witness scripts with importaddress # Import a compressed key and an uncompressed key, generate some multisig addresses self.nodes[0].importprivkey( "927pw6RW8ZekycnXqBQ2JS5nPyo1yRfGNN8oq74HeddWSpafDJH") uncompressed_spendable_address = ["mguN2vNSCEUh6rJaXoAVwY3YZwZvEmf5xi"] self.nodes[0].importprivkey( "cMcrXaaUC48ZKpcyydfFo8PxHAjpsYLhdsp6nmtB3E2ER9UUHWnw") compressed_spendable_address = ["n1UNmpmbVUJ9ytXYXiurmGPQ3TRrXqPWKL"] self.nodes[0].importpubkey(pubkeys[5]) compressed_solvable_address = [key_to_p2pkh(pubkeys[5])] self.nodes[0].importpubkey(pubkeys[6]) uncompressed_solvable_address = [key_to_p2pkh(pubkeys[6])] unseen_anytime = [] # These outputs should never be seen solvable_anytime = [ ] # These outputs should be solvable after importpubkey unseen_anytime = [] # These outputs should never be seen uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress( 2, [ uncompressed_spendable_address[0], compressed_spendable_address[0] ])['address']) uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress( 2, [ uncompressed_spendable_address[0], uncompressed_spendable_address[0] ])['address']) compressed_spendable_address.append(self.nodes[0].addmultisigaddress( 2, [compressed_spendable_address[0], compressed_spendable_address[0] ])['address']) uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress( 2, [compressed_solvable_address[0], uncompressed_solvable_address[0] ])['address']) compressed_solvable_address.append(self.nodes[0].addmultisigaddress( 2, [compressed_spendable_address[0], compressed_solvable_address[0] ])['address']) premature_witaddress = [] for i in compressed_spendable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) premature_witaddress.append(script_to_p2sh(p2wsh)) else: [ p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh ] = self.p2pkh_address_to_script(v) # P2WPKH, P2SH_P2WPKH are always spendable spendable_anytime.extend([p2wpkh, p2sh_p2wpkh]) for i in uncompressed_spendable_address + uncompressed_solvable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen unseen_anytime.extend([p2wsh, p2sh_p2wsh]) else: [ p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh ] = 
self.p2pkh_address_to_script(v) # P2WPKH, P2SH_P2WPKH with uncompressed keys are never seen unseen_anytime.extend([p2wpkh, p2sh_p2wpkh]) for i in compressed_solvable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) premature_witaddress.append(script_to_p2sh(p2wsh)) else: [ p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh ] = self.p2pkh_address_to_script(v) # P2SH_P2PK, P2SH_P2PKH with compressed keys are always solvable solvable_anytime.extend([p2wpkh, p2sh_p2wpkh]) self.mine_and_test_listunspent(spendable_anytime, 2) self.mine_and_test_listunspent(solvable_anytime, 1) self.mine_and_test_listunspent(unseen_anytime, 0) # Check that createrawtransaction/decoderawtransaction with non-v0 Bech32 works v1_addr = program_to_witness(1, [3, 5]) v1_tx = self.nodes[0].createrawtransaction( [getutxo(spendable_txid[0])], {v1_addr: 1}) v1_decoded = self.nodes[1].decoderawtransaction(v1_tx) assert_equal(v1_decoded['vout'][0]['scriptPubKey']['addresses'][0], v1_addr) assert_equal(v1_decoded['vout'][0]['scriptPubKey']['hex'], "51020305") # Check that spendable outputs are really spendable self.create_and_mine_tx_from_txids(spendable_txid) # import all the private keys so solvable addresses become spendable self.nodes[0].importprivkey( "cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb") self.nodes[0].importprivkey( "cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97") self.nodes[0].importprivkey( "91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV") self.nodes[0].importprivkey( "cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd") self.nodes[0].importprivkey( "cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66") self.nodes[0].importprivkey( "cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K") self.create_and_mine_tx_from_txids(solvable_txid) # Test that importing native P2WPKH/P2WSH scripts works for use_p2wsh in [False, True]: if use_p2wsh: scriptPubKey = "00203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a" transaction = "01000000000100e1f505000000002200203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a00000000" else: scriptPubKey = "a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d87" transaction = "01000000000100e1f5050000000017a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d8700000000" self.nodes[1].importaddress(scriptPubKey, "", False) rawtxfund = self.nodes[1].fundrawtransaction(transaction)['hex'] rawtxfund = self.nodes[1].signrawtransactionwithwallet( rawtxfund)["hex"] txid = self.nodes[1].sendrawtransaction(rawtxfund) assert_equal(self.nodes[1].gettransaction(txid, True)["txid"], txid) assert_equal( self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid) # Assert it is properly saved self.restart_node(1) assert_equal(self.nodes[1].gettransaction(txid, True)["txid"], txid) assert_equal( self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid)
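# --- Editor's sketch (stdlib only, not part of the original test) -----------------------------
# The v1 check above asserts that program_to_witness(1, [3, 5]) decodes back to the output script
# hex "51020305". Assuming only the BIP141 rule that version 0 maps to OP_0 (0x00) and versions
# 1..16 map to OP_1..OP_16 (0x51..0x60), followed by a single push of the program bytes:
def witness_spk(version, program):
    assert 0 <= version <= 16 and 2 <= len(program) <= 40
    op_n = 0x00 if version == 0 else 0x50 + version
    return bytes([op_n, len(program)]) + bytes(program)

assert witness_spk(1, [3, 5]).hex() == "51020305"   # matches the decoderawtransaction assert above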
def create_spam_block(self, hashPrevBlock, stakingPrevOuts, height, fStakeDoubleSpent=False, fZPoS=False, spendingPrevOuts={}): ''' creates a block to spam the network with :param hashPrevBlock: (hex string) hash of previous block stakingPrevOuts: ({COutPoint --> (int, int, int, str)} dictionary) map outpoints (to be used as staking inputs) to amount, block_time, nStakeModifier, hashStake height: (int) block height fStakeDoubleSpent: (bool) spend the coinstake input inside the block fZPoS: (bool) stake the block with zerocoin spendingPrevOuts: ({COutPoint --> (int, int, int, str)} dictionary) map outpoints (to be used as tx inputs) to amount, block_time, nStakeModifier, hashStake :return block: (CBlock) generated block ''' # If not given inputs to create spam txes, use a copy of the staking inputs if len(spendingPrevOuts) == 0: spendingPrevOuts = dict(stakingPrevOuts) # Get current time current_time = int(time.time()) nTime = current_time & 0xfffffff0 # Create coinbase TX # Even if PoS blocks have empty coinbase vout, the height is required for the vin script coinbase = create_coinbase(height) coinbase.vout[0].nValue = 0 coinbase.vout[0].scriptPubKey = b"" coinbase.nTime = nTime coinbase.rehash() # Create Block with coinbase block = create_block(int(hashPrevBlock, 16), coinbase, nTime) # Find valid kernel hash - Create a new private key used for block signing. if not block.solve_stake(stakingPrevOuts): raise Exception("Not able to solve for any prev_outpoint") # Sign coinstake TX and add it to the block signed_stake_tx = self.sign_stake_tx( block, stakingPrevOuts[block.prevoutStake][0], fZPoS) block.vtx.append(signed_stake_tx) # Remove coinstake input prevout unless we want to try double spending in the same block. # Skip for zPoS as the spendingPrevouts are just regular UTXOs if not fZPoS and not fStakeDoubleSpent: del spendingPrevOuts[block.prevoutStake] # remove a random prevout from the list # (to randomize block creation if the same height is picked two times) if len(spendingPrevOuts) > 0: del spendingPrevOuts[choice(list(spendingPrevOuts))] # Create spam for the block. Sign the spendingPrevouts for outPoint in spendingPrevOuts: value_out = int(spendingPrevOuts[outPoint][0] - self.DEFAULT_FEE * COIN) tx = create_transaction(outPoint, b"", value_out, nTime, scriptPubKey=CScript([ self.block_sig_key.get_pubkey(), OP_CHECKSIG ])) # sign txes signed_tx_hex = self.node.signrawtransaction( bytes_to_hex_str(tx.serialize()))['hex'] signed_tx = CTransaction() signed_tx.deserialize(BytesIO(hex_str_to_bytes(signed_tx_hex))) block.vtx.append(signed_tx) # Get correct MerkleRoot and rehash block block.hashMerkleRoot = block.calc_merkle_root() block.rehash() # Sign block with coinstake key and return it block.sign_block(self.block_sig_key) return block
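# Editor's note (worked example): the "current_time & 0xfffffff0" mask in create_spam_block()
# clears the low four bits of the timestamp, i.e. it rounds nTime down to a multiple of 16
# seconds, presumably to match the staking time granularity this chain expects.
current_time = 1700000123
n_time = current_time & 0xFFFFFFF0
assert n_time == 1700000112 and n_time % 16 == 0 and 0 <= current_time - n_time < 16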
def p2sh_address_to_script(self, v): bare = CScript(hex_str_to_bytes(v['hex'])) p2sh = CScript(hex_str_to_bytes(v['scriptPubKey'])) p2wsh = script_to_p2wsh_script(bare) p2sh_p2wsh = script_to_p2sh_script(p2wsh) return([bare, p2sh, p2wsh, p2sh_p2wsh])
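# Editor's note: this variant delegates to helpers instead of spelling the scripts out by hand.
# In the newer test framework these helpers are expected to reduce to the same constructions used
# in the earlier variant of p2sh_address_to_script():
#
#   script_to_p2wsh_script(bare)   ->  CScript([OP_0, sha256(bare)])
#   script_to_p2sh_script(p2wsh)   ->  CScript([OP_HASH160, hash160(p2wsh), OP_EQUAL])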
def run_test(self): self.nodes[0].generate(161) # block 161 self.log.info("Verify sigops are counted in GBT with pre-BIP141 rules before the fork") txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1) tmpl = self.nodes[0].getblocktemplate({'rules': ['segwit']}) assert(tmpl['sizelimit'] == 1000000) assert('weightlimit' not in tmpl) assert(tmpl['sigoplimit'] == 20000) assert(tmpl['transactions'][0]['txid'] == txid) assert(tmpl['transactions'][0]['sigops'] == 2) tmpl = self.nodes[0].getblocktemplate({'rules': ['segwit']}) assert(tmpl['sizelimit'] == 1000000) assert('weightlimit' not in tmpl) assert(tmpl['sigoplimit'] == 20000) assert(tmpl['transactions'][0]['txid'] == txid) assert(tmpl['transactions'][0]['sigops'] == 2) self.nodes[0].generate(1) # block 162 balance_presetup = self.nodes[0].getbalance()['bitcoin'] self.pubkey = [] p2sh_ids = [] # p2sh_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE embedded in p2sh wit_ids = [] # wit_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE via bare witness for i in range(3): newaddress = self.nodes[i].getnewaddress() self.pubkey.append(self.nodes[i].getaddressinfo(newaddress)["pubkey"]) multiscript = CScript([OP_1, hex_str_to_bytes(self.pubkey[-1]), OP_1, OP_CHECKMULTISIG]) p2sh_ms_addr = self.nodes[i].addmultisigaddress(1, [self.pubkey[-1]], '', 'p2sh-segwit')['address'] bip173_ms_addr = self.nodes[i].addmultisigaddress(1, [self.pubkey[-1]], '', 'bech32')['address'] assert_equal(p2sh_ms_addr, script_to_p2sh_p2wsh(multiscript)) assert_equal(bip173_ms_addr, script_to_p2wsh(multiscript)) p2sh_ids.append([]) wit_ids.append([]) for v in range(2): p2sh_ids[i].append([]) wit_ids[i].append([]) for i in range(5): for n in range(3): for v in range(2): wit_ids[n][v].append(send_to_witness(v, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[n], False, Decimal("49.999"))) p2sh_ids[n][v].append(send_to_witness(v, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[n], True, Decimal("49.999"))) self.nodes[0].generate(1) # block 163 sync_blocks(self.nodes) # Make sure all nodes recognize the transactions as theirs assert_equal(self.nodes[0].getbalance()['bitcoin'], balance_presetup - 60 * 50 + 20 * Decimal("49.999") + 50) assert_equal(self.nodes[1].getbalance()['bitcoin'], 20 * Decimal("49.999")) assert_equal(self.nodes[2].getbalance()['bitcoin'], 20 * Decimal("49.999")) self.nodes[0].generate(260) # block 423 sync_blocks(self.nodes) self.log.info("Verify witness txs are skipped for mining before the fork") self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][0], True) # block 424 self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][0], True) # block 425 self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][0], True) # block 426 self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][0], True) # block 427 self.log.info("Verify unsigned p2sh witness txs without a redeem script are invalid") self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V0][1], False) self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V1][1], False) self.nodes[2].generate(4) # blocks 428-431 self.log.info("Verify previous witness txs skipped for mining can now be mined") assert_equal(len(self.nodes[2].getrawmempool()), 4) blockhash = self.nodes[2].generate(1)[0] # block 432 (first block with new rules; 432 = 144 * 3) sync_blocks(self.nodes) 
assert_equal(len(self.nodes[2].getrawmempool()), 0) segwit_tx_list = self.nodes[2].getblock(blockhash)["tx"] assert_equal(len(segwit_tx_list), 5) self.log.info("Verify default node can't accept txs with missing witness") # unsigned, no scriptsig self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", wit_ids[NODE_0][WIT_V0][0], False) self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", wit_ids[NODE_0][WIT_V1][0], False) self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V0][0], False) self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V1][0], False) # unsigned with redeem script self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V0][0], False, witness_script(False, self.pubkey[0])) self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V1][0], False, witness_script(True, self.pubkey[0])) self.log.info("Verify block and transaction serialization rpcs return differing serializations depending on rpc serialization flag") assert(self.nodes[2].getblock(blockhash, False) != self.nodes[0].getblock(blockhash, False)) assert(self.nodes[1].getblock(blockhash, False) == self.nodes[2].getblock(blockhash, False)) for tx_id in segwit_tx_list: tx = FromHex(CTransaction(), self.nodes[2].gettransaction(tx_id)["hex"]) assert(self.nodes[2].getrawtransaction(tx_id, False, blockhash) != self.nodes[0].getrawtransaction(tx_id, False, blockhash)) assert(self.nodes[1].getrawtransaction(tx_id, False, blockhash) == self.nodes[2].getrawtransaction(tx_id, False, blockhash)) assert(self.nodes[0].getrawtransaction(tx_id, False, blockhash) != self.nodes[2].gettransaction(tx_id)["hex"]) assert(self.nodes[1].getrawtransaction(tx_id, False, blockhash) == self.nodes[2].gettransaction(tx_id)["hex"]) assert(self.nodes[0].getrawtransaction(tx_id, False, blockhash) == bytes_to_hex_str(tx.serialize_without_witness())) self.log.info("Verify witness txs without witness data are invalid after the fork") self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch) (code 64)', wit_ids[NODE_2][WIT_V0][2], sign=False) self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness) (code 64)', wit_ids[NODE_2][WIT_V1][2], sign=False) self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch) (code 64)', p2sh_ids[NODE_2][WIT_V0][2], sign=False, redeem_script=witness_script(False, self.pubkey[2])) self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness) (code 64)', p2sh_ids[NODE_2][WIT_V1][2], sign=False, redeem_script=witness_script(True, self.pubkey[2])) self.log.info("Verify default node can now use witness txs") self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V0][0], True) # block 432 self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V1][0], True) # block 433 self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V0][0], True) # block 434 self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V1][0], True) # block 435 self.log.info("Verify sigops are counted in GBT with BIP141 rules after the fork") txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1) tmpl = self.nodes[0].getblocktemplate({'rules': ['segwit']}) assert(tmpl['sizelimit'] >= 3999577) # actual maximum size is lower due to minimum mandatory non-witness data assert(tmpl['weightlimit'] == 4000000) assert(tmpl['sigoplimit'] == 
80000) assert(tmpl['transactions'][0]['txid'] == txid) assert(tmpl['transactions'][0]['sigops'] == 8) self.nodes[0].generate(1) # Mine a block to clear the gbt cache self.log.info("Non-segwit miners are able to use GBT response after activation.") # Create a 3-tx chain: tx1 (non-segwit input, paying to a segwit output) -> # tx2 (segwit input, paying to a non-segwit output) -> # tx3 (non-segwit input, paying to a non-segwit output). # tx1 is allowed to appear in the block, but no others. txid1 = send_to_witness(1, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[0], False, Decimal("49.996")) hex_tx = self.nodes[0].gettransaction(txid)['hex'] tx = FromHex(CTransaction(), hex_tx) assert(tx.wit.is_null()) # This should not be a segwit input assert(txid1 in self.nodes[0].getrawmempool()) # Now create tx2, which will spend from txid1. tx = CTransaction() tx.vin.append(CTxIn(COutPoint(int(txid1, 16), 0), b'')) tx.vout.append(CTxOut(int(49.99 * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))) tx.vout.append(CTxOut(int(49.996*COIN - 49.99*COIN))) tx2_hex = self.nodes[0].signrawtransactionwithwallet(ToHex(tx))['hex'] txid2 = self.nodes[0].sendrawtransaction(tx2_hex) tx = FromHex(CTransaction(), tx2_hex) assert(not tx.wit.is_null()) # Now create tx3, which will spend from txid2 tx = CTransaction() tx.vin.append(CTxIn(COutPoint(int(txid2, 16), 0), b"")) tx.vout.append(CTxOut(int(49.95 * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))) # Huge fee tx.vout.append(CTxOut(int(49.99*COIN - 49.95*COIN))) tx.calc_sha256() txid3 = self.nodes[0].sendrawtransaction(ToHex(tx)) assert(tx.wit.is_null()) assert(txid3 in self.nodes[0].getrawmempool()) # Check that getblocktemplate includes all transactions. template = self.nodes[0].getblocktemplate({"rules": ["segwit"]}) template_txids = [t['txid'] for t in template['transactions']] assert(txid1 in template_txids) assert(txid2 in template_txids) assert(txid3 in template_txids) # Check that wtxid is properly reported in mempool entry assert_equal(int(self.nodes[0].getmempoolentry(txid3)["wtxid"], 16), tx.calc_sha256(True)) # Mine a block to clear the gbt cache again. 
self.nodes[0].generate(1) self.log.info("Verify behaviour of importaddress and listunspent") # Some public keys to be used later pubkeys = [ "0363D44AABD0F1699138239DF2F042C3282C0671CC7A76826A55C8203D90E39242", # cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb "02D3E626B3E616FC8662B489C123349FECBFC611E778E5BE739B257EAE4721E5BF", # cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97 "04A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538A62F5BD8EC85C2477F39650BD391EA6250207065B2A81DA8B009FC891E898F0E", # 91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV "02A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538", # cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd "036722F784214129FEB9E8129D626324F3F6716555B603FFE8300BBCB882151228", # cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66 "0266A8396EE936BF6D99D17920DB21C6C7B1AB14C639D5CD72B300297E416FD2EC", # cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K "0450A38BD7F0AC212FEBA77354A9B036A32E0F7C81FC4E0C5ADCA7C549C4505D2522458C2D9AE3CEFD684E039194B72C8A10F9CB9D4764AB26FCC2718D421D3B84", # 92h2XPssjBpsJN5CqSP7v9a7cf2kgDunBC6PDFwJHMACM1rrVBJ ] # Import a compressed key and an uncompressed key, generate some multisig addresses self.nodes[0].importprivkey("92SMa2mHdcWeSRxYL3n2mcv6vyxmvmQ326fpiDffnHwu1bbF6rB") uncompressed_spendable_address = ["2drVNrQ6G7UhTrV5hHzBnh1wPQ4P1SYwEYm"] self.nodes[0].importprivkey("cW6dVxPDzWUbs16ExyD6NkLDBtqwCB82o99PT95oYprGQ6Ao11YK") compressed_spendable_address = ["2dmiAZsM5F4TS1PCUKW2t3UYw6FupXPUmRS"] assert not self.nodes[0].getaddressinfo(uncompressed_spendable_address[0])['iscompressed'] assert self.nodes[0].getaddressinfo(compressed_spendable_address[0])['iscompressed'] self.nodes[0].importpubkey(pubkeys[0]) compressed_solvable_address = [key_to_p2pkh(pubkeys[0])] self.nodes[0].importpubkey(pubkeys[1]) compressed_solvable_address.append(key_to_p2pkh(pubkeys[1])) self.nodes[0].importpubkey(pubkeys[2]) uncompressed_solvable_address = [key_to_p2pkh(pubkeys[2])] spendable_anytime = [] # These outputs should be seen anytime after importprivkey and addmultisigaddress spendable_after_importaddress = [] # These outputs should be seen after importaddress solvable_after_importaddress = [] # These outputs should be seen after importaddress but not spendable unsolvable_after_importaddress = [] # These outputs should be unsolvable after importaddress solvable_anytime = [] # These outputs should be solvable after importpubkey unseen_anytime = [] # These outputs should never be seen uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]])['address']) uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]])['address']) compressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_spendable_address[0]])['address']) uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], uncompressed_solvable_address[0]])['address']) compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_solvable_address[0]])['address']) compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_solvable_address[0], compressed_solvable_address[1]])['address']) # Test multisig_without_privkey # We have 2 public keys without private keys, use addmultisigaddress to 
add to wallet. # Money sent to P2SH of multisig of this should only be seen after importaddress with the BASE58 P2SH address. multisig_without_privkey_address = self.nodes[0].addmultisigaddress(2, [pubkeys[3], pubkeys[4]])['address'] script = CScript([OP_2, hex_str_to_bytes(pubkeys[3]), hex_str_to_bytes(pubkeys[4]), OP_2, OP_CHECKMULTISIG]) solvable_after_importaddress.append(CScript([OP_HASH160, hash160(script), OP_EQUAL])) for i in compressed_spendable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) # p2sh multisig with compressed keys should always be spendable spendable_anytime.extend([p2sh]) # bare multisig can be watched and signed, but is not treated as ours solvable_after_importaddress.extend([bare]) # P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after direct importaddress spendable_after_importaddress.extend([p2wsh, p2sh_p2wsh]) else: [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v) # normal P2PKH and P2PK with compressed keys should always be spendable spendable_anytime.extend([p2pkh, p2pk]) # P2SH_P2PK, P2SH_P2PKH with compressed keys are spendable after direct importaddress spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh]) # P2WPKH and P2SH_P2WPKH with compressed keys should always be spendable spendable_anytime.extend([p2wpkh, p2sh_p2wpkh]) for i in uncompressed_spendable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) # p2sh multisig with uncompressed keys should always be spendable spendable_anytime.extend([p2sh]) # bare multisig can be watched and signed, but is not treated as ours solvable_after_importaddress.extend([bare]) # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen unseen_anytime.extend([p2wsh, p2sh_p2wsh]) else: [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v) # normal P2PKH and P2PK with uncompressed keys should always be spendable spendable_anytime.extend([p2pkh, p2pk]) # P2SH_P2PK and P2SH_P2PKH are spendable after direct importaddress spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh]) # Witness output types with uncompressed keys are never seen unseen_anytime.extend([p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh]) for i in compressed_solvable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): # Multisig without private is not seen after addmultisigaddress, but seen after importaddress [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) solvable_after_importaddress.extend([bare, p2sh, p2wsh, p2sh_p2wsh]) else: [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v) # normal P2PKH, P2PK, P2WPKH and P2SH_P2WPKH with compressed keys should always be seen solvable_anytime.extend([p2pkh, p2pk, p2wpkh, p2sh_p2wpkh]) # P2SH_P2PK, P2SH_P2PKH with compressed keys are seen after direct importaddress solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh]) for i in uncompressed_solvable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, 
p2sh_p2wsh] = self.p2sh_address_to_script(v) # Base uncompressed multisig without private is not seen after addmultisigaddress, but seen after importaddress solvable_after_importaddress.extend([bare, p2sh]) # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen unseen_anytime.extend([p2wsh, p2sh_p2wsh]) else: [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v) # normal P2PKH and P2PK with uncompressed keys should always be seen solvable_anytime.extend([p2pkh, p2pk]) # P2SH_P2PK, P2SH_P2PKH with uncompressed keys are seen after direct importaddress solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh]) # Witness output types with uncompressed keys are never seen unseen_anytime.extend([p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh]) op1 = CScript([OP_1]) op0 = CScript([OP_0]) # 2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe is the P2SH(P2PKH) version of mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V unsolvable_address_key = hex_str_to_bytes("02341AEC7587A51CDE5279E0630A531AEA2615A9F80B17E8D9376327BAEAA59E3D") unsolvablep2pkh = CScript([OP_DUP, OP_HASH160, hash160(unsolvable_address_key), OP_EQUALVERIFY, OP_CHECKSIG]) unsolvablep2wshp2pkh = CScript([OP_0, sha256(unsolvablep2pkh)]) p2shop0 = CScript([OP_HASH160, hash160(op0), OP_EQUAL]) p2wshop1 = CScript([OP_0, sha256(op1)]) unsolvable_after_importaddress.append(unsolvablep2pkh) unsolvable_after_importaddress.append(unsolvablep2wshp2pkh) unsolvable_after_importaddress.append(op1) # OP_1 will be imported as script unsolvable_after_importaddress.append(p2wshop1) unseen_anytime.append(op0) # OP_0 will be imported as P2SH address with no script provided unsolvable_after_importaddress.append(p2shop0) spendable_txid = [] solvable_txid = [] spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime, 2)) solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime, 1)) self.mine_and_test_listunspent(spendable_after_importaddress + solvable_after_importaddress + unseen_anytime + unsolvable_after_importaddress, 0) importlist = [] for i in compressed_spendable_address + uncompressed_spendable_address + compressed_solvable_address + uncompressed_solvable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): bare = hex_str_to_bytes(v['hex']) importlist.append(bytes_to_hex_str(bare)) importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(bare)]))) else: pubkey = hex_str_to_bytes(v['pubkey']) p2pk = CScript([pubkey, OP_CHECKSIG]) p2pkh = CScript([OP_DUP, OP_HASH160, hash160(pubkey), OP_EQUALVERIFY, OP_CHECKSIG]) importlist.append(bytes_to_hex_str(p2pk)) importlist.append(bytes_to_hex_str(p2pkh)) importlist.append(bytes_to_hex_str(CScript([OP_0, hash160(pubkey)]))) importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(p2pk)]))) importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(p2pkh)]))) importlist.append(bytes_to_hex_str(unsolvablep2pkh)) importlist.append(bytes_to_hex_str(unsolvablep2wshp2pkh)) importlist.append(bytes_to_hex_str(op1)) importlist.append(bytes_to_hex_str(p2wshop1)) for i in importlist: # import all generated addresses. The wallet already has the private keys for some of these, so catch JSON RPC # exceptions and continue. 
try_rpc(-4, "The wallet already contains the private key for this address or script", self.nodes[0].importaddress, i, "", False, True) self.nodes[0].importaddress(script_to_p2sh(op0)) # import OP_0 as address only self.nodes[0].importaddress(multisig_without_privkey_address) # Test multisig_without_privkey spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime + spendable_after_importaddress, 2)) solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime + solvable_after_importaddress, 1)) self.mine_and_test_listunspent(unsolvable_after_importaddress, 1) self.mine_and_test_listunspent(unseen_anytime, 0) spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime + spendable_after_importaddress, 2)) solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime + solvable_after_importaddress, 1)) self.mine_and_test_listunspent(unsolvable_after_importaddress, 1) self.mine_and_test_listunspent(unseen_anytime, 0) # Repeat some tests. This time we don't add witness scripts with importaddress # Import a compressed key and an uncompressed key, generate some multisig addresses self.nodes[0].importprivkey("91oxj3Pfsh9gwHMVf2f9jQviNwcUWhk6ik1prHYLcG8qxtD6ois") uncompressed_spendable_address = ["2dmXP31eCGJmej1TXJozZtQoCS4ccNHw33D"] self.nodes[0].importprivkey("cVf3aYYKKSJZZ6v17jgqkBpGfLjhcZg3cUnqpbghwdX1QVStrN63") compressed_spendable_address = ["2doZfDfrmngfWw3tWz9Z8MpNvHFPhDjzGaQ"] self.nodes[0].importpubkey(pubkeys[5]) compressed_solvable_address = [key_to_p2pkh(pubkeys[5])] self.nodes[0].importpubkey(pubkeys[6]) uncompressed_solvable_address = [key_to_p2pkh(pubkeys[6])] unseen_anytime = [] # These outputs should never be seen solvable_anytime = [] # These outputs should be solvable after importpubkey unseen_anytime = [] # These outputs should never be seen uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]])['address']) uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]])['address']) compressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_spendable_address[0]])['address']) uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_solvable_address[0], uncompressed_solvable_address[0]])['address']) compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_solvable_address[0]])['address']) premature_witaddress = [] for i in compressed_spendable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) premature_witaddress.append(script_to_p2sh(p2wsh)) else: [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v) # P2WPKH, P2SH_P2WPKH are always spendable spendable_anytime.extend([p2wpkh, p2sh_p2wpkh]) for i in uncompressed_spendable_address + uncompressed_solvable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen unseen_anytime.extend([p2wsh, p2sh_p2wsh]) else: [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v) # P2WPKH, 
P2SH_P2WPKH with uncompressed keys are never seen unseen_anytime.extend([p2wpkh, p2sh_p2wpkh]) for i in compressed_solvable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) premature_witaddress.append(script_to_p2sh(p2wsh)) else: [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v) # P2SH_P2PK, P2SH_P2PKH with compressed keys are always solvable solvable_anytime.extend([p2wpkh, p2sh_p2wpkh]) self.mine_and_test_listunspent(spendable_anytime, 2) self.mine_and_test_listunspent(solvable_anytime, 1) self.mine_and_test_listunspent(unseen_anytime, 0) # Check that createrawtransaction/decoderawtransaction with non-v0 Bech32 works v1_addr = program_to_witness(1, [3, 5]) v1_tx = self.nodes[0].createrawtransaction([getutxo(spendable_txid[0])], {v1_addr: 1}) v1_decoded = self.nodes[1].decoderawtransaction(v1_tx) assert_equal(v1_decoded['vout'][0]['scriptPubKey']['addresses'][0], v1_addr) assert_equal(v1_decoded['vout'][0]['scriptPubKey']['hex'], "51020305") # Check that spendable outputs are really spendable self.create_and_mine_tx_from_txids(spendable_txid) # import all the private keys so solvable addresses become spendable self.nodes[0].importprivkey("cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb") self.nodes[0].importprivkey("cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97") self.nodes[0].importprivkey("91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV") self.nodes[0].importprivkey("cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd") self.nodes[0].importprivkey("cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66") self.nodes[0].importprivkey("cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K") self.create_and_mine_tx_from_txids(solvable_txid) # Test that importing native P2WPKH/P2WSH scripts works for use_p2wsh in [False, True]: if use_p2wsh: scriptPubKey = "00203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a" # original btc tx: # 01000000000100e1f505000000002200203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a00000000 transaction = "0100000000000101230f4f5d4b7c6fa845806ee4f67713459e1b69e8e60fcee2e4940c7a0d5de1b2010000000005f5e100002200203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a00000000" else: scriptPubKey = "a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d87" # original btc tx: # 01000000000100e1f5050000000017a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d8700000000 transaction = "0100000000000101230f4f5d4b7c6fa845806ee4f67713459e1b69e8e60fcee2e4940c7a0d5de1b2010000000005f5e1000017a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d8700000000" self.nodes[1].importaddress(scriptPubKey, "", False) rawtxfund = self.nodes[1].fundrawtransaction(transaction)['hex'] rawtxfund = self.nodes[1].signrawtransactionwithwallet(rawtxfund)["hex"] txid = self.nodes[1].sendrawtransaction(rawtxfund) assert_equal(self.nodes[1].gettransaction(txid, True)["txid"], txid) assert_equal(self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid) # Assert it is properly saved self.stop_node(1) self.start_node(1) assert_equal(self.nodes[1].gettransaction(txid, True)["txid"], txid) assert_equal(self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid)
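# REST interface checks: the run_test below exercises the /tx, /getutxos (JSON and
# binary POST, with and without /checkmempool), /block, /headers, /mempool/info,
# /mempool/contents and /chaininfo endpoints, cross-checking the responses against
# the equivalent RPC results.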
def run_test(self): self.url = urllib.parse.urlparse(self.nodes[0].url) self.log.info("Mine blocks and send Usdecoin to node 1") # Random address so node1's balance doesn't increase not_related_address = "2MxqoHEdNQTyYeX1mHcbrrpzgojbosTpCvJ" self.nodes[0].generate(1) self.sync_all() self.nodes[1].generatetoaddress(100, not_related_address) self.sync_all() assert_equal(self.nodes[0].getbalance(), 50) txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1) self.sync_all() self.nodes[1].generatetoaddress(1, not_related_address) self.sync_all() bb_hash = self.nodes[0].getbestblockhash() assert_equal(self.nodes[1].getbalance(), Decimal("0.1")) self.log.info("Load the transaction using the /tx URI") json_obj = self.test_rest_request("/tx/{}".format(txid)) spent = ( json_obj['vin'][0]['txid'], json_obj['vin'][0]['vout'] ) # get the vin to later check for utxo (should be spent by then) # get n of 0.1 outpoint n, = filter_output_indices_by_value(json_obj['vout'], Decimal('0.1')) spending = (txid, n) self.log.info("Query an unspent TXO using the /getutxos URI") json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending)) # Check chainTip response assert_equal(json_obj['chaintipHash'], bb_hash) # Make sure there is one utxo assert_equal(len(json_obj['utxos']), 1) assert_equal(json_obj['utxos'][0]['value'], Decimal('0.1')) self.log.info("Query a spent TXO using the /getutxos URI") json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spent)) # Check chainTip response assert_equal(json_obj['chaintipHash'], bb_hash) # Make sure there is no utxo in the response because this outpoint has been spent assert_equal(len(json_obj['utxos']), 0) # Check bitmap assert_equal(json_obj['bitmap'], "0") self.log.info("Query two TXOs using the /getutxos URI") json_obj = self.test_rest_request( "/getutxos/{}-{}/{}-{}".format(*(spending + spent))) assert_equal(len(json_obj['utxos']), 1) assert_equal(json_obj['bitmap'], "10") self.log.info( "Query the TXOs using the /getutxos URI with a binary response") bin_request = b'\x01\x02' for txid, n in [spending, spent]: bin_request += hex_str_to_bytes(txid) bin_request += pack("i", n) bin_response = self.test_rest_request("/getutxos", http_method='POST', req_type=ReqType.BIN, body=bin_request, ret_type=RetType.BYTES) output = BytesIO(bin_response) chain_height, = unpack("i", output.read(4)) response_hash = binascii.hexlify(output.read(32)[::-1]).decode('ascii') assert_equal( bb_hash, response_hash ) # check if getutxo's chaintip during calculation was fine assert_equal(chain_height, 102) # chain height must be 102 self.log.info("Test the /getutxos URI with and without /checkmempool") # Create a transaction, check that it's found with /checkmempool, but # not found without. Then confirm the transaction and check that it's # found with or without /checkmempool. 
# do a tx and don't sync txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1) json_obj = self.test_rest_request("/tx/{}".format(txid)) # get the spent output to later check for utxo (should be spent by then) spent = (json_obj['vin'][0]['txid'], json_obj['vin'][0]['vout']) # get n of 0.1 outpoint n, = filter_output_indices_by_value(json_obj['vout'], Decimal('0.1')) spending = (txid, n) json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending)) assert_equal(len(json_obj['utxos']), 0) json_obj = self.test_rest_request( "/getutxos/checkmempool/{}-{}".format(*spending)) assert_equal(len(json_obj['utxos']), 1) json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spent)) assert_equal(len(json_obj['utxos']), 1) json_obj = self.test_rest_request( "/getutxos/checkmempool/{}-{}".format(*spent)) assert_equal(len(json_obj['utxos']), 0) self.nodes[0].generate(1) self.sync_all() json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending)) assert_equal(len(json_obj['utxos']), 1) json_obj = self.test_rest_request( "/getutxos/checkmempool/{}-{}".format(*spending)) assert_equal(len(json_obj['utxos']), 1) # Do some invalid requests self.test_rest_request("/getutxos", http_method='POST', req_type=ReqType.JSON, body='{"checkmempool', status=400, ret_type=RetType.OBJ) self.test_rest_request("/getutxos", http_method='POST', req_type=ReqType.BIN, body='{"checkmempool', status=400, ret_type=RetType.OBJ) self.test_rest_request("/getutxos/checkmempool", http_method='POST', req_type=ReqType.JSON, status=400, ret_type=RetType.OBJ) # Test limits long_uri = '/'.join(["{}-{}".format(txid, n_) for n_ in range(20)]) self.test_rest_request("/getutxos/checkmempool/{}".format(long_uri), http_method='POST', status=400, ret_type=RetType.OBJ) long_uri = '/'.join(['{}-{}'.format(txid, n_) for n_ in range(15)]) self.test_rest_request("/getutxos/checkmempool/{}".format(long_uri), http_method='POST', status=200) self.nodes[0].generate( 1) # generate block to not affect upcoming tests self.sync_all() self.log.info("Test the /block and /headers URIs") bb_hash = self.nodes[0].getbestblockhash() # Check binary format response = self.test_rest_request("/block/{}".format(bb_hash), req_type=ReqType.BIN, ret_type=RetType.OBJ) assert_greater_than(int(response.getheader('content-length')), 80) response_bytes = response.read() # Compare with block header response_header = self.test_rest_request( "/headers/1/{}".format(bb_hash), req_type=ReqType.BIN, ret_type=RetType.OBJ) assert_equal(int(response_header.getheader('content-length')), 80) response_header_bytes = response_header.read() assert_equal(response_bytes[:80], response_header_bytes) # Check block hex format response_hex = self.test_rest_request("/block/{}".format(bb_hash), req_type=ReqType.HEX, ret_type=RetType.OBJ) assert_greater_than(int(response_hex.getheader('content-length')), 160) response_hex_bytes = response_hex.read().strip(b'\n') assert_equal(binascii.hexlify(response_bytes), response_hex_bytes) # Compare with hex block header response_header_hex = self.test_rest_request( "/headers/1/{}".format(bb_hash), req_type=ReqType.HEX, ret_type=RetType.OBJ) assert_greater_than( int(response_header_hex.getheader('content-length')), 160) response_header_hex_bytes = response_header_hex.read(160) assert_equal(binascii.hexlify(response_bytes[:80]), response_header_hex_bytes) # Check json format block_json_obj = self.test_rest_request("/block/{}".format(bb_hash)) assert_equal(block_json_obj['hash'], bb_hash) # Compare with json block header 
json_obj = self.test_rest_request("/headers/1/{}".format(bb_hash)) assert_equal(len(json_obj), 1) # ensure that there is one header in the json response assert_equal(json_obj[0]['hash'], bb_hash) # request/response hash should be the same # Compare with normal RPC block response rpc_block_json = self.nodes[0].getblock(bb_hash) for key in [ 'hash', 'confirmations', 'height', 'version', 'merkleroot', 'time', 'nonce', 'bits', 'difficulty', 'chainwork', 'previousblockhash' ]: assert_equal(json_obj[0][key], rpc_block_json[key]) # See if we can get 5 headers in one response self.nodes[1].generate(5) self.sync_all() json_obj = self.test_rest_request("/headers/5/{}".format(bb_hash)) assert_equal(len(json_obj), 5) # now we should have 5 header objects self.log.info("Test the /tx URI") tx_hash = block_json_obj['tx'][0]['txid'] json_obj = self.test_rest_request("/tx/{}".format(tx_hash)) assert_equal(json_obj['txid'], tx_hash) # Check hex format response hex_response = self.test_rest_request("/tx/{}".format(tx_hash), req_type=ReqType.HEX, ret_type=RetType.OBJ) assert_greater_than_or_equal( int(hex_response.getheader('content-length')), json_obj['size'] * 2) self.log.info("Test tx inclusion in the /mempool and /block URIs") # Make 3 tx and mine them on node 1 txs = [] txs.append(self.nodes[0].sendtoaddress(not_related_address, 11)) txs.append(self.nodes[0].sendtoaddress(not_related_address, 11)) txs.append(self.nodes[0].sendtoaddress(not_related_address, 11)) self.sync_all() # Check that there are exactly 3 transactions in the TX memory pool before generating the block json_obj = self.test_rest_request("/mempool/info") assert_equal(json_obj['size'], 3) # the size of the memory pool should be greater than 3x ~100 bytes assert_greater_than(json_obj['bytes'], 300) # Check that there are our submitted transactions in the TX memory pool json_obj = self.test_rest_request("/mempool/contents") for i, tx in enumerate(txs): assert tx in json_obj assert_equal(json_obj[tx]['spentby'], txs[i + 1:i + 2]) assert_equal(json_obj[tx]['depends'], txs[i - 1:i]) # Now mine the transactions newblockhash = self.nodes[1].generate(1) self.sync_all() # Check if the 3 tx show up in the new block json_obj = self.test_rest_request("/block/{}".format(newblockhash[0])) non_coinbase_txs = { tx['txid'] for tx in json_obj['tx'] if 'coinbase' not in tx['vin'][0] } assert_equal(non_coinbase_txs, set(txs)) # Check the same but without tx details json_obj = self.test_rest_request("/block/notxdetails/{}".format( newblockhash[0])) for tx in txs: assert tx in json_obj['tx'] self.log.info("Test the /chaininfo URI") bb_hash = self.nodes[0].getbestblockhash() json_obj = self.test_rest_request("/chaininfo") assert_equal(json_obj['bestblockhash'], bb_hash)
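# P2SH multisig + CLTV checks: the run_test below builds a 4-of-6 CHECKMULTISIG
# redeem script gated by OP_CHECKLOCKTIMEVERIFY at height 210, funds it with one
# coin, signs a spend with four of the keys, and spends it once the lock height
# has passed.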
def run_test(self):
    node = self.nodes[0]

    # Generate 6 keys.
    rawkeys = []
    pubkeys = []
    for i in range(6):
        raw_key = CECKey()
        raw_key.set_secretbytes(('privkey%d' % i).encode('ascii'))
        rawkeys.append(raw_key)
    pubkeys = [CPubKey(key.get_pubkey()) for key in rawkeys]

    # Create a 4-of-6 multi-sig wallet with CLTV.
    height = 210
    redeem_script = CScript(
        [CScriptNum(height), OP_CHECKLOCKTIMEVERIFY, OP_DROP]  # CLTV (lock_time >= 210)
        + [OP_4] + pubkeys + [OP_6, OP_CHECKMULTISIG])  # multi-sig
    hex_redeem_script = bytes_to_hex_str(redeem_script)
    p2sh_address = script_to_p2sh(redeem_script, main=False)

    # Send 1 coin to the multi-sig wallet.
    txid = node.sendtoaddress(p2sh_address, 1.0)
    raw_tx = node.getrawtransaction(txid, True)
    try:
        node.importaddress(hex_redeem_script, 'cltv', True, True)
    except Exception:
        pass
    assert_equal(
        sig(node.getreceivedbyaddress(p2sh_address, 0) - Decimal(1.0)), 0)

    # Mine one block to confirm the transaction.
    node.generate(1)  # block 201
    assert_equal(
        sig(node.getreceivedbyaddress(p2sh_address, 1) - Decimal(1.0)), 0)

    # Try to spend the coin.
    addr_to = node.getnewaddress('')

    # (1) Find the UTXO
    for vout in raw_tx['vout']:
        if vout['scriptPubKey']['addresses'] == [p2sh_address]:
            vout_n = vout['n']
    hex_script_pubkey = raw_tx['vout'][vout_n]['scriptPubKey']['hex']
    value = raw_tx['vout'][vout_n]['value']

    # (2) Create a tx
    inputs = [{
        "txid": txid,
        "vout": vout_n,
        "scriptPubKey": hex_script_pubkey,
        "redeemScript": hex_redeem_script,
        "amount": value,
    }]
    outputs = {addr_to: 0.999}
    lock_time = height
    hex_spend_raw_tx = node.createrawtransaction(inputs, outputs, lock_time)
    hex_funding_raw_tx = node.getrawtransaction(txid, False)

    # (3) Try to sign the spending tx.
    tx0 = CTransaction()
    tx0.deserialize(io.BytesIO(hex_str_to_bytes(hex_funding_raw_tx)))
    tx1 = CTransaction()
    tx1.deserialize(io.BytesIO(hex_str_to_bytes(hex_spend_raw_tx)))
    self.sign_tx(tx1, tx0, vout_n, redeem_script, 0, rawkeys[:4])  # Sign with key[0:4]

    # Mine some blocks to pass the lock time.
    node.generate(10)

    # Spend the CLTV multi-sig coins.
    raw_tx1 = tx1.serialize()
    hex_raw_tx1 = bytes_to_hex_str(raw_tx1)
    node.sendrawtransaction(hex_raw_tx1)

    # Check the tx is accepted by mempool but not confirmed.
    assert_equal(
        sig(node.getreceivedbyaddress(addr_to, 0) - Decimal(0.999)), 0)
    assert_equal(sig(node.getreceivedbyaddress(addr_to, 1)), 0)

    # Mine a block to confirm the tx.
    node.generate(1)
    assert_equal(
        sig(node.getreceivedbyaddress(addr_to, 1) - Decimal(0.999)), 0)
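# Confidential transaction checks: the run_test below covers blinded addresses
# (bech32/blech32 round-trips), confidential balances, blindrawtransaction and
# rawblindrawtransaction, auditing via importblindingkey, asset issuance and
# reissuance, partially blinded multi-asset transactions, and burn outputs in
# createrawtransaction.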
def run_test(self): print("Testing wallet secret recovery") self.test_wallet_recovery() print("Test blech32 python roundtrip") # blech/bech are aliased, both are blech32 for addrtype in ["bech32", "blech32"]: addr_to_rt = self.nodes[0].getnewaddress("", addrtype) hrp = addr_to_rt[:2] assert_equal(hrp, "el") (witver, witprog) = decode(hrp, addr_to_rt) assert_equal(encode(hrp, witver, witprog), addr_to_rt) # Test that "blech32" gives a blinded segwit address. blech32_addr = self.nodes[0].getnewaddress("", "blech32") blech32_addr_info = self.nodes[0].getaddressinfo(blech32_addr) assert_equal(blech32_addr_info["iswitness"], True) assert_equal(blech32_addr_info["confidential"], blech32_addr) print("General Confidential tests") # Running balances node0 = self.nodes[0].getbalance()["bitcoin"] assert_equal(node0, 21000000) # just making sure initialfreecoins is working node1 = 0 node2 = 0 self.nodes[0].generate(101) txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), node0, "", "", True) self.nodes[0].generate(101) self.sync_all() assert_equal(self.nodes[0].getbalance()["bitcoin"], node0) assert_equal(self.nodes[1].getbalance("*", 1, False, False, "bitcoin"), node1) assert_equal(self.nodes[2].getbalance("*", 1, False, False, "bitcoin"), node2) # Send 3 BTC from 0 to a new unconfidential address of 2 with # the sendtoaddress call address = self.nodes[2].getnewaddress() unconfidential_address = self.nodes[2].validateaddress( address)["unconfidential"] value0 = 3 self.nodes[0].sendtoaddress(unconfidential_address, value0) self.nodes[0].generate(101) self.sync_all() node0 = node0 - value0 node2 = node2 + value0 assert_equal(self.nodes[0].getbalance()["bitcoin"], node0) assert_equal(self.nodes[1].getbalance("*", 1, False, False, "bitcoin"), node1) assert_equal(self.nodes[2].getbalance()["bitcoin"], node2) # Send 5 BTC from 0 to a new address of 2 with the sendtoaddress call address2 = self.nodes[2].getnewaddress() unconfidential_address2 = self.nodes[2].validateaddress( address2)["unconfidential"] value1 = 5 confidential_tx_id = self.nodes[0].sendtoaddress(address2, value1) self.nodes[0].generate(101) self.sync_all() node0 = node0 - value1 node2 = node2 + value1 assert_equal(self.nodes[0].getbalance()["bitcoin"], node0) assert_equal(self.nodes[1].getbalance("*", 1, False, False, "bitcoin"), node1) assert_equal(self.nodes[2].getbalance()["bitcoin"], node2) # Send 7 BTC from 0 to the unconfidential address of 2 and 11 BTC to the # confidential address using the raw transaction interface change_address = self.nodes[0].getnewaddress() value2 = 7 value3 = 11 value23 = value2 + value3 unspent = self.nodes[0].listunspent(1, 9999999, [], True, {"asset": "bitcoin"}) unspent = [i for i in unspent if i['amount'] > value23] assert_equal(len(unspent), 1) fee = Decimal('0.0001') tx = self.nodes[0].createrawtransaction( [{ "txid": unspent[0]["txid"], "vout": unspent[0]["vout"], "nValue": unspent[0]["amount"] }], { unconfidential_address: value2, address2: value3, change_address: unspent[0]["amount"] - value2 - value3 - fee, "fee": fee }) tx = self.nodes[0].blindrawtransaction(tx) tx_signed = self.nodes[0].signrawtransactionwithwallet(tx) raw_tx_id = self.nodes[0].sendrawtransaction(tx_signed['hex']) self.nodes[0].generate(101) self.sync_all() node0 -= (value2 + value3) node2 += value2 + value3 assert_equal(self.nodes[0].getbalance()["bitcoin"], node0) assert_equal(self.nodes[1].getbalance("*", 1, False, False, "bitcoin"), node1) assert_equal(self.nodes[2].getbalance()["bitcoin"], node2) # Check 2's 
listreceivedbyaddress received_by_address = self.nodes[2].listreceivedbyaddress( 0, False, False, "", "bitcoin") validate_by_address = [(address2, value1 + value3), (address, value0 + value2)] assert_equal( sorted([(ele['address'], ele['amount']) for ele in received_by_address], key=lambda t: t[0]), sorted(validate_by_address, key=lambda t: t[0])) received_by_address = self.nodes[2].listreceivedbyaddress( 0, False, False, "") validate_by_address = [(address2, { "bitcoin": value1 + value3 }), (address, { "bitcoin": value0 + value2 })] assert_equal( sorted([(ele['address'], ele['amount']) for ele in received_by_address], key=lambda t: t[0]), sorted(validate_by_address, key=lambda t: t[0])) # Give an auditor (node 1) a blinding key to allow her to look at # transaction values self.nodes[1].importaddress(address2) received_by_address = self.nodes[1].listreceivedbyaddress( 1, False, True) #Node sees nothing unless it understands the values assert_equal(len(received_by_address), 0) assert_equal( len(self.nodes[1].listunspent(1, 9999999, [], True, {"asset": "bitcoin"})), 0) # Import the blinding key blindingkey = self.nodes[2].dumpblindingkey(address2) self.nodes[1].importblindingkey(address2, blindingkey) # Check the auditor's gettransaction and listreceivedbyaddress # Needs rescan to update wallet txns conf_tx = self.nodes[1].gettransaction(confidential_tx_id, True) assert_equal(conf_tx['amount']["bitcoin"], value1) # Make sure wallet can now deblind part of transaction deblinded_tx = self.nodes[1].unblindrawtransaction( conf_tx['hex'])['hex'] for output in self.nodes[1].decoderawtransaction(deblinded_tx)["vout"]: if "value" in output and output["scriptPubKey"]["type"] != "fee": assert_equal( output["scriptPubKey"]["addresses"][0], self.nodes[1].validateaddress(address2)['unconfidential']) found_unblinded = True assert found_unblinded assert_equal( self.nodes[1].gettransaction(raw_tx_id, True)['amount']["bitcoin"], value3) assert_equal( self.nodes[1].gettransaction(raw_tx_id, True, False, "bitcoin")['amount'], value3) list_unspent = self.nodes[1].listunspent(1, 9999999, [], True, {"asset": "bitcoin"}) assert_equal(list_unspent[0]['amount'] + list_unspent[1]['amount'], value1 + value3) received_by_address = self.nodes[1].listreceivedbyaddress( 1, False, True) assert_equal(len(received_by_address), 1) assert_equal((received_by_address[0]['address'], received_by_address[0]['amount']['bitcoin']), (unconfidential_address2, value1 + value3)) # Spending a single confidential output and sending it to a # unconfidential output is not possible with CT. Test the # correct behavior of blindrawtransaction. unspent = self.nodes[0].listunspent(1, 9999999, [], True, {"asset": "bitcoin"}) unspent = [i for i in unspent if i['amount'] > value23] assert_equal(len(unspent), 1) tx = self.nodes[0].createrawtransaction( [{ "txid": unspent[0]["txid"], "vout": unspent[0]["vout"], "nValue": unspent[0]["amount"] }], { unconfidential_address: unspent[0]["amount"] - fee, "fee": fee }) # Test that blindrawtransaction adds an OP_RETURN output to balance blinders temptx = self.nodes[0].blindrawtransaction(tx) decodedtx = self.nodes[0].decoderawtransaction(temptx) assert_equal(decodedtx["vout"][-1]["scriptPubKey"]["asm"], "OP_RETURN") assert_equal(len(decodedtx["vout"]), 3) # Create same transaction but with a change/dummy output. # It should pass the blinding step. 
value4 = 17 change_address = self.nodes[0].getrawchangeaddress() tx = self.nodes[0].createrawtransaction( [{ "txid": unspent[0]["txid"], "vout": unspent[0]["vout"], "nValue": unspent[0]["amount"] }], { unconfidential_address: value4, change_address: unspent[0]["amount"] - value4 - fee, "fee": fee }) tx = self.nodes[0].blindrawtransaction(tx) tx_signed = self.nodes[0].signrawtransactionwithwallet(tx) txid = self.nodes[0].sendrawtransaction(tx_signed['hex']) decodedtx = self.nodes[0].decoderawtransaction(tx_signed["hex"]) self.nodes[0].generate(101) self.sync_all() unblindfound = False for i in range(len(decodedtx["vout"])): txout = self.nodes[0].gettxout(txid, i) if txout is not None and "asset" in txout: unblindfound = True if unblindfound == False: raise Exception( "No unconfidential output detected when one should exist") node0 -= value4 node2 += value4 assert_equal(self.nodes[0].getbalance()["bitcoin"], node0) assert_equal(self.nodes[1].getbalance("*", 1, False, False, "bitcoin"), node1) assert_equal(self.nodes[2].getbalance()["bitcoin"], node2) # Testing wallet's ability to deblind its own outputs addr = self.nodes[0].getnewaddress() addr2 = self.nodes[0].getnewaddress() # We add two to-blind outputs, fundraw adds an already-blinded change output # If we only add one, the newly blinded will be 0-blinded because input = -output raw = self.nodes[0].createrawtransaction([], { addr: Decimal('1.1'), addr2: 1 }) funded = self.nodes[0].fundrawtransaction(raw) # fund again to make sure no blinded outputs were created (would fail) funded = self.nodes[0].fundrawtransaction(funded["hex"]) blinded = self.nodes[0].blindrawtransaction(funded["hex"]) # blind again to make sure we know output blinders blinded2 = self.nodes[0].blindrawtransaction(blinded) # then sign and send signed = self.nodes[0].signrawtransactionwithwallet(blinded2) self.nodes[0].sendrawtransaction(signed["hex"]) # Aside: Check all outputs after fundraw are properly marked for blinding fund_decode = self.nodes[0].decoderawtransaction(funded["hex"]) for output in fund_decode["vout"][:-1]: assert "asset" in output assert "value" in output assert output["scriptPubKey"]["type"] != "fee" assert output["commitmentnonce_fully_valid"] assert fund_decode["vout"][-1]["scriptPubKey"]["type"] == "fee" assert not fund_decode["vout"][-1]["commitmentnonce_fully_valid"] # Also check that all fundraw outputs marked for blinding are blinded later for blind_tx in [blinded, blinded2]: blind_decode = self.nodes[0].decoderawtransaction(blind_tx) for output in blind_decode["vout"][:-1]: assert "asset" not in output assert "value" not in output assert output["scriptPubKey"]["type"] != "fee" assert output["commitmentnonce_fully_valid"] assert blind_decode["vout"][-1]["scriptPubKey"]["type"] == "fee" assert "asset" in blind_decode["vout"][-1] assert "value" in blind_decode["vout"][-1] assert not blind_decode["vout"][-1]["commitmentnonce_fully_valid"] # Check createblindedaddress functionality blinded_addr = self.nodes[0].getnewaddress() validated_addr = self.nodes[0].validateaddress(blinded_addr) blinding_pubkey = self.nodes[0].validateaddress( blinded_addr)["confidential_key"] blinding_key = self.nodes[0].dumpblindingkey(blinded_addr) assert_equal( blinded_addr, self.nodes[1].createblindedaddress( validated_addr["unconfidential"], blinding_pubkey)) # If a blinding key is over-ridden by a newly imported one, funds may be unaccounted for new_addr = self.nodes[0].getnewaddress() new_validated = self.nodes[0].validateaddress(new_addr) 
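# Note: blinding_pubkey and blinding_key below still refer to the earlier
# blinded_addr, so importing them against new_addr's script deliberately
# overrides the blinding key that the wallet originally derived for new_addr.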
self.nodes[2].sendtoaddress(new_addr, 1) self.sync_all() diff_blind = self.nodes[1].createblindedaddress( new_validated["unconfidential"], blinding_pubkey) assert_equal( len(self.nodes[0].listunspent(0, 0, [new_validated["unconfidential"]])), 1) self.nodes[0].importblindingkey(diff_blind, blinding_key) # CT values for this wallet transaction have been cached via importblindingkey # therefore result will be same even though we change blinding keys assert_equal( len(self.nodes[0].listunspent(0, 0, [new_validated["unconfidential"]])), 1) # Confidential Assets Tests print("Assets tests...") # Bitcoin is the first issuance assert_equal(self.nodes[0].listissuances()[0]["assetlabel"], "bitcoin") assert_equal(len(self.nodes[0].listissuances()), 1) # Unblinded issuance of asset issued = self.nodes[0].issueasset(1, 1, False) self.nodes[0].reissueasset(issued["asset"], 1) # Compare resulting fields with getrawtransaction raw_details = self.nodes[0].getrawtransaction(issued["txid"], 1) assert_equal( issued["entropy"], raw_details["vin"][issued["vin"]]["issuance"]["assetEntropy"]) assert_equal(issued["asset"], raw_details["vin"][issued["vin"]]["issuance"]["asset"]) assert_equal(issued["token"], raw_details["vin"][issued["vin"]]["issuance"]["token"]) self.nodes[0].generate(1) self.sync_all() issued2 = self.nodes[0].issueasset(2, 1) test_asset = issued2["asset"] assert_equal(self.nodes[0].getwalletinfo()['balance'][test_asset], Decimal(2)) assert test_asset not in self.nodes[1].getwalletinfo()['balance'] # Assets balance checking, note that accounts are completely ignored because # balance queries with accounts are horrifically broken upstream assert_equal(self.nodes[0].getbalance("*", 0, False, False, "bitcoin"), self.nodes[0].getbalance("*", 0, False, False, "bitcoin")) assert_equal(self.nodes[0].getbalance("*", 0, False, False)["bitcoin"], self.nodes[0].getbalance("*", 0, False, False, "bitcoin")) assert_equal(self.nodes[0].getwalletinfo()['balance']['bitcoin'], self.nodes[0].getbalance("*", 0, False, False, "bitcoin")) # Send some bitcoin and other assets over as well to fund wallet addr = self.nodes[2].getnewaddress() txid = self.nodes[0].sendtoaddress(addr, 5) # Make sure we're doing 52 bits of hiding which covers 21M BTC worth assert_equal( self.nodes[0].getrawtransaction(txid, 1)["vout"][0]["ct-bits"], 52) self.nodes[0].sendmany("", { addr: 1, self.nodes[2].getnewaddress(): 13 }, 0, "", [], False, 1, "UNSET", {addr: test_asset}) self.sync_all() # Should have exactly 1 in change(trusted, though not confirmed) after sending one off assert_equal( self.nodes[0].getbalance("*", 0, False, False, test_asset), 1) assert_equal(self.nodes[2].getunconfirmedbalance()[test_asset], Decimal(1)) b_utxos = self.nodes[2].listunspent(0, 0, [], True, {"asset": "bitcoin"}) t_utxos = self.nodes[2].listunspent(0, 0, [], True, {"asset": test_asset}) assert_equal(len(self.nodes[2].listunspent(0, 0, [])), len(b_utxos) + len(t_utxos)) # Now craft a blinded transaction via raw api rawaddrs = [] for i in range(2): rawaddrs.append(self.nodes[1].getnewaddress()) raw_assets = self.nodes[2].createrawtransaction( [{ "txid": b_utxos[0]['txid'], "vout": b_utxos[0]['vout'], "nValue": b_utxos[0]['amount'] }, { "txid": b_utxos[1]['txid'], "vout": b_utxos[1]['vout'], "nValue": b_utxos[1]['amount'], "asset": b_utxos[1]['asset'] }, { "txid": t_utxos[0]['txid'], "vout": t_utxos[0]['vout'], "nValue": t_utxos[0]['amount'], "asset": t_utxos[0]['asset'] }], { rawaddrs[1]: Decimal(t_utxos[0]['amount']), rawaddrs[0]: 
Decimal(b_utxos[0]['amount'] + b_utxos[1]['amount'] - Decimal("0.01")), "fee": Decimal("0.01") }, 0, False, { rawaddrs[0]: b_utxos[0]['asset'], rawaddrs[1]: t_utxos[0]['asset'], "fee": b_utxos[0]['asset'] }) # Sign unblinded, then blinded signed_assets = self.nodes[2].signrawtransactionwithwallet(raw_assets) blind_assets = self.nodes[2].blindrawtransaction(raw_assets) signed_assets = self.nodes[2].signrawtransactionwithwallet( blind_assets) # And finally send self.nodes[2].sendrawtransaction(signed_assets['hex']) self.nodes[2].generate(101) self.sync_all() issuancedata = self.nodes[2].issueasset( 0, Decimal('0.00000006')) #0 of asset, 6 reissuance token # Node 2 will send node 1 a reissuance token, both will generate assets self.nodes[2].sendtoaddress(self.nodes[1].getnewaddress(), Decimal('0.00000001'), "", "", False, False, 1, "UNSET", False, issuancedata["token"]) # node 1 needs to know about a (re)issuance to reissue itself self.nodes[1].importaddress(self.nodes[2].gettransaction( issuancedata["txid"])["details"][0]["address"]) # also send some bitcoin self.nodes[2].generate(1) self.sync_all() self.nodes[1].reissueasset(issuancedata["asset"], Decimal('0.05')) self.nodes[2].reissueasset(issuancedata["asset"], Decimal('0.025')) self.nodes[1].generate(1) self.sync_all() # Check for value accounting when asset issuance is null but token not, ie unblinded # HACK: Self-send to sweep up bitcoin inputs into blinded output. # We were hitting https://github.com/ElementsProject/elements/issues/473 for the following issuance self.nodes[0].sendtoaddress( self.nodes[0].getnewaddress(), self.nodes[0].getwalletinfo()["balance"]["bitcoin"], "", "", True) issued = self.nodes[0].issueasset(0, 1, False) walletinfo = self.nodes[0].getwalletinfo() assert issued["asset"] not in walletinfo["balance"] assert_equal(walletinfo["balance"][issued["token"]], Decimal(1)) assert issued["asset"] not in walletinfo["unconfirmed_balance"] assert issued["token"] not in walletinfo["unconfirmed_balance"] # Check for value when receiving different assets by same address. 
self.nodes[0].sendtoaddress(unconfidential_address2, Decimal('0.00000001'), "", "", False, False, 1, "UNSET", False, test_asset) self.nodes[0].sendtoaddress(unconfidential_address2, Decimal('0.00000002'), "", "", False, False, 1, "UNSET", False, test_asset) self.nodes[0].generate(1) self.sync_all() received_by_address = self.nodes[1].listreceivedbyaddress( 0, False, True) multi_asset_amount = [ x for x in received_by_address if x['address'] == unconfidential_address2 ][0]['amount'] assert_equal(multi_asset_amount['bitcoin'], value1 + value3) assert_equal(multi_asset_amount[test_asset], Decimal('0.00000003')) # Check blinded multisig functionality and partial blinding functionality # Get two pubkeys blinded_addr = self.nodes[0].getnewaddress() pubkey = self.nodes[0].getaddressinfo(blinded_addr)["pubkey"] blinded_addr2 = self.nodes[1].getnewaddress() pubkey2 = self.nodes[1].getaddressinfo(blinded_addr2)["pubkey"] pubkeys = [pubkey, pubkey2] # Add multisig address unconfidential_addr = self.nodes[0].addmultisigaddress( 2, pubkeys)["address"] self.nodes[1].addmultisigaddress(2, pubkeys) self.nodes[0].importaddress(unconfidential_addr) self.nodes[1].importaddress(unconfidential_addr) # Use blinding key from node 0's original getnewaddress call blinding_pubkey = self.nodes[0].getaddressinfo( blinded_addr)["confidential_key"] blinding_key = self.nodes[0].dumpblindingkey(blinded_addr) # Create blinded address from p2sh address and import corresponding privkey blinded_multisig_addr = self.nodes[0].createblindedaddress( unconfidential_addr, blinding_pubkey) self.nodes[0].importblindingkey(blinded_multisig_addr, blinding_key) # Issue new asset, to use different assets in one transaction when doing # partial blinding. Just to make these tests a bit more elaborate :-) issued3 = self.nodes[2].issueasset(1, 0) self.nodes[2].generate(1) self.sync_all() node2_balance = self.nodes[2].getbalance() assert issued3['asset'] in node2_balance assert_equal(node2_balance[issued3['asset']], Decimal(1)) # Send asset to blinded multisig address and check that it was received self.nodes[2].sendtoaddress(address=blinded_multisig_addr, amount=1, assetlabel=issued3['asset']) self.sync_all() # We will use this multisig UTXO in our partially-blinded transaction, # and will also check that multisig UTXO can be successfully spent # after the transaction is signed by node1 and node0 in succession. 
unspent_asset = self.nodes[0].listunspent(0, 0, [unconfidential_addr], True, {"asset": issued3['asset']}) assert_equal(len(unspent_asset), 1) assert issued3['asset'] not in self.nodes[2].getbalance() # Create new UTXO on node0 to be used in our partially-blinded transaction blinded_addr = self.nodes[0].getnewaddress() addr = self.nodes[0].validateaddress(blinded_addr)["unconfidential"] self.nodes[0].sendtoaddress(blinded_addr, 0.1) unspent = self.nodes[0].listunspent(0, 0, [addr]) assert_equal(len(unspent), 1) # Create new UTXO on node1 to be used in our partially-blinded transaction blinded_addr2 = self.nodes[1].getnewaddress() addr2 = self.nodes[1].validateaddress(blinded_addr2)["unconfidential"] self.nodes[1].sendtoaddress(blinded_addr2, 0.11) unspent2 = self.nodes[1].listunspent(0, 0, [addr2]) assert_equal(len(unspent2), 1) # The transaction will have three non-fee outputs dst_addr = self.nodes[0].getnewaddress() dst_addr2 = self.nodes[1].getnewaddress() dst_addr3 = self.nodes[2].getnewaddress() # Inputs are selected up front inputs = [{ "txid": unspent2[0]["txid"], "vout": unspent2[0]["vout"] }, { "txid": unspent[0]["txid"], "vout": unspent[0]["vout"] }, { "txid": unspent_asset[0]["txid"], "vout": unspent_asset[0]["vout"] }] # Create one part of the transaction to partially blind rawtx = self.nodes[0].createrawtransaction( inputs[:1], {dst_addr2: Decimal("0.01")}) # Create another part of the transaction to partially blind rawtx2 = self.nodes[0].createrawtransaction( inputs[1:], { dst_addr: Decimal("0.1"), dst_addr3: Decimal("1.0") }, 0, False, { dst_addr: unspent[0]['asset'], dst_addr3: unspent_asset[0]['asset'] }) sum_i = unspent2[0]["amount"] + unspent[0]["amount"] sum_o = 0.01 + 0.10 + 0.1 assert_equal(int(round(sum_i * COIN)), int(round(sum_o * COIN))) # Blind the first part of the transaction - we need to supply the # assetcommmitments for all of the inputs, for the surjectionproof # to be valid after we combine the transactions blindtx = self.nodes[1].blindrawtransaction(rawtx, True, [ unspent2[0]['assetcommitment'], unspent[0]['assetcommitment'], unspent_asset[0]['assetcommitment'] ]) # Combine the transactions # Blinded, but incomplete transaction. # 1 inputs and 1 output, but no fee output, and # it was blinded with 3 asset commitments, that means # the final transaction should have 3 inputs. btx = CTransaction() btx.deserialize(io.BytesIO(hex_str_to_bytes(blindtx))) # Unblinded transaction, with 2 inputs and 2 outputs. # We will add them to the other transaction to make it complete. ubtx = CTransaction() ubtx.deserialize(io.BytesIO(hex_str_to_bytes(rawtx2))) # We will add outputs of unblinded transaction # on top of inputs and outputs of the blinded, but incomplete transaction. 
# We also append empty witness instances to make witness arrays match # vin/vout arrays btx.vin.append(ubtx.vin[0]) btx.wit.vtxinwit.append(CTxInWitness()) btx.vout.append(ubtx.vout[0]) btx.wit.vtxoutwit.append(CTxOutWitness()) btx.vin.append(ubtx.vin[1]) btx.wit.vtxinwit.append(CTxInWitness()) btx.vout.append(ubtx.vout[1]) btx.wit.vtxoutwit.append(CTxOutWitness()) # Add explicit fee output btx.vout.append( CTxOut(nValue=CTxOutValue(10000000), nAsset=CTxOutAsset(BITCOIN_ASSET_OUT))) btx.wit.vtxoutwit.append(CTxOutWitness()) # Input 0 is bitcoin asset (already blinded) # Input 1 is also bitcoin asset # Input 2 is our new asset # Blind with wrong order of assetcommitments - such transaction should be rejected blindtx = self.nodes[0].blindrawtransaction( btx.serialize().hex(), True, [ unspent_asset[0]['assetcommitment'], unspent[0]['assetcommitment'], unspent2[0]['assetcommitment'] ]) stx2 = self.nodes[1].signrawtransactionwithwallet(blindtx) stx = self.nodes[0].signrawtransactionwithwallet(stx2['hex']) self.sync_all() assert_raises_rpc_error(-26, "bad-txns-in-ne-out", self.nodes[2].sendrawtransaction, stx['hex']) # Blind with correct order of assetcommitments blindtx = self.nodes[0].blindrawtransaction( btx.serialize().hex(), True, [ unspent2[0]['assetcommitment'], unspent[0]['assetcommitment'], unspent_asset[0]['assetcommitment'] ]) stx2 = self.nodes[1].signrawtransactionwithwallet(blindtx) stx = self.nodes[0].signrawtransactionwithwallet(stx2['hex']) txid = self.nodes[2].sendrawtransaction(stx['hex']) self.nodes[2].generate(1) assert self.nodes[2].gettransaction(txid)['confirmations'] == 1 self.sync_all() # Check that the sent asset has reached its destination unconfidential_dst_addr3 = self.nodes[2].validateaddress( dst_addr3)["unconfidential"] unspent_asset2 = self.nodes[2].listunspent(1, 1, [unconfidential_dst_addr3], True, {"asset": issued3['asset']}) assert_equal(len(unspent_asset2), 1) assert_equal(unspent_asset2[0]['amount'], Decimal(1)) # And that the balance was correctly updated assert_equal(self.nodes[2].getbalance()[issued3['asset']], Decimal(1)) # Basic checks of rawblindrawtransaction functionality blinded_addr = self.nodes[0].getnewaddress() addr = self.nodes[0].validateaddress(blinded_addr)["unconfidential"] self.nodes[0].sendtoaddress(blinded_addr, 1) self.nodes[0].sendtoaddress(blinded_addr, 3) unspent = self.nodes[0].listunspent(0, 0) rawtx = self.nodes[0].createrawtransaction( [{ "txid": unspent[0]["txid"], "vout": unspent[0]["vout"] }, { "txid": unspent[1]["txid"], "vout": unspent[1]["vout"] }], { addr: unspent[0]["amount"] + unspent[1]["amount"] - Decimal("0.2"), "fee": Decimal("0.2") }) # Blinding will fail with 2 blinded inputs and 0 blinded outputs # since it has no notion of a wallet to fill in a 0-value OP_RETURN output try: self.nodes[0].rawblindrawtransaction( rawtx, [unspent[0]["amountblinder"], unspent[1]["amountblinder"]], [unspent[0]["amount"], unspent[1]["amount"]], [unspent[0]["asset"], unspent[1]["asset"]], [unspent[0]["assetblinder"], unspent[1]["assetblinder"]]) raise AssertionError( "Shouldn't be able to blind 2 input 0 output transaction via rawblindraw" ) except JSONRPCException: pass # Blinded destination added, can blind, sign and send rawtx = self.nodes[0].createrawtransaction( [{ "txid": unspent[0]["txid"], "vout": unspent[0]["vout"] }, { "txid": unspent[1]["txid"], "vout": unspent[1]["vout"] }], { blinded_addr: unspent[0]["amount"] + unspent[1]["amount"] - Decimal("0.002"), "fee": Decimal("0.002") }) signtx = 
self.nodes[0].signrawtransactionwithwallet(rawtx) try: self.nodes[0].sendrawtransaction(signtx["hex"]) raise AssertionError( "Shouldn't be able to send unblinded tx with emplaced pubkey in output without additional argument" ) except JSONRPCException: pass # Make sure RPC throws when an invalid blinding factor is provided. bad_blinder = 'FF' * 32 assert_raises_rpc_error( -8, "Unable to blind transaction: Are you sure each asset type to blind is represented in the inputs?", self.nodes[0].rawblindrawtransaction, rawtx, [unspent[0]["amountblinder"], bad_blinder], [unspent[0]["amount"], unspent[1]["amount"]], [unspent[0]["asset"], unspent[1]["asset"]], [unspent[0]["assetblinder"], unspent[1]["assetblinder"]]) assert_raises_rpc_error( -8, "Unable to blind transaction: Are you sure each asset type to blind is represented in the inputs?", self.nodes[0].rawblindrawtransaction, rawtx, [unspent[0]["amountblinder"], unspent[1]["amountblinder"]], [unspent[0]["amount"], unspent[1]["amount"]], [unspent[0]["asset"], unspent[1]["asset"]], [unspent[0]["assetblinder"], bad_blinder]) blindtx = self.nodes[0].rawblindrawtransaction( rawtx, [unspent[0]["amountblinder"], unspent[1]["amountblinder"]], [unspent[0]["amount"], unspent[1]["amount"]], [unspent[0]["asset"], unspent[1]["asset"]], [unspent[0]["assetblinder"], unspent[1]["assetblinder"]]) signtx = self.nodes[0].signrawtransactionwithwallet(blindtx) txid = self.nodes[0].sendrawtransaction(signtx["hex"]) for output in self.nodes[0].decoderawtransaction(blindtx)["vout"]: if "asset" in output and output["scriptPubKey"]["type"] != "fee": raise AssertionError("An unblinded output exists") # Test fundrawtransaction with multiple assets issue = self.nodes[0].issueasset(1, 0) assetaddr = self.nodes[0].getnewaddress() rawtx = self.nodes[0].createrawtransaction( [], { assetaddr: 1, self.nodes[0].getnewaddress(): 2 }, 0, False, {assetaddr: issue["asset"]}) funded = self.nodes[0].fundrawtransaction(rawtx) blinded = self.nodes[0].blindrawtransaction(funded["hex"]) signed = self.nodes[0].signrawtransactionwithwallet(blinded) txid = self.nodes[0].sendrawtransaction(signed["hex"]) # Test fundrawtransaction with multiple inputs, creating > vout.size change rawtx = self.nodes[0].createrawtransaction( [{ "txid": txid, "vout": 0 }, { "txid": txid, "vout": 1 }], {self.nodes[0].getnewaddress(): 5}) funded = self.nodes[0].fundrawtransaction(rawtx) blinded = self.nodes[0].blindrawtransaction(funded["hex"]) signed = self.nodes[0].signrawtransactionwithwallet(blinded) txid = self.nodes[0].sendrawtransaction(signed["hex"]) # Test corner case where wallet appends a OP_RETURN output, yet doesn't blind it # due to the fact that the output value is 0-value and input pedersen commitments # self-balance. This is rare corner case, but ok. 
unblinded = self.nodes[0].validateaddress( self.nodes[0].getnewaddress())["unconfidential"] self.nodes[0].sendtoaddress(unblinded, self.nodes[0].getbalance()["bitcoin"], "", "", True) # Make tx with blinded destination and change outputs only self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[0].getbalance()["bitcoin"] / 2) # Send back again, this transaction should have 3 outputs, all unblinded txid = self.nodes[0].sendtoaddress( unblinded, self.nodes[0].getbalance()["bitcoin"], "", "", True) outputs = self.nodes[0].getrawtransaction(txid, 1)["vout"] assert_equal(len(outputs), 3) assert "value" in outputs[0] and "value" in outputs[ 1] and "value" in outputs[2] assert_equal(outputs[2]["scriptPubKey"]["type"], 'nulldata') # Test burn argument in createrawtransaction raw_burn1 = self.nodes[0].createrawtransaction( [], { self.nodes[0].getnewaddress(): 1, "burn": 2 }) decode_burn1 = self.nodes[0].decoderawtransaction(raw_burn1) assert_equal(len(decode_burn1["vout"]), 2) found_pay = False found_burn = False for output in decode_burn1["vout"]: if output["scriptPubKey"]["asm"] == "OP_RETURN": found_burn = True if output["asset"] != self.nodes[0].dumpassetlabels( )["bitcoin"]: raise Exception( "Burn should have been bitcoin(policyAsset)") if output["scriptPubKey"]["type"] == "witness_v0_keyhash": found_pay = True assert found_pay and found_burn raw_burn2 = self.nodes[0].createrawtransaction( [], { self.nodes[0].getnewaddress(): 1, "burn": 2 }, 101, False, {"burn": "deadbeef" * 8}) decode_burn2 = self.nodes[0].decoderawtransaction(raw_burn2) assert_equal(len(decode_burn2["vout"]), 2) found_pay = False found_burn = False for output in decode_burn2["vout"]: if output["scriptPubKey"]["asm"] == "OP_RETURN": found_burn = True if output["asset"] != "deadbeef" * 8: raise Exception("Burn should have been deadbeef") if output["scriptPubKey"]["type"] == "witness_v0_keyhash": found_pay = True assert found_pay and found_burn
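# Federated peg checks: the run_test below creates peg-in claims from a
# parent-chain transaction, covering claimpegin maturity errors, rejection of
# duplicate peg-in inputs, equivalence of createrawpegin, createrawtransaction
# and createpsbt peg-in construction, and (optionally) a dynafed fedpegscript
# transition.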
def run_test(self): self.import_deterministic_coinbase_privkeys( ) # Create wallets for all nodes parent = self.nodes[0] #parent2 = self.nodes[1] sidechain = self.nodes[2] sidechain2 = self.nodes[3] # If we're testing post-transition, force a fedpegscript transition and # getting rid of old fedpegscript by making at least another epoch pass by WSH_OP_TRUE = self.nodes[0].decodescript("51")["segwit"]["hex"] # We just randomize the keys a bit to get another valid fedpegscript new_fedpegscript = sidechain.tweakfedpegscript("f00dbabe")["script"] if self.options.post_transition: print("Running test post-transition") for _ in range(30): block_hex = sidechain.getnewblockhex( 0, { "signblockscript": WSH_OP_TRUE, "max_block_witness": 10, "fedpegscript": new_fedpegscript, "extension_space": [] }) sidechain.submitblock(block_hex) assert_equal(sidechain.getsidechaininfo()["current_fedpegscripts"], [new_fedpegscript] * 2) if self.options.pre_transition: print( "Running test pre-transition, dynafed activated from first block" ) for node in self.nodes: node.importprivkey(privkey=node.get_deterministic_priv_key().key, label="mining") util.node_fastmerkle = sidechain parent.generate(101) sidechain.generate(101) self.log.info("sidechain info: {}".format( sidechain.getsidechaininfo())) addrs = sidechain.getpeginaddress() addr = addrs["mainchain_address"] assert_equal( sidechain.decodescript(addrs["claim_script"])["type"], "witness_v0_keyhash") txid1 = parent.sendtoaddress(addr, 24) vout = find_vout_for_address(parent, txid1, addr) # 10+2 confirms required to get into mempool and confirm assert_equal(sidechain.getsidechaininfo()["pegin_confirmation_depth"], 10) parent.generate(1) time.sleep(2) proof = parent.gettxoutproof([txid1]) raw = parent.gettransaction(txid1)["hex"] # Create a wallet in order to test that multi-wallet support works correctly for claimpegin # (Regression test for https://github.com/ElementsProject/elements/issues/812 .) sidechain.createwallet("throwaway") # Set up our sidechain RPCs to use the first wallet (with empty name). We do this by # overriding the RPC object in a hacky way, to avoid breaking a different hack on TestNode # that enables generate() to work despite the deprecation of the generate RPC. sidechain.rpc = sidechain.get_wallet_rpc("") print("Attempting peg-ins") # First attempt fails the consensus check but gives useful result try: pegtxid = sidechain.claimpegin(raw, proof) raise Exception( "Peg-in should not be mature enough yet, need another block.") except JSONRPCException as e: assert "Peg-in Bitcoin transaction needs more confirmations to be sent." in e.error[ "message"] # Second attempt simply doesn't hit mempool bar parent.generate(10) try: pegtxid = sidechain.claimpegin(raw, proof) raise Exception( "Peg-in should not be mature enough yet, need another block.") except JSONRPCException as e: assert "Peg-in Bitcoin transaction needs more confirmations to be sent." in e.error[ "message"] try: pegtxid = sidechain.createrawpegin(raw, proof, 'AEIOU') raise Exception("Peg-in with non-hex claim_script should fail.") except JSONRPCException as e: assert "Given claim_script is not hex." 
in e.error["message"] # Should fail due to non-matching wallet address try: scriptpubkey = sidechain.getaddressinfo( get_new_unconfidential_address(sidechain))["scriptPubKey"] pegtxid = sidechain.claimpegin(raw, proof, scriptpubkey) raise Exception( "Peg-in with non-matching claim_script should fail.") except JSONRPCException as e: assert "Given claim_script does not match the given Bitcoin transaction." in e.error[ "message"] # 12 confirms allows in mempool parent.generate(1) # Make sure that a tx with a duplicate pegin claim input gets rejected. raw_pegin = sidechain.createrawpegin(raw, proof)["hex"] raw_pegin = FromHex(CTransaction(), raw_pegin) raw_pegin.vin.append(raw_pegin.vin[0]) # duplicate the pegin input raw_pegin = sidechain.signrawtransactionwithwallet( raw_pegin.serialize().hex())["hex"] assert_raises_rpc_error(-26, "bad-txns-inputs-duplicate", sidechain.sendrawtransaction, raw_pegin) # Also try including this tx in a block manually and submitting it. doublespendblock = FromHex(CBlock(), sidechain.getnewblockhex()) doublespendblock.vtx.append(FromHex(CTransaction(), raw_pegin)) doublespendblock.hashMerkleRoot = doublespendblock.calc_merkle_root() add_witness_commitment(doublespendblock) doublespendblock.solve() block_hex = doublespendblock.serialize(True).hex() assert_raises_rpc_error(-25, "bad-txns-inputs-duplicate", sidechain.testproposedblock, block_hex, True) # Should succeed via wallet lookup for address match, and when given raw_pegin = sidechain.createrawpegin(raw, proof)['hex'] signed_pegin = sidechain.signrawtransactionwithwallet(raw_pegin) # Find the address that the peg-in used outputs = [] for pegin_vout in sidechain.decoderawtransaction(raw_pegin)['vout']: if pegin_vout['scriptPubKey']['type'] == 'witness_v0_keyhash': outputs.append({ pegin_vout['scriptPubKey']['addresses'][0]: pegin_vout['value'] }) elif pegin_vout['scriptPubKey']['type'] == 'fee': outputs.append({"fee": pegin_vout['value']}) # Check the createrawtransaction makes the same unsigned peg-in transaction raw_pegin2 = sidechain.createrawtransaction( [{ "txid": txid1, "vout": vout, "pegin_bitcoin_tx": raw, "pegin_txout_proof": proof, "pegin_claim_script": addrs["claim_script"] }], outputs) assert_equal(raw_pegin, raw_pegin2) # Check that createpsbt makes the correct unsigned peg-in pegin_psbt = sidechain.createpsbt( [{ "txid": txid1, "vout": vout, "pegin_bitcoin_tx": raw, "pegin_txout_proof": proof, "pegin_claim_script": addrs["claim_script"] }], outputs) decoded_psbt = sidechain.decodepsbt(pegin_psbt) # Check that pegin_bitcoin_tx == raw, but due to stripping witnesses, we need to compare their txids txid1 = parent.decoderawtransaction( decoded_psbt['inputs'][0]['pegin_bitcoin_tx'])['txid'] txid2 = parent.decoderawtransaction(raw)['txid'] assert_equal(txid1, txid2) # Check the rest assert_equal(decoded_psbt['inputs'][0]['pegin_claim_script'], addrs["claim_script"]) assert_equal(decoded_psbt['inputs'][0]['pegin_txout_proof'], proof) assert_equal(decoded_psbt['inputs'][0]['pegin_genesis_hash'], parent.getblockhash(0)) # Make a psbt without those peg-in data and merge them merge_pegin_psbt = sidechain.createpsbt([{ "txid": txid1, "vout": vout }], outputs) decoded_psbt = sidechain.decodepsbt(merge_pegin_psbt) assert 'pegin_bitcoin_tx' not in decoded_psbt['inputs'][0] assert 'pegin_claim_script' not in decoded_psbt['inputs'][0] assert 'pegin_txout_proof' not in decoded_psbt['inputs'][0] assert 'pegin_genesis_hash' not in decoded_psbt['inputs'][0] merged_pegin_psbt = sidechain.combinepsbt( [pegin_psbt, 
merge_pegin_psbt]) assert_equal(pegin_psbt, merged_pegin_psbt) # Now sign the psbt signed_psbt = sidechain.walletsignpsbt(pegin_psbt) # Finalize and extract and compare fin_psbt = sidechain.finalizepsbt(signed_psbt['psbt']) assert_equal(fin_psbt, signed_pegin) # Try funding a psbt with the peg-in assert_equal(sidechain.getbalance()['bitcoin'], 50) out_bal = 0 outputs.append({sidechain.getnewaddress(): 49.999}) for out in outputs: for val in out.values(): out_bal += Decimal(val) assert_greater_than(out_bal, 50) pegin_psbt = sidechain.walletcreatefundedpsbt( [{ "txid": txid1, "vout": vout, "pegin_bitcoin_tx": raw, "pegin_txout_proof": proof, "pegin_claim_script": addrs["claim_script"] }], outputs, 0, {'add_inputs': True}) signed_psbt = sidechain.walletsignpsbt(pegin_psbt['psbt']) fin_psbt = sidechain.finalizepsbt(signed_psbt['psbt']) assert fin_psbt['complete'] sample_pegin_struct = FromHex(CTransaction(), signed_pegin["hex"]) # Round-trip peg-in transaction using python serialization assert_equal(signed_pegin["hex"], sample_pegin_struct.serialize().hex()) # Store this for later (evil laugh) sample_pegin_witness = sample_pegin_struct.wit.vtxinwit[0].peginWitness pegtxid1 = sidechain.claimpegin(raw, proof) # Make sure a second pegin claim does not get accepted in the mempool when # another mempool tx already claims that pegin. assert_raises_rpc_error(-4, "txn-mempool-conflict", sidechain.claimpegin, raw, proof) # Will invalidate the block that confirms this transaction later for node_group in self.node_groups: self.sync_all(node_group) blockhash = sidechain2.generate(1) for node_group in self.node_groups: self.sync_all(node_group) sidechain.generate(5) tx1 = sidechain.gettransaction(pegtxid1) if "confirmations" in tx1 and tx1["confirmations"] == 6: print("Peg-in is confirmed: Success!") else: raise Exception("Peg-in confirmation has failed.") # Look at pegin fields decoded = sidechain.decoderawtransaction(tx1["hex"]) assert decoded["vin"][0]["is_pegin"] == True assert len(decoded["vin"][0]["pegin_witness"]) > 0 # Check that there's sufficient fee for the peg-in vsize = decoded["vsize"] fee_output = decoded["vout"][1] fallbackfee_pervbyte = Decimal("0.00001") / Decimal("1000") assert fee_output["scriptPubKey"]["type"] == "fee" assert fee_output["value"] >= fallbackfee_pervbyte * vsize # Quick reorg checks of pegs sidechain.invalidateblock(blockhash[0]) if sidechain.gettransaction(pegtxid1)["confirmations"] != 0: raise Exception( "Peg-in didn't unconfirm after invalidateblock call.") # Re-org causes peg-ins to get booted(wallet will resubmit in 10 minutes) assert_equal(sidechain.getrawmempool(), []) sidechain.sendrawtransaction(tx1["hex"]) # Create duplicate claim, put it in block along with current one in mempool # to test duplicate-in-block claims between two txs that are in the same block. 
raw_pegin = sidechain.createrawpegin(raw, proof)["hex"] raw_pegin = sidechain.signrawtransactionwithwallet(raw_pegin)["hex"] raw_pegin = FromHex(CTransaction(), raw_pegin) doublespendblock = FromHex(CBlock(), sidechain.getnewblockhex()) assert len(doublespendblock.vtx) == 2 # coinbase and pegin doublespendblock.vtx.append(raw_pegin) doublespendblock.hashMerkleRoot = doublespendblock.calc_merkle_root() add_witness_commitment(doublespendblock) doublespendblock.solve() block_hex = doublespendblock.serialize(True).hex() assert_raises_rpc_error(-25, "bad-txns-double-pegin", sidechain.testproposedblock, block_hex, True) # Re-enters block sidechain.generate(1) if sidechain.gettransaction(pegtxid1)["confirmations"] != 1: raise Exception("Peg-in should have one confirm on side block.") sidechain.reconsiderblock(blockhash[0]) if sidechain.gettransaction(pegtxid1)["confirmations"] != 6: raise Exception("Peg-in should be back to 6 confirms.") # Now the pegin is already claimed in a confirmed tx. # In that case, a duplicate claim should (1) not be accepted in the mempool # and (2) not be accepted in a block. assert_raises_rpc_error(-4, "pegin-already-claimed", sidechain.claimpegin, raw, proof) # For case (2), manually craft a block and include the tx. doublespendblock = FromHex(CBlock(), sidechain.getnewblockhex()) doublespendblock.vtx.append(raw_pegin) doublespendblock.hashMerkleRoot = doublespendblock.calc_merkle_root() add_witness_commitment(doublespendblock) doublespendblock.solve() block_hex = doublespendblock.serialize(True).hex() assert_raises_rpc_error(-25, "bad-txns-double-pegin", sidechain.testproposedblock, block_hex, True) # Do multiple claims in mempool n_claims = 6 print("Flooding mempool with a few claims") pegtxs = [] sidechain.generate(101) # Do mixture of raw peg-in and automatic peg-in tx construction # where raw creation is done on another node for i in range(n_claims): addrs = sidechain.getpeginaddress() txid = parent.sendtoaddress(addrs["mainchain_address"], 1) parent.generate(1) proof = parent.gettxoutproof([txid]) raw = parent.gettransaction(txid)["hex"] if i % 2 == 0: parent.generate(11) pegtxs += [sidechain.claimpegin(raw, proof)] else: # The raw API doesn't check for the additional 2 confirmation buffer # So we only get 10 confirms then send off. Miners will add to block anyways. 
# Don't mature whole way yet to test signing immature peg-in input parent.generate(8) # Wallet in sidechain2 gets funds instead of sidechain raw_pegin = sidechain2.createrawpegin( raw, proof, addrs["claim_script"])["hex"] # First node should also be able to make a valid transaction with or without 3rd arg # since this wallet originated the claim_script itself sidechain.createrawpegin(raw, proof, addrs["claim_script"]) sidechain.createrawpegin(raw, proof) signed_pegin = sidechain.signrawtransactionwithwallet( raw_pegin) assert signed_pegin["complete"] assert "warning" in signed_pegin # warning for immature peg-in # fully mature them now parent.generate(1) pegtxs += [sidechain.sendrawtransaction(signed_pegin["hex"])] for node_group in self.node_groups: self.sync_all(node_group) sidechain2.generate(1) for i, pegtxid in enumerate(pegtxs): if i % 2 == 0: tx = sidechain.gettransaction(pegtxid) else: tx = sidechain2.gettransaction(pegtxid) if "confirmations" not in tx or tx["confirmations"] == 0: raise Exception("Peg-in confirmation has failed.") print("Test pegouts") self.test_pegout(get_new_unconfidential_address(parent, "legacy"), sidechain) self.test_pegout(get_new_unconfidential_address(parent, "p2sh-segwit"), sidechain) self.test_pegout(get_new_unconfidential_address(parent, "bech32"), sidechain) print("Test pegout P2SH") parent_chain_addr = get_new_unconfidential_address(parent) parent_pubkey = parent.getaddressinfo(parent_chain_addr)["pubkey"] parent_chain_p2sh_addr = parent.createmultisig( 1, [parent_pubkey])["address"] self.test_pegout(parent_chain_p2sh_addr, sidechain) print("Test pegout Garbage") parent_chain_addr = "garbage" try: self.test_pegout(parent_chain_addr, sidechain) raise Exception("A garbage address should fail.") except JSONRPCException as e: assert "Invalid Bitcoin address" in e.error["message"] print("Test pegout Garbage valid") prev_txid = sidechain.sendtoaddress(sidechain.getnewaddress(), 1) sidechain.generate(1) pegout_chain = 'a' * 64 pegout_hex = 'b' * 500 inputs = [{"txid": prev_txid, "vout": 0}] outputs = {"vdata": [pegout_chain, pegout_hex]} rawtx = sidechain.createrawtransaction(inputs, outputs) raw_pegout = sidechain.decoderawtransaction(rawtx) assert 'vout' in raw_pegout and len(raw_pegout['vout']) > 0 pegout_tested = False for output in raw_pegout['vout']: scriptPubKey = output['scriptPubKey'] if 'type' in scriptPubKey and scriptPubKey['type'] == 'nulldata': assert 'pegout_hex' in scriptPubKey and 'pegout_asm' in scriptPubKey and 'pegout_type' in scriptPubKey assert 'pegout_chain' in scriptPubKey and 'pegout_reqSigs' not in scriptPubKey and 'pegout_addresses' not in scriptPubKey assert scriptPubKey['pegout_type'] == 'nonstandard' assert scriptPubKey['pegout_chain'] == pegout_chain assert scriptPubKey['pegout_hex'] == pegout_hex pegout_tested = True break assert pegout_tested print( "Now test failure to validate peg-ins based on intermittent bitcoind rpc failure" ) self.stop_node(1) txid = parent.sendtoaddress(addr, 1) parent.generate(12) proof = parent.gettxoutproof([txid]) raw = parent.gettransaction(txid)["hex"] sidechain.claimpegin(raw, proof) # stuck peg sidechain.generate(1) print("Waiting to ensure block is being rejected by sidechain2") time.sleep(5) assert sidechain.getblockcount() != sidechain2.getblockcount() print("Restarting parent2") self.start_node(1) self.connect_nodes(0, 1) # Don't make a block, race condition when pegin-invalid block # is awaiting further validation, nodes reject subsequent blocks # even ones they create print( "Now waiting 
for node to re-evaluate peg-in witness failed block... should take a few seconds" ) for node_group in self.node_groups: self.sync_all(node_group) print("Completed!\n") print("Now send funds out in two stages, partial, and full") some_btc_addr = get_new_unconfidential_address(parent) bal_1 = sidechain.getwalletinfo()["balance"]['bitcoin'] try: sidechain.sendtomainchain(some_btc_addr, bal_1 + 1) raise Exception("Sending out too much; should have failed") except JSONRPCException as e: assert "Insufficient funds" in e.error["message"] assert sidechain.getwalletinfo()["balance"]["bitcoin"] == bal_1 try: sidechain.sendtomainchain(some_btc_addr + "b", bal_1 - 1) raise Exception("Sending to invalid address; should have failed") except JSONRPCException as e: assert "Invalid Bitcoin address" in e.error["message"] assert sidechain.getwalletinfo()["balance"]["bitcoin"] == bal_1 try: sidechain.sendtomainchain("1Nro9WkpaKm9axmcfPVp79dAJU1Gx7VmMZ", bal_1 - 1) raise Exception( "Sending to mainchain address when should have been testnet; should have failed" ) except JSONRPCException as e: assert "Invalid Bitcoin address" in e.error["message"] assert sidechain.getwalletinfo()["balance"]["bitcoin"] == bal_1 # Test superfluous peg-in witness data on regular spend before we have no funds raw_spend = sidechain.createrawtransaction( [], {sidechain.getnewaddress(): 1}) fund_spend = sidechain.fundrawtransaction(raw_spend) sign_spend = sidechain.signrawtransactionwithwallet(fund_spend["hex"]) signed_struct = FromHex(CTransaction(), sign_spend["hex"]) # Non-witness tx has no witness serialized yet if len(signed_struct.wit.vtxinwit) == 0: signed_struct.wit.vtxinwit = [CTxInWitness()] signed_struct.wit.vtxinwit[ 0].peginWitness.stack = sample_pegin_witness.stack assert_equal( sidechain.testmempoolaccept([signed_struct.serialize().hex() ])[0]["allowed"], False) assert_equal( sidechain.testmempoolaccept([signed_struct.serialize().hex() ])[0]["reject-reason"], "extra-pegin-witness") signed_struct.wit.vtxinwit[0].peginWitness.stack = [b'\x00' * 100000 ] # lol assert_equal( sidechain.testmempoolaccept([signed_struct.serialize().hex() ])[0]["allowed"], False) assert_equal( sidechain.testmempoolaccept([signed_struct.serialize().hex() ])[0]["reject-reason"], "extra-pegin-witness") peg_out_txid = sidechain.sendtomainchain(some_btc_addr, 1) peg_out_details = sidechain.decoderawtransaction( sidechain.getrawtransaction(peg_out_txid)) # peg-out, change, fee assert len(peg_out_details["vout"]) == 3 found_pegout_value = False for output in peg_out_details["vout"]: if "value" in output and output["value"] == 1: found_pegout_value = True assert found_pegout_value bal_2 = sidechain.getwalletinfo()["balance"]["bitcoin"] # Make sure balance went down assert bal_2 + 1 < bal_1 # Send rest of coins using subtractfee from output arg sidechain.sendtomainchain(some_btc_addr, bal_2, True) assert sidechain.getwalletinfo()["balance"]['bitcoin'] == 0 print('Test coinbase peg-in maturity rules') # Have bitcoin output go directly into a claim output pegin_info = sidechain.getpeginaddress() mainchain_addr = pegin_info["mainchain_address"] # Watch the address so we can get tx without txindex parent.importaddress(mainchain_addr) claim_block = parent.generatetoaddress(50, mainchain_addr)[0] for node_group in self.node_groups: self.sync_all(node_group) block_coinbase = parent.getblock(claim_block, 2)["tx"][0] claim_txid = block_coinbase["txid"] claim_tx = block_coinbase["hex"] claim_proof = parent.gettxoutproof([claim_txid], claim_block) # Can't claim 
something even though it has 50 confirms since it's coinbase assert_raises_rpc_error( -8, "Peg-in Bitcoin transaction needs more confirmations to be sent.", sidechain.claimpegin, claim_tx, claim_proof) # If done via raw API, still doesn't work coinbase_pegin = sidechain.createrawpegin(claim_tx, claim_proof) assert_equal(coinbase_pegin["mature"], False) signed_pegin = sidechain.signrawtransactionwithwallet( coinbase_pegin["hex"])["hex"] assert_raises_rpc_error( -26, "bad-pegin-witness, Needs more confirmations.", sidechain.sendrawtransaction, signed_pegin) # 50 more blocks to allow wallet to make it succeed by relay and consensus parent.generatetoaddress(50, parent.getnewaddress()) for node_group in self.node_groups: self.sync_all(node_group) # Wallet still doesn't want to for 2 more confirms assert_equal( sidechain.createrawpegin(claim_tx, claim_proof)["mature"], False) # But we can just shoot it off claim_txid = sidechain.sendrawtransaction(signed_pegin) sidechain.generatetoaddress(1, sidechain.getnewaddress()) for node_group in self.node_groups: self.sync_all(node_group) assert_equal(sidechain.gettransaction(claim_txid)["confirmations"], 1) # Test a confidential pegin. print("Performing a confidential pegin.") # start pegin pegin_addrs = sidechain.getpeginaddress() assert_equal( sidechain.decodescript(pegin_addrs["claim_script"])["type"], "witness_v0_keyhash") pegin_addr = addrs["mainchain_address"] txid_fund = parent.sendtoaddress(pegin_addr, 10) # 10+2 confirms required to get into mempool and confirm parent.generate(11) for node_group in self.node_groups: self.sync_all(node_group) proof = parent.gettxoutproof([txid_fund]) raw = parent.gettransaction(txid_fund)["hex"] raw_pegin = sidechain.createrawpegin(raw, proof)['hex'] pegin = FromHex(CTransaction(), raw_pegin) # add new blinding pubkey for the pegin output pegin.vout[0].nNonce = CTxOutNonce( hex_str_to_bytes( sidechain.getaddressinfo(sidechain.getnewaddress( "", "blech32"))["confidential_key"])) # now add an extra input and output from listunspent; we need a blinded output for this blind_addr = sidechain.getnewaddress("", "blech32") sidechain.sendtoaddress(blind_addr, 15) sidechain.generate(6) # Make sure sidechain2 knows about the same input for node_group in self.node_groups: self.sync_all(node_group) unspent = [ u for u in sidechain.listunspent(6, 6) if u["amount"] == 15 ][0] assert (unspent["spendable"]) assert ("amountcommitment" in unspent) pegin.vin.append( CTxIn(COutPoint(int(unspent["txid"], 16), unspent["vout"]))) # insert corresponding output before fee output new_destination = sidechain.getaddressinfo( sidechain.getnewaddress("", "blech32")) new_dest_script_pk = hex_str_to_bytes(new_destination["scriptPubKey"]) new_dest_nonce = CTxOutNonce( hex_str_to_bytes(new_destination["confidential_key"])) new_dest_asset = pegin.vout[0].nAsset pegin.vout.insert( 1, CTxOut( int(unspent["amount"] * COIN) - 10000, new_dest_script_pk, new_dest_asset, new_dest_nonce)) # add the 10
ksat fee pegin.vout[2].nValue.setToAmount(pegin.vout[2].nValue.getAmount() + 10000) pegin_hex = ToHex(pegin) # test with both blindraw and rawblindraw raw_pegin_blinded1 = sidechain.blindrawtransaction(pegin_hex) raw_pegin_blinded2 = sidechain.rawblindrawtransaction( pegin_hex, ["", unspent["amountblinder"]], [10, 15], [unspent["asset"]] * 2, ["", unspent["assetblinder"]], "", False) pegin_signed1 = sidechain.signrawtransactionwithwallet( raw_pegin_blinded1) pegin_signed2 = sidechain.signrawtransactionwithwallet( raw_pegin_blinded2) for pegin_signed in [pegin_signed1, pegin_signed2]: final_decoded = sidechain.decoderawtransaction(pegin_signed["hex"]) assert (final_decoded["vin"][0]["is_pegin"]) assert (not final_decoded["vin"][1]["is_pegin"]) assert ("assetcommitment" in final_decoded["vout"][0]) assert ("valuecommitment" in final_decoded["vout"][0]) assert ("commitmentnonce" in final_decoded["vout"][0]) assert ("value" not in final_decoded["vout"][0]) assert ("asset" not in final_decoded["vout"][0]) assert (final_decoded["vout"][0]["commitmentnonce_fully_valid"]) assert ("assetcommitment" in final_decoded["vout"][1]) assert ("valuecommitment" in final_decoded["vout"][1]) assert ("commitmentnonce" in final_decoded["vout"][1]) assert ("value" not in final_decoded["vout"][1]) assert ("asset" not in final_decoded["vout"][1]) assert (final_decoded["vout"][1]["commitmentnonce_fully_valid"]) assert ("value" in final_decoded["vout"][2]) assert ("asset" in final_decoded["vout"][2]) # check that it is accepted in either mempool accepted = sidechain.testmempoolaccept([pegin_signed["hex"]])[0] if not accepted["allowed"]: raise Exception(accepted["reject-reason"]) accepted = sidechain2.testmempoolaccept([pegin_signed["hex"]])[0] if not accepted["allowed"]: raise Exception(accepted["reject-reason"]) print("Blinded transaction looks ok!" ) # need this print to distinguish failures in for loop print('Success!') # Manually stop sidechains first, then the parent chains. self.stop_node(2) self.stop_node(3) self.stop_node(0) self.stop_node(1)
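# Aside (sketch, not taken from the test above): the block-crafting steps in this test recompute the block's
# merkle root after appending a transaction. The computation is the standard Bitcoin double-SHA256 merkle
# tree over the txids; helper names below are illustrative and assume at least one txid is supplied.
import hashlib

def dsha256(data):
    return hashlib.sha256(hashlib.sha256(data).digest()).digest()

def merkle_root_from_txids(txids_hex):
    level = [bytes.fromhex(t)[::-1] for t in txids_hex]  # display-order hex -> internal byte order
    while len(level) > 1:
        if len(level) % 2:
            level.append(level[-1])  # odd count: pair the last hash with itself
        level = [dsha256(level[i] + level[i + 1]) for i in range(0, len(level), 2)]
    return level[0][::-1].hex()  # back to display order, as RPC reports it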
def run_test(self): self.nodes[0].generate(161) #block 161 self.log.info("Verify sigops are counted in GBT with pre-BIP141 rules before the fork") txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1) tmpl = self.nodes[0].getblocktemplate({}) assert(tmpl['sizelimit'] == 1000000) assert('weightlimit' not in tmpl) assert(tmpl['sigoplimit'] == 20000) assert(tmpl['transactions'][0]['hash'] == txid) assert(tmpl['transactions'][0]['sigops'] == 2) tmpl = self.nodes[0].getblocktemplate({'rules':['segwit']}) assert(tmpl['sizelimit'] == 1000000) assert('weightlimit' not in tmpl) assert(tmpl['sigoplimit'] == 20000) assert(tmpl['transactions'][0]['hash'] == txid) assert(tmpl['transactions'][0]['sigops'] == 2) self.nodes[0].generate(1) #block 162 balance_presetup = self.nodes[0].getbalance() self.pubkey = [] p2sh_ids = [] # p2sh_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE embedded in p2sh wit_ids = [] # wit_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE via bare witness for i in range(3): newaddress = self.nodes[i].getnewaddress() self.pubkey.append(self.nodes[i].getaddressinfo(newaddress)["pubkey"]) multiscript = CScript([OP_1, hex_str_to_bytes(self.pubkey[-1]), OP_1, OP_CHECKMULTISIG]) p2sh_addr = self.nodes[i].addwitnessaddress(newaddress) bip173_addr = self.nodes[i].addwitnessaddress(newaddress, False) p2sh_ms_addr = self.nodes[i].addmultisigaddress(1, [self.pubkey[-1]], '', 'p2sh-segwit')['address'] bip173_ms_addr = self.nodes[i].addmultisigaddress(1, [self.pubkey[-1]], '', 'bech32')['address'] assert_equal(p2sh_addr, key_to_p2sh_p2wpkh(self.pubkey[-1])) assert_equal(bip173_addr, key_to_p2wpkh(self.pubkey[-1])) assert_equal(p2sh_ms_addr, script_to_p2sh_p2wsh(multiscript)) assert_equal(bip173_ms_addr, script_to_p2wsh(multiscript)) p2sh_ids.append([]) wit_ids.append([]) for v in range(2): p2sh_ids[i].append([]) wit_ids[i].append([]) for i in range(5): for n in range(3): for v in range(2): wit_ids[n][v].append(send_to_witness(v, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[n], False, Decimal("49.999"))) p2sh_ids[n][v].append(send_to_witness(v, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[n], True, Decimal("49.999"))) self.nodes[0].generate(1) #block 163 sync_blocks(self.nodes) # Make sure all nodes recognize the transactions as theirs assert_equal(self.nodes[0].getbalance(), balance_presetup - 60*50 + 20*Decimal("49.999") + 50) assert_equal(self.nodes[1].getbalance(), 20*Decimal("49.999")) assert_equal(self.nodes[2].getbalance(), 20*Decimal("49.999")) self.nodes[0].generate(260) #block 423 sync_blocks(self.nodes) self.log.info("Verify witness txs are skipped for mining before the fork") self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][0], True) #block 424 self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][0], True) #block 425 self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][0], True) #block 426 self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][0], True) #block 427 self.log.info("Verify unsigned p2sh witness txs without a redeem script are invalid") self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V0][1], False) self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V1][1], False) self.nodes[2].generate(4) # blocks 428-431 self.log.info("Verify previous witness txs skipped for mining can now be mined") 
assert_equal(len(self.nodes[2].getrawmempool()), 4) block = self.nodes[2].generate(1) #block 432 (first block with new rules; 432 = 144 * 3) sync_blocks(self.nodes) assert_equal(len(self.nodes[2].getrawmempool()), 0) segwit_tx_list = self.nodes[2].getblock(block[0])["tx"] assert_equal(len(segwit_tx_list), 5) self.log.info("Verify default node can't accept txs with missing witness") # unsigned, no scriptsig self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", wit_ids[NODE_0][WIT_V0][0], False) self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", wit_ids[NODE_0][WIT_V1][0], False) self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V0][0], False) self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V1][0], False) # unsigned with redeem script self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V0][0], False, witness_script(False, self.pubkey[0])) self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V1][0], False, witness_script(True, self.pubkey[0])) self.log.info("Verify block and transaction serialization rpcs return differing serializations depending on rpc serialization flag") assert(self.nodes[2].getblock(block[0], False) != self.nodes[0].getblock(block[0], False)) assert(self.nodes[1].getblock(block[0], False) == self.nodes[2].getblock(block[0], False)) for i in range(len(segwit_tx_list)): tx = FromHex(CTransaction(), self.nodes[2].gettransaction(segwit_tx_list[i])["hex"]) assert(self.nodes[2].getrawtransaction(segwit_tx_list[i]) != self.nodes[0].getrawtransaction(segwit_tx_list[i])) assert(self.nodes[1].getrawtransaction(segwit_tx_list[i], 0) == self.nodes[2].getrawtransaction(segwit_tx_list[i])) assert(self.nodes[0].getrawtransaction(segwit_tx_list[i]) != self.nodes[2].gettransaction(segwit_tx_list[i])["hex"]) assert(self.nodes[1].getrawtransaction(segwit_tx_list[i]) == self.nodes[2].gettransaction(segwit_tx_list[i])["hex"]) assert(self.nodes[0].getrawtransaction(segwit_tx_list[i]) == bytes_to_hex_str(tx.serialize_without_witness())) self.log.info("Verify witness txs without witness data are invalid after the fork") self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch) (code 64)', wit_ids[NODE_2][WIT_V0][2], sign=False) self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness) (code 64)', wit_ids[NODE_2][WIT_V1][2], sign=False) self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch) (code 64)', p2sh_ids[NODE_2][WIT_V0][2], sign=False, redeem_script=witness_script(False, self.pubkey[2])) self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness) (code 64)', p2sh_ids[NODE_2][WIT_V1][2], sign=False, redeem_script=witness_script(True, self.pubkey[2])) self.log.info("Verify default node can now use witness txs") self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V0][0], True) #block 432 self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V1][0], True) #block 433 self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V0][0], True) #block 434 self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V1][0], True) #block 435 self.log.info("Verify sigops are counted in GBT with BIP141 rules after the fork") txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1) tmpl = self.nodes[0].getblocktemplate({'rules':['segwit']}) 
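# Note (sketch, not part of the original test): the post-fork limits asserted next follow from BIP141's
# witness scale factor of 4 applied to the pre-fork limits checked earlier (1,000,000-byte blocks,
# 20,000 sigops), and the per-transaction sigop counts in the template are scaled by the same factor.
WITNESS_SCALE_FACTOR = 4
assert 1000000 * WITNESS_SCALE_FACTOR == 4000000  # weight limit after activation
assert 20000 * WITNESS_SCALE_FACTOR == 80000  # sigop limit after activation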
assert(tmpl['sizelimit'] >= 3999577) # actual maximum size is lower due to minimum mandatory non-witness data assert(tmpl['weightlimit'] == 4000000) assert(tmpl['sigoplimit'] == 80000) assert(tmpl['transactions'][0]['txid'] == txid) assert(tmpl['transactions'][0]['sigops'] == 8) self.nodes[0].generate(1) # Mine a block to clear the gbt cache self.log.info("Non-segwit miners are able to use GBT response after activation.") # Create a 3-tx chain: tx1 (non-segwit input, paying to a segwit output) -> # tx2 (segwit input, paying to a non-segwit output) -> # tx3 (non-segwit input, paying to a non-segwit output). # tx1 is allowed to appear in the block, but no others. txid1 = send_to_witness(1, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[0], False, Decimal("49.996")) hex_tx = self.nodes[0].gettransaction(txid)['hex'] tx = FromHex(CTransaction(), hex_tx) assert(tx.wit.is_null()) # This should not be a segwit input assert(txid1 in self.nodes[0].getrawmempool()) # Now create tx2, which will spend from txid1. tx = CTransaction() tx.vin.append(CTxIn(COutPoint(int(txid1, 16), 0), b'')) tx.vout.append(CTxOut(int(49.99 * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))) tx2_hex = self.nodes[0].signrawtransactionwithwallet(ToHex(tx))['hex'] txid2 = self.nodes[0].sendrawtransaction(tx2_hex) tx = FromHex(CTransaction(), tx2_hex) assert(not tx.wit.is_null()) # Now create tx3, which will spend from txid2 tx = CTransaction() tx.vin.append(CTxIn(COutPoint(int(txid2, 16), 0), b"")) tx.vout.append(CTxOut(int(49.95 * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))) # Huge fee tx.calc_sha256() txid3 = self.nodes[0].sendrawtransaction(ToHex(tx)) assert(tx.wit.is_null()) assert(txid3 in self.nodes[0].getrawmempool()) # Now try calling getblocktemplate() without segwit support. template = self.nodes[0].getblocktemplate() # Check that tx1 is the only transaction of the 3 in the template. template_txids = [ t['txid'] for t in template['transactions'] ] assert(txid2 not in template_txids and txid3 not in template_txids) assert(txid1 in template_txids) # Check that running with segwit support results in all 3 being included. template = self.nodes[0].getblocktemplate({"rules": ["segwit"]}) template_txids = [ t['txid'] for t in template['transactions'] ] assert(txid1 in template_txids) assert(txid2 in template_txids) assert(txid3 in template_txids) # Check that wtxid is properly reported in mempool entry assert_equal(int(self.nodes[0].getmempoolentry(txid3)["wtxid"], 16), tx.calc_sha256(True)) # Mine a block to clear the gbt cache again. 
self.nodes[0].generate(1) self.log.info("Verify behaviour of importaddress, addwitnessaddress and listunspent") # Some public keys to be used later pubkeys = [ "0363D44AABD0F1699138239DF2F042C3282C0671CC7A76826A55C8203D90E39242", # cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb "02D3E626B3E616FC8662B489C123349FECBFC611E778E5BE739B257EAE4721E5BF", # cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97 "04A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538A62F5BD8EC85C2477F39650BD391EA6250207065B2A81DA8B009FC891E898F0E", # 91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV "02A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538", # cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd "036722F784214129FEB9E8129D626324F3F6716555B603FFE8300BBCB882151228", # cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66 "0266A8396EE936BF6D99D17920DB21C6C7B1AB14C639D5CD72B300297E416FD2EC", # cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K "0450A38BD7F0AC212FEBA77354A9B036A32E0F7C81FC4E0C5ADCA7C549C4505D2522458C2D9AE3CEFD684E039194B72C8A10F9CB9D4764AB26FCC2718D421D3B84", # 92h2XPssjBpsJN5CqSP7v9a7cf2kgDunBC6PDFwJHMACM1rrVBJ ] # Import a compressed key and an uncompressed key, generate some multisig addresses self.nodes[0].importprivkey("92e6XLo5jVAVwrQKPNTs93oQco8f8sDNBcpv73Dsrs397fQtFQn") uncompressed_spendable_address = ["mvozP4UwyGD2mGZU4D2eMvMLPB9WkMmMQu"] self.nodes[0].importprivkey("cNC8eQ5dg3mFAVePDX4ddmPYpPbw41r9bm2jd1nLJT77e6RrzTRR") compressed_spendable_address = ["mmWQubrDomqpgSYekvsU7HWEVjLFHAakLe"] assert ((self.nodes[0].getaddressinfo(uncompressed_spendable_address[0])['iscompressed'] == False)) assert ((self.nodes[0].getaddressinfo(compressed_spendable_address[0])['iscompressed'] == True)) self.nodes[0].importpubkey(pubkeys[0]) compressed_solvable_address = [key_to_p2pkh(pubkeys[0])] self.nodes[0].importpubkey(pubkeys[1]) compressed_solvable_address.append(key_to_p2pkh(pubkeys[1])) self.nodes[0].importpubkey(pubkeys[2]) uncompressed_solvable_address = [key_to_p2pkh(pubkeys[2])] spendable_anytime = [] # These outputs should be seen anytime after importprivkey and addmultisigaddress spendable_after_importaddress = [] # These outputs should be seen after importaddress solvable_after_importaddress = [] # These outputs should be seen after importaddress but not spendable unsolvable_after_importaddress = [] # These outputs should be unsolvable after importaddress solvable_anytime = [] # These outputs should be solvable after importpubkey unseen_anytime = [] # These outputs should never be seen uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]])['address']) uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]])['address']) compressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_spendable_address[0]])['address']) uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], uncompressed_solvable_address[0]])['address']) compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_solvable_address[0]])['address']) compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_solvable_address[0], compressed_solvable_address[1]])['address']) unknown_address = ["mtKKyoHabkk6e4ppT7NaM7THqPUt7AzPrT", 
"2NDP3jLWAFT8NDAiUa9qiE6oBt2awmMq7Dx"] # Test multisig_without_privkey # We have 2 public keys without private keys, use addmultisigaddress to add to wallet. # Money sent to P2SH of multisig of this should only be seen after importaddress with the BASE58 P2SH address. multisig_without_privkey_address = self.nodes[0].addmultisigaddress(2, [pubkeys[3], pubkeys[4]])['address'] script = CScript([OP_2, hex_str_to_bytes(pubkeys[3]), hex_str_to_bytes(pubkeys[4]), OP_2, OP_CHECKMULTISIG]) solvable_after_importaddress.append(CScript([OP_HASH160, hash160(script), OP_EQUAL])) for i in compressed_spendable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) # p2sh multisig with compressed keys should always be spendable spendable_anytime.extend([p2sh]) # bare multisig can be watched and signed, but is not treated as ours solvable_after_importaddress.extend([bare]) # P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after direct importaddress spendable_after_importaddress.extend([p2wsh, p2sh_p2wsh]) else: [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v) # normal P2PKH and P2PK with compressed keys should always be spendable spendable_anytime.extend([p2pkh, p2pk]) # P2SH_P2PK, P2SH_P2PKH with compressed keys are spendable after direct importaddress spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh]) # P2WPKH and P2SH_P2WPKH with compressed keys should always be spendable spendable_anytime.extend([p2wpkh, p2sh_p2wpkh]) for i in uncompressed_spendable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) # p2sh multisig with uncompressed keys should always be spendable spendable_anytime.extend([p2sh]) # bare multisig can be watched and signed, but is not treated as ours solvable_after_importaddress.extend([bare]) # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen unseen_anytime.extend([p2wsh, p2sh_p2wsh]) else: [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v) # normal P2PKH and P2PK with uncompressed keys should always be spendable spendable_anytime.extend([p2pkh, p2pk]) # P2SH_P2PK and P2SH_P2PKH are spendable after direct importaddress spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh]) # Witness output types with uncompressed keys are never seen unseen_anytime.extend([p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh]) for i in compressed_solvable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): # Multisig without private is not seen after addmultisigaddress, but seen after importaddress [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) solvable_after_importaddress.extend([bare, p2sh, p2wsh, p2sh_p2wsh]) else: [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v) # normal P2PKH, P2PK, P2WPKH and P2SH_P2WPKH with compressed keys should always be seen solvable_anytime.extend([p2pkh, p2pk, p2wpkh, p2sh_p2wpkh]) # P2SH_P2PK, P2SH_P2PKH with compressed keys are seen after direct importaddress solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, 
p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh]) for i in uncompressed_solvable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) # Base uncompressed multisig without private is not seen after addmultisigaddress, but seen after importaddress solvable_after_importaddress.extend([bare, p2sh]) # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen unseen_anytime.extend([p2wsh, p2sh_p2wsh]) else: [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v) # normal P2PKH and P2PK with uncompressed keys should always be seen solvable_anytime.extend([p2pkh, p2pk]) # P2SH_P2PK, P2SH_P2PKH with uncompressed keys are seen after direct importaddress solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh]) # Witness output types with uncompressed keys are never seen unseen_anytime.extend([p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh]) op1 = CScript([OP_1]) op0 = CScript([OP_0]) # 2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe is the P2SH(P2PKH) version of mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V unsolvable_address = ["mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V", "2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe", script_to_p2sh(op1), script_to_p2sh(op0)] unsolvable_address_key = hex_str_to_bytes("02341AEC7587A51CDE5279E0630A531AEA2615A9F80B17E8D9376327BAEAA59E3D") unsolvablep2pkh = CScript([OP_DUP, OP_HASH160, hash160(unsolvable_address_key), OP_EQUALVERIFY, OP_CHECKSIG]) unsolvablep2wshp2pkh = CScript([OP_0, sha256(unsolvablep2pkh)]) p2shop0 = CScript([OP_HASH160, hash160(op0), OP_EQUAL]) p2wshop1 = CScript([OP_0, sha256(op1)]) unsolvable_after_importaddress.append(unsolvablep2pkh) unsolvable_after_importaddress.append(unsolvablep2wshp2pkh) unsolvable_after_importaddress.append(op1) # OP_1 will be imported as script unsolvable_after_importaddress.append(p2wshop1) unseen_anytime.append(op0) # OP_0 will be imported as P2SH address with no script provided unsolvable_after_importaddress.append(p2shop0) spendable_txid = [] solvable_txid = [] spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime, 2)) solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime, 1)) self.mine_and_test_listunspent(spendable_after_importaddress + solvable_after_importaddress + unseen_anytime + unsolvable_after_importaddress, 0) importlist = [] for i in compressed_spendable_address + uncompressed_spendable_address + compressed_solvable_address + uncompressed_solvable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): bare = hex_str_to_bytes(v['hex']) importlist.append(bytes_to_hex_str(bare)) importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(bare)]))) else: pubkey = hex_str_to_bytes(v['pubkey']) p2pk = CScript([pubkey, OP_CHECKSIG]) p2pkh = CScript([OP_DUP, OP_HASH160, hash160(pubkey), OP_EQUALVERIFY, OP_CHECKSIG]) importlist.append(bytes_to_hex_str(p2pk)) importlist.append(bytes_to_hex_str(p2pkh)) importlist.append(bytes_to_hex_str(CScript([OP_0, hash160(pubkey)]))) importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(p2pk)]))) importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(p2pkh)]))) importlist.append(bytes_to_hex_str(unsolvablep2pkh)) importlist.append(bytes_to_hex_str(unsolvablep2wshp2pkh)) importlist.append(bytes_to_hex_str(op1)) importlist.append(bytes_to_hex_str(p2wshop1)) for i in importlist: # import all generated addresses. 
The wallet already has the private keys for some of these, so catch JSON RPC # exceptions and continue. try_rpc(-4, "The wallet already contains the private key for this address or script", self.nodes[0].importaddress, i, "", False, True) self.nodes[0].importaddress(script_to_p2sh(op0)) # import OP_0 as address only self.nodes[0].importaddress(multisig_without_privkey_address) # Test multisig_without_privkey spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime + spendable_after_importaddress, 2)) solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime + solvable_after_importaddress, 1)) self.mine_and_test_listunspent(unsolvable_after_importaddress, 1) self.mine_and_test_listunspent(unseen_anytime, 0) # addwitnessaddress should refuse to return a witness address if an uncompressed key is used # note that no witness address should be returned by unsolvable addresses for i in uncompressed_spendable_address + uncompressed_solvable_address + unknown_address + unsolvable_address: assert_raises_rpc_error(-4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i) # addwitnessaddress should return a witness addresses even if keys are not in the wallet self.nodes[0].addwitnessaddress(multisig_without_privkey_address) for i in compressed_spendable_address + compressed_solvable_address: witaddress = self.nodes[0].addwitnessaddress(i) # addwitnessaddress should return the same address if it is a known P2SH-witness address assert_equal(witaddress, self.nodes[0].addwitnessaddress(witaddress)) spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime + spendable_after_importaddress, 2)) solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime + solvable_after_importaddress, 1)) self.mine_and_test_listunspent(unsolvable_after_importaddress, 1) self.mine_and_test_listunspent(unseen_anytime, 0) # Repeat some tests. 
This time we don't add witness scripts with importaddress # Import a compressed key and an uncompressed key, generate some multisig addresses self.nodes[0].importprivkey("927pw6RW8ZekycnXqBQ2JS5nPyo1yRfGNN8oq74HeddWSpafDJH") uncompressed_spendable_address = ["mguN2vNSCEUh6rJaXoAVwY3YZwZvEmf5xi"] self.nodes[0].importprivkey("cMcrXaaUC48ZKpcyydfFo8PxHAjpsYLhdsp6nmtB3E2ER9UUHWnw") compressed_spendable_address = ["n1UNmpmbVUJ9ytXYXiurmGPQ3TRrXqPWKL"] self.nodes[0].importpubkey(pubkeys[5]) compressed_solvable_address = [key_to_p2pkh(pubkeys[5])] self.nodes[0].importpubkey(pubkeys[6]) uncompressed_solvable_address = [key_to_p2pkh(pubkeys[6])] spendable_after_addwitnessaddress = [] # These outputs should be seen after importaddress solvable_after_addwitnessaddress=[] # These outputs should be seen after importaddress but not spendable unseen_anytime = [] # These outputs should never be seen solvable_anytime = [] # These outputs should be solvable after importpubkey unseen_anytime = [] # These outputs should never be seen uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]])['address']) uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]])['address']) compressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_spendable_address[0]])['address']) uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_solvable_address[0], uncompressed_solvable_address[0]])['address']) compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_solvable_address[0]])['address']) premature_witaddress = [] for i in compressed_spendable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) # P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after addwitnessaddress spendable_after_addwitnessaddress.extend([p2wsh, p2sh_p2wsh]) premature_witaddress.append(script_to_p2sh(p2wsh)) else: [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v) # P2WPKH, P2SH_P2WPKH are always spendable spendable_anytime.extend([p2wpkh, p2sh_p2wpkh]) for i in uncompressed_spendable_address + uncompressed_solvable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen unseen_anytime.extend([p2wsh, p2sh_p2wsh]) else: [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v) # P2WPKH, P2SH_P2WPKH with uncompressed keys are never seen unseen_anytime.extend([p2wpkh, p2sh_p2wpkh]) for i in compressed_solvable_address: v = self.nodes[0].getaddressinfo(i) if (v['isscript']): # P2WSH multisig without private key are seen after addwitnessaddress [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) solvable_after_addwitnessaddress.extend([p2wsh, p2sh_p2wsh]) premature_witaddress.append(script_to_p2sh(p2wsh)) else: [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v) # P2SH_P2PK, P2SH_P2PKH with compressed keys 
are always solvable solvable_anytime.extend([p2wpkh, p2sh_p2wpkh]) self.mine_and_test_listunspent(spendable_anytime, 2) self.mine_and_test_listunspent(solvable_anytime, 1) self.mine_and_test_listunspent(spendable_after_addwitnessaddress + solvable_after_addwitnessaddress + unseen_anytime, 0) # addwitnessaddress should refuse to return a witness address if an uncompressed key is used # note that a multisig address returned by addmultisigaddress is not solvable until it is added with importaddress # premature_witaddress are not accepted until the script is added with addwitnessaddress first for i in uncompressed_spendable_address + uncompressed_solvable_address + premature_witaddress: # This will raise an exception assert_raises_rpc_error(-4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i) # after importaddress it should pass addwitnessaddress v = self.nodes[0].getaddressinfo(compressed_solvable_address[1]) self.nodes[0].importaddress(v['hex'],"",False,True) for i in compressed_spendable_address + compressed_solvable_address + premature_witaddress: witaddress = self.nodes[0].addwitnessaddress(i) assert_equal(witaddress, self.nodes[0].addwitnessaddress(witaddress)) spendable_txid.append(self.mine_and_test_listunspent(spendable_after_addwitnessaddress + spendable_anytime, 2)) solvable_txid.append(self.mine_and_test_listunspent(solvable_after_addwitnessaddress + solvable_anytime, 1)) self.mine_and_test_listunspent(unseen_anytime, 0) # Check that createrawtransaction/decoderawtransaction with non-v0 Bech32 works v1_addr = program_to_witness(1, [3,5]) v1_tx = self.nodes[0].createrawtransaction([getutxo(spendable_txid[0])],{v1_addr: 1}) v1_decoded = self.nodes[1].decoderawtransaction(v1_tx) assert_equal(v1_decoded['vout'][0]['scriptPubKey']['addresses'][0], v1_addr) assert_equal(v1_decoded['vout'][0]['scriptPubKey']['hex'], "51020305") # Check that spendable outputs are really spendable self.create_and_mine_tx_from_txids(spendable_txid) # import all the private keys so solvable addresses become spendable self.nodes[0].importprivkey("cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb") self.nodes[0].importprivkey("cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97") self.nodes[0].importprivkey("91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV") self.nodes[0].importprivkey("cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd") self.nodes[0].importprivkey("cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66") self.nodes[0].importprivkey("cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K") self.create_and_mine_tx_from_txids(solvable_txid) # Test that importing native P2WPKH/P2WSH scripts works for use_p2wsh in [False, True]: if use_p2wsh: scriptPubKey = "00203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a" transaction = "01000000000100e1f505000000002200203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a00000000" else: scriptPubKey = "a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d87" transaction = "01000000000100e1f5050000000017a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d8700000000" self.nodes[1].importaddress(scriptPubKey, "", False) rawtxfund = self.nodes[1].fundrawtransaction(transaction)['hex'] rawtxfund = self.nodes[1].signrawtransactionwithwallet(rawtxfund)["hex"] txid = self.nodes[1].sendrawtransaction(rawtxfund) assert_equal(self.nodes[1].gettransaction(txid, True)["txid"], txid) assert_equal(self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid) # Assert it is properly saved 
self.stop_node(1) self.start_node(1) assert_equal(self.nodes[1].gettransaction(txid, True)["txid"], txid) assert_equal(self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid)
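# Background sketch (independent of the framework helpers such as key_to_p2wpkh used above; function
# names here are illustrative): how the P2WPKH and P2SH-wrapped script forms checked by this test are
# derived from a public key, using the standard opcode encodings.
import hashlib

def _hash160(data):
    # RIPEMD160(SHA256(data)); assumes hashlib exposes ripemd160 on this build
    return hashlib.new("ripemd160", hashlib.sha256(data).digest()).digest()

def p2wpkh_script(pubkey):
    # witness v0 keyhash program: OP_0 <20-byte key hash>
    return bytes([0x00, 0x14]) + _hash160(pubkey)

def p2sh_script(redeem_script):
    # P2SH wrapper: OP_HASH160 <20-byte script hash> OP_EQUAL
    return bytes([0xA9, 0x14]) + _hash160(redeem_script) + bytes([0x87])

# A P2SH-P2WPKH address (as returned by addwitnessaddress above) commits to
# p2sh_script(p2wpkh_script(pubkey)).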
def FromHex(obj, hex_string): """Deserialize from a hex string representation (e.g. from RPC)""" obj.deserialize(BytesIO(hex_str_to_bytes(hex_string))) return obj
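# The tests above also rely on a ToHex counterpart; a minimal sketch, assuming the object exposes a
# serialize() method as CTransaction and CBlock do in this framework:
def ToHex(obj):
    """Serialize an object to its hex string representation (e.g. for RPC)."""
    return obj.serialize().hex()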
def run_test(self): self.url = urllib.parse.urlparse(self.nodes[0].url) self.log.info("Mine blocks and send Bitcoin to node 1") # Random address so node1's balance doesn't increase not_related_address = "2MxqoHEdNQTyYeX1mHcbrrpzgojbosTpCvJ" self.nodes[0].generate(1) self.sync_all() self.nodes[1].generatetoaddress(100, not_related_address) self.sync_all() assert_equal(self.nodes[0].getbalance(), 50) txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1) self.sync_all() self.nodes[1].generatetoaddress(1, not_related_address) self.sync_all() bb_hash = self.nodes[0].getbestblockhash() assert_equal(self.nodes[1].getbalance(), Decimal("0.1")) self.log.info("Load the transaction using the /tx URI") json_obj = self.test_rest_request("/tx/{}".format(txid)) spent = (json_obj['vin'][0]['txid'], json_obj['vin'][0]['vout']) # get the vin to later check for utxo (should be spent by then) # get n of 0.1 outpoint n, = filter_output_indices_by_value(json_obj['vout'], Decimal('0.1')) spending = (txid, n) self.log.info("Query an unspent TXO using the /getutxos URI") json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending)) # Check chainTip response assert_equal(json_obj['chaintipHash'], bb_hash) # Make sure there is one utxo assert_equal(len(json_obj['utxos']), 1) assert_equal(json_obj['utxos'][0]['value'], Decimal('0.1')) self.log.info("Query a spent TXO using the /getutxos URI") json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spent)) # Check chainTip response assert_equal(json_obj['chaintipHash'], bb_hash) # Make sure there is no utxo in the response because this outpoint has been spent assert_equal(len(json_obj['utxos']), 0) # Check bitmap assert_equal(json_obj['bitmap'], "0") self.log.info("Query two TXOs using the /getutxos URI") json_obj = self.test_rest_request("/getutxos/{}-{}/{}-{}".format(*(spending + spent))) assert_equal(len(json_obj['utxos']), 1) assert_equal(json_obj['bitmap'], "10") self.log.info("Query the TXOs using the /getutxos URI with a binary response") bin_request = b'\x01\x02' for txid, n in [spending, spent]: bin_request += hex_str_to_bytes(txid) bin_request += pack("i", n) bin_response = self.test_rest_request("/getutxos", http_method='POST', req_type=ReqType.BIN, body=bin_request, ret_type=RetType.BYTES) output = BytesIO(bin_response) chain_height, = unpack("i", output.read(4)) response_hash = binascii.hexlify(output.read(32)[::-1]).decode('ascii') assert_equal(bb_hash, response_hash) # check if getutxo's chaintip during calculation was fine assert_equal(chain_height, 102) # chain height must be 102 self.log.info("Test the /getutxos URI with and without /checkmempool") # Create a transaction, check that it's found with /checkmempool, but # not found without. Then confirm the transaction and check that it's # found with or without /checkmempool. 
# do a tx and don't sync txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1) json_obj = self.test_rest_request("/tx/{}".format(txid)) # get the spent output to later check for utxo (should be spent by then) spent = (json_obj['vin'][0]['txid'], json_obj['vin'][0]['vout']) # get n of 0.1 outpoint n, = filter_output_indices_by_value(json_obj['vout'], Decimal('0.1')) spending = (txid, n) json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending)) assert_equal(len(json_obj['utxos']), 0) json_obj = self.test_rest_request("/getutxos/checkmempool/{}-{}".format(*spending)) assert_equal(len(json_obj['utxos']), 1) json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spent)) assert_equal(len(json_obj['utxos']), 1) json_obj = self.test_rest_request("/getutxos/checkmempool/{}-{}".format(*spent)) assert_equal(len(json_obj['utxos']), 0) self.nodes[0].generate(1) self.sync_all() json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending)) assert_equal(len(json_obj['utxos']), 1) json_obj = self.test_rest_request("/getutxos/checkmempool/{}-{}".format(*spending)) assert_equal(len(json_obj['utxos']), 1) # Do some invalid requests self.test_rest_request("/getutxos", http_method='POST', req_type=ReqType.JSON, body='{"checkmempool', status=400, ret_type=RetType.OBJ) self.test_rest_request("/getutxos", http_method='POST', req_type=ReqType.BIN, body='{"checkmempool', status=400, ret_type=RetType.OBJ) self.test_rest_request("/getutxos/checkmempool", http_method='POST', req_type=ReqType.JSON, status=400, ret_type=RetType.OBJ) # Test limits long_uri = '/'.join(["{}-{}".format(txid, n_) for n_ in range(20)]) self.test_rest_request("/getutxos/checkmempool/{}".format(long_uri), http_method='POST', status=400, ret_type=RetType.OBJ) long_uri = '/'.join(['{}-{}'.format(txid, n_) for n_ in range(15)]) self.test_rest_request("/getutxos/checkmempool/{}".format(long_uri), http_method='POST', status=200) self.nodes[0].generate(1) # generate block to not affect upcoming tests self.sync_all() self.log.info("Test the /block, /blockhashbyheight and /headers URIs") bb_hash = self.nodes[0].getbestblockhash() # Check result if block does not exists assert_equal(self.test_rest_request('/headers/1/0000000000000000000000000000000000000000000000000000000000000000'), []) self.test_rest_request('/block/0000000000000000000000000000000000000000000000000000000000000000', status=404, ret_type=RetType.OBJ) # Check result if block is not in the active chain self.nodes[0].invalidateblock(bb_hash) assert_equal(self.test_rest_request('/headers/1/{}'.format(bb_hash)), []) self.test_rest_request('/block/{}'.format(bb_hash)) self.nodes[0].reconsiderblock(bb_hash) # Check binary format response = self.test_rest_request("/block/{}".format(bb_hash), req_type=ReqType.BIN, ret_type=RetType.OBJ) assert_greater_than(int(response.getheader('content-length')), 80) response_bytes = response.read() # Compare with block header response_header = self.test_rest_request("/headers/1/{}".format(bb_hash), req_type=ReqType.BIN, ret_type=RetType.OBJ) assert_equal(int(response_header.getheader('content-length')), 80) response_header_bytes = response_header.read() assert_equal(response_bytes[:80], response_header_bytes) # Check block hex format response_hex = self.test_rest_request("/block/{}".format(bb_hash), req_type=ReqType.HEX, ret_type=RetType.OBJ) assert_greater_than(int(response_hex.getheader('content-length')), 160) response_hex_bytes = response_hex.read().strip(b'\n') assert_equal(binascii.hexlify(response_bytes), 
response_hex_bytes) # Compare with hex block header response_header_hex = self.test_rest_request("/headers/1/{}".format(bb_hash), req_type=ReqType.HEX, ret_type=RetType.OBJ) assert_greater_than(int(response_header_hex.getheader('content-length')), 160) response_header_hex_bytes = response_header_hex.read(160) assert_equal(binascii.hexlify(response_bytes[:80]), response_header_hex_bytes) # Check json format block_json_obj = self.test_rest_request("/block/{}".format(bb_hash)) assert_equal(block_json_obj['hash'], bb_hash) assert_equal(self.test_rest_request("/blockhashbyheight/{}".format(block_json_obj['height']))['blockhash'], bb_hash) # Check hex/bin format resp_hex = self.test_rest_request("/blockhashbyheight/{}".format(block_json_obj['height']), req_type=ReqType.HEX, ret_type=RetType.OBJ) assert_equal(resp_hex.read().decode('utf-8').rstrip(), bb_hash) resp_bytes = self.test_rest_request("/blockhashbyheight/{}".format(block_json_obj['height']), req_type=ReqType.BIN, ret_type=RetType.BYTES) blockhash = binascii.hexlify(resp_bytes[::-1]).decode('utf-8') assert_equal(blockhash, bb_hash) # Check invalid blockhashbyheight requests resp = self.test_rest_request("/blockhashbyheight/abc", ret_type=RetType.OBJ, status=400) assert_equal(resp.read().decode('utf-8').rstrip(), "Invalid height: abc") resp = self.test_rest_request("/blockhashbyheight/1000000", ret_type=RetType.OBJ, status=404) assert_equal(resp.read().decode('utf-8').rstrip(), "Block height out of range") resp = self.test_rest_request("/blockhashbyheight/-1", ret_type=RetType.OBJ, status=400) assert_equal(resp.read().decode('utf-8').rstrip(), "Invalid height: -1") self.test_rest_request("/blockhashbyheight/", ret_type=RetType.OBJ, status=400) # Compare with json block header json_obj = self.test_rest_request("/headers/1/{}".format(bb_hash)) assert_equal(len(json_obj), 1) # ensure that there is one header in the json response assert_equal(json_obj[0]['hash'], bb_hash) # request/response hash should be the same # Compare with normal RPC block response rpc_block_json = self.nodes[0].getblock(bb_hash) for key in ['hash', 'confirmations', 'height', 'version', 'merkleroot', 'time', 'nonce', 'bits', 'difficulty', 'chainwork', 'previousblockhash']: assert_equal(json_obj[0][key], rpc_block_json[key]) # See if we can get 5 headers in one response self.nodes[1].generate(5) self.sync_all() json_obj = self.test_rest_request("/headers/5/{}".format(bb_hash)) assert_equal(len(json_obj), 5) # now we should have 5 header objects self.log.info("Test the /tx URI") tx_hash = block_json_obj['tx'][0]['txid'] json_obj = self.test_rest_request("/tx/{}".format(tx_hash)) assert_equal(json_obj['txid'], tx_hash) # Check hex format response hex_response = self.test_rest_request("/tx/{}".format(tx_hash), req_type=ReqType.HEX, ret_type=RetType.OBJ) assert_greater_than_or_equal(int(hex_response.getheader('content-length')), json_obj['size']*2) self.log.info("Test tx inclusion in the /mempool and /block URIs") # Make 3 tx and mine them on node 1 txs = [] txs.append(self.nodes[0].sendtoaddress(not_related_address, 11)) txs.append(self.nodes[0].sendtoaddress(not_related_address, 11)) txs.append(self.nodes[0].sendtoaddress(not_related_address, 11)) self.sync_all() # Check that there are exactly 3 transactions in the TX memory pool before generating the block json_obj = self.test_rest_request("/mempool/info") assert_equal(json_obj['size'], 3) # the size of the memory pool should be greater than 3x ~100 bytes assert_greater_than(json_obj['bytes'], 300) # Check that there are 
our submitted transactions in the TX memory pool json_obj = self.test_rest_request("/mempool/contents") for i, tx in enumerate(txs): assert tx in json_obj assert_equal(json_obj[tx]['spentby'], txs[i + 1:i + 2]) assert_equal(json_obj[tx]['depends'], txs[i - 1:i]) # Now mine the transactions newblockhash = self.nodes[1].generate(1) self.sync_all() # Check if the 3 tx show up in the new block json_obj = self.test_rest_request("/block/{}".format(newblockhash[0])) non_coinbase_txs = {tx['txid'] for tx in json_obj['tx'] if 'coinbase' not in tx['vin'][0]} assert_equal(non_coinbase_txs, set(txs)) # Check the same but without tx details json_obj = self.test_rest_request("/block/notxdetails/{}".format(newblockhash[0])) for tx in txs: assert tx in json_obj['tx'] self.log.info("Test the /chaininfo URI") bb_hash = self.nodes[0].getbestblockhash() json_obj = self.test_rest_request("/chaininfo") assert_equal(json_obj['bestblockhash'], bb_hash)
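The depends/spentby assertions above hold because the wallet funds each of the three sends from the unconfirmed change of the previous one, so the transactions form a chain. A small sketch of the expected slices (placeholder txids, illustration only):

txs = ["tx0", "tx1", "tx2"]           # placeholder txids
for i, tx in enumerate(txs):
    depends = txs[i - 1:i]            # [] for the first tx, else the previous txid
    spentby = txs[i + 1:i + 2]        # [] for the last tx, else the next txid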
def run_test(self): self.log.info('prepare some coins for multiple *rawtransaction commands') self.nodes[2].generate(1) self.sync_all() self.nodes[0].generate(101) self.sync_all() self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.5) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.0) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),5.0) self.sync_all() self.nodes[0].generate(5) self.sync_all() self.log.info('Test getrawtransaction on genesis block coinbase returns an error') block = self.nodes[0].getblock(self.nodes[0].getblockhash(0)) assert_raises_rpc_error(-5, "The genesis block coinbase is not considered an ordinary transaction", self.nodes[0].getrawtransaction, block['merkleroot']) self.log.info('Check parameter types and required parameters of createrawtransaction') # Test `createrawtransaction` required parameters assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction) assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, []) # Test `createrawtransaction` invalid extra parameters assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, [], {}, 0, False, 'foo') # Test `createrawtransaction` invalid `inputs` txid = '1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000' assert_raises_rpc_error(-3, "Expected type array", self.nodes[0].createrawtransaction, 'foo', {}) assert_raises_rpc_error(-1, "JSON value is not an object as expected", self.nodes[0].createrawtransaction, ['foo'], {}) assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].createrawtransaction, [{}], {}) assert_raises_rpc_error(-8, "txid must be of length 64 (not 3, for 'foo')", self.nodes[0].createrawtransaction, [{'txid': 'foo'}], {}) assert_raises_rpc_error(-8, "txid must be hexadecimal string (not 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844')", self.nodes[0].createrawtransaction, [{'txid': 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844'}], {}) assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid}], {}) assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 'foo'}], {}) assert_raises_rpc_error(-8, "Invalid parameter, vout must be positive", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': -1}], {}) assert_raises_rpc_error(-8, "Invalid parameter, sequence number is out of range", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 0, 'sequence': -1}], {}) # Test `createrawtransaction` invalid `outputs` address = self.nodes[0].getnewaddress() address2 = self.nodes[0].getnewaddress() assert_raises_rpc_error(-1, "JSON value is not an array as expected", self.nodes[0].createrawtransaction, [], 'foo') self.nodes[0].createrawtransaction(inputs=[], outputs={}) # Should not throw for backwards compatibility self.nodes[0].createrawtransaction(inputs=[], outputs=[]) assert_raises_rpc_error(-8, "Data must be hexadecimal string", self.nodes[0].createrawtransaction, [], {'data': 'foo'}) assert_raises_rpc_error(-5, "Invalid Rhombus address", self.nodes[0].createrawtransaction, [], {'foo': 0}) assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].createrawtransaction, [], {address: 'foo'}) assert_raises_rpc_error(-3, "Amount out of range", self.nodes[0].createrawtransaction, [], {address: -1}) assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % 
address, self.nodes[0].createrawtransaction, [], multidict([(address, 1), (address, 1)])) assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], [{address: 1}, {address: 1}]) assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], [{"data": 'aa'}, {"data": "bb"}]) assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], multidict([("data", 'aa'), ("data", "bb")])) assert_raises_rpc_error(-8, "Invalid parameter, key-value pair must contain exactly one key", self.nodes[0].createrawtransaction, [], [{'a': 1, 'b': 2}]) assert_raises_rpc_error(-8, "Invalid parameter, key-value pair not an object as expected", self.nodes[0].createrawtransaction, [], [['key-value pair1'], ['2']]) # Test `createrawtransaction` invalid `locktime` assert_raises_rpc_error(-3, "Expected type number", self.nodes[0].createrawtransaction, [], {}, 'foo') assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, -1) assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, 4294967296) # Test `createrawtransaction` invalid `replaceable` assert_raises_rpc_error(-3, "Expected type bool", self.nodes[0].createrawtransaction, [], {}, 0, 'foo') self.log.info('Check that createrawtransaction accepts an array and object as outputs') tx = CTransaction() # One output tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs={address: 99})))) assert_equal(len(tx.vout), 1) assert_equal( tx.serialize().hex(), self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}]), ) # Two outputs tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=OrderedDict([(address, 99), (address2, 99)]))))) assert_equal(len(tx.vout), 2) assert_equal( tx.serialize().hex(), self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}]), ) # Multiple mixed outputs tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=multidict([(address, 99), (address2, 99), ('data', '99')]))))) assert_equal(len(tx.vout), 3) assert_equal( tx.serialize().hex(), self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}, {'data': '99'}]), ) for type in ["bech32", "p2sh-segwit", "legacy"]: addr = self.nodes[0].getnewaddress("", type) addrinfo = self.nodes[0].getaddressinfo(addr) pubkey = addrinfo["scriptPubKey"] self.log.info('sendrawtransaction with missing prevtx info (%s)' %(type)) # Test `signrawtransactionwithwallet` invalid `prevtxs` inputs = [ {'txid' : txid, 'vout' : 3, 'sequence' : 1000}] outputs = { self.nodes[0].getnewaddress() : 1 } rawtx = self.nodes[0].createrawtransaction(inputs, outputs) prevtx = dict(txid=txid, scriptPubKey=pubkey, vout=3, amount=1) succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx]) assert succ["complete"] if type == "legacy": del prevtx["amount"] succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx]) assert succ["complete"] if type != "legacy": assert_raises_rpc_error(-3, "Missing amount", self.nodes[0].signrawtransactionwithwallet, rawtx, [ { "txid": txid, "scriptPubKey": pubkey, "vout": 3, } ]) 
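For reference, a prevtxs entry as used in the checks above. The amount is required for segwit inputs because BIP143 signatures commit to the value being spent, while legacy sighashes do not; this is a sketch built from the surrounding names, not new framework code:

prevtx = {
    "txid": txid,                # funding transaction id (hex)
    "vout": 3,                   # index of the output being spent
    "scriptPubKey": pubkey,      # hex of the output script
    "amount": 1,                 # value of the output; required for segwit signing
}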
assert_raises_rpc_error(-3, "Missing vout", self.nodes[0].signrawtransactionwithwallet, rawtx, [ { "txid": txid, "scriptPubKey": pubkey, "amount": 1, } ]) assert_raises_rpc_error(-3, "Missing txid", self.nodes[0].signrawtransactionwithwallet, rawtx, [ { "scriptPubKey": pubkey, "vout": 3, "amount": 1, } ]) assert_raises_rpc_error(-3, "Missing scriptPubKey", self.nodes[0].signrawtransactionwithwallet, rawtx, [ { "txid": txid, "vout": 3, "amount": 1 } ]) ######################################### # sendrawtransaction with missing input # ######################################### self.log.info('sendrawtransaction with missing input') inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1}] #won't exists outputs = { self.nodes[0].getnewaddress() : 4.998 } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) rawtx = self.nodes[2].signrawtransactionwithwallet(rawtx) # This will raise an exception since there are missing inputs assert_raises_rpc_error(-25, "bad-txns-inputs-missingorspent", self.nodes[2].sendrawtransaction, rawtx['hex']) ##################################### # getrawtransaction with block hash # ##################################### # make a tx by sending then generate 2 blocks; block1 has the tx in it tx = self.nodes[2].sendtoaddress(self.nodes[1].getnewaddress(), 1) block1, block2 = self.nodes[2].generate(2) self.sync_all() # We should be able to get the raw transaction by providing the correct block gottx = self.nodes[0].getrawtransaction(tx, True, block1) assert_equal(gottx['txid'], tx) assert_equal(gottx['in_active_chain'], True) # We should not have the 'in_active_chain' flag when we don't provide a block gottx = self.nodes[0].getrawtransaction(tx, True) assert_equal(gottx['txid'], tx) assert 'in_active_chain' not in gottx # We should not get the tx if we provide an unrelated block assert_raises_rpc_error(-5, "No such transaction found", self.nodes[0].getrawtransaction, tx, True, block2) # An invalid block hash should raise the correct errors assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].getrawtransaction, tx, True, True) assert_raises_rpc_error(-8, "parameter 3 must be of length 64 (not 6, for 'foobar')", self.nodes[0].getrawtransaction, tx, True, "foobar") assert_raises_rpc_error(-8, "parameter 3 must be of length 64 (not 8, for 'abcd1234')", self.nodes[0].getrawtransaction, tx, True, "abcd1234") assert_raises_rpc_error(-8, "parameter 3 must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].getrawtransaction, tx, True, "ZZZ0000000000000000000000000000000000000000000000000000000000000") assert_raises_rpc_error(-5, "Block hash not found", self.nodes[0].getrawtransaction, tx, True, "0000000000000000000000000000000000000000000000000000000000000000") # Undo the blocks and check in_active_chain self.nodes[0].invalidateblock(block1) gottx = self.nodes[0].getrawtransaction(txid=tx, verbose=True, blockhash=block1) assert_equal(gottx['in_active_chain'], False) self.nodes[0].reconsiderblock(block1) assert_equal(self.nodes[0].getbestblockhash(), block2) ######################### # RAW TX MULTISIG TESTS # ######################### # 2of2 test addr1 = self.nodes[2].getnewaddress() addr2 = self.nodes[2].getnewaddress() addr1Obj = self.nodes[2].getaddressinfo(addr1) addr2Obj = self.nodes[2].getaddressinfo(addr2) # Tests for createmultisig and addmultisigaddress assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, 
["01020304"]) self.nodes[0].createmultisig(2, [addr1Obj['pubkey'], addr2Obj['pubkey']]) # createmultisig can only take public keys assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 2, [addr1Obj['pubkey'], addr1]) # addmultisigaddress can take both pubkeys and addresses so long as they are in the wallet, which is tested here. mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr1])['address'] #use balance deltas instead of absolute values bal = self.nodes[2].getbalance() # send 1.2 BTC to msig adr txId = self.nodes[0].sendtoaddress(mSigObj, 1.2) self.sync_all() self.nodes[0].generate(1) self.sync_all() assert_equal(self.nodes[2].getbalance(), bal+Decimal('1.20000000')) #node2 has both keys of the 2of2 ms addr., tx should affect the balance # 2of3 test from different nodes bal = self.nodes[2].getbalance() addr1 = self.nodes[1].getnewaddress() addr2 = self.nodes[2].getnewaddress() addr3 = self.nodes[2].getnewaddress() addr1Obj = self.nodes[1].getaddressinfo(addr1) addr2Obj = self.nodes[2].getaddressinfo(addr2) addr3Obj = self.nodes[2].getaddressinfo(addr3) mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])['address'] txId = self.nodes[0].sendtoaddress(mSigObj, 2.2) decTx = self.nodes[0].gettransaction(txId) rawTx = self.nodes[0].decoderawtransaction(decTx['hex']) self.sync_all() self.nodes[0].generate(1) self.sync_all() #THIS IS AN INCOMPLETE FEATURE #NODE2 HAS TWO OF THREE KEY AND THE FUNDS SHOULD BE SPENDABLE AND COUNT AT BALANCE CALCULATION assert_equal(self.nodes[2].getbalance(), bal) #for now, assume the funds of a 2of3 multisig tx are not marked as spendable txDetails = self.nodes[0].gettransaction(txId, True) rawTx = self.nodes[0].decoderawtransaction(txDetails['hex']) vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('2.20000000')) bal = self.nodes[0].getbalance() inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex'], "amount" : vout['value']}] outputs = { self.nodes[0].getnewaddress() : 2.19 } rawTx = self.nodes[2].createrawtransaction(inputs, outputs) rawTxPartialSigned = self.nodes[1].signrawtransactionwithwallet(rawTx, inputs) assert_equal(rawTxPartialSigned['complete'], False) #node1 only has one key, can't comp. 
sign the tx rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx, inputs) assert_equal(rawTxSigned['complete'], True) #node2 can sign the tx compl., own two of three keys self.nodes[2].sendrawtransaction(rawTxSigned['hex']) rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex']) self.sync_all() self.nodes[0].generate(1) self.sync_all() assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx # 2of2 test for combining transactions bal = self.nodes[2].getbalance() addr1 = self.nodes[1].getnewaddress() addr2 = self.nodes[2].getnewaddress() addr1Obj = self.nodes[1].getaddressinfo(addr1) addr2Obj = self.nodes[2].getaddressinfo(addr2) self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address'] mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address'] mSigObjValid = self.nodes[2].getaddressinfo(mSigObj) txId = self.nodes[0].sendtoaddress(mSigObj, 2.2) decTx = self.nodes[0].gettransaction(txId) rawTx2 = self.nodes[0].decoderawtransaction(decTx['hex']) self.sync_all() self.nodes[0].generate(1) self.sync_all() assert_equal(self.nodes[2].getbalance(), bal) # the funds of a 2of2 multisig tx should not be marked as spendable txDetails = self.nodes[0].gettransaction(txId, True) rawTx2 = self.nodes[0].decoderawtransaction(txDetails['hex']) vout = next(o for o in rawTx2['vout'] if o['value'] == Decimal('2.20000000')) bal = self.nodes[0].getbalance() inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex'], "redeemScript" : mSigObjValid['hex'], "amount" : vout['value']}] outputs = { self.nodes[0].getnewaddress() : 2.19 } rawTx2 = self.nodes[2].createrawtransaction(inputs, outputs) rawTxPartialSigned1 = self.nodes[1].signrawtransactionwithwallet(rawTx2, inputs) self.log.debug(rawTxPartialSigned1) assert_equal(rawTxPartialSigned1['complete'], False) #node1 only has one key, can't comp. sign the tx rawTxPartialSigned2 = self.nodes[2].signrawtransactionwithwallet(rawTx2, inputs) self.log.debug(rawTxPartialSigned2) assert_equal(rawTxPartialSigned2['complete'], False) #node2 only has one key, can't comp. sign the tx rawTxComb = self.nodes[2].combinerawtransaction([rawTxPartialSigned1['hex'], rawTxPartialSigned2['hex']]) self.log.debug(rawTxComb) self.nodes[2].sendrawtransaction(rawTxComb) rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb) self.sync_all() self.nodes[0].generate(1) self.sync_all() assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx # decoderawtransaction tests # witness transaction encrawtx = "010000000001010000000000000072c1a6a246ae63f74f931e8365e15a089c68d61900000000000000000000ffffffff0100e1f50500000000000102616100000000" decrawtx = self.nodes[0].decoderawtransaction(encrawtx, True) # decode as witness transaction assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000')) assert_raises_rpc_error(-22, 'TX decode failed', self.nodes[0].decoderawtransaction, encrawtx, False) # force decode as non-witness transaction # non-witness transaction encrawtx = "01000000010000000000000072c1a6a246ae63f74f931e8365e15a089c68d61900000000000000000000ffffffff0100e1f505000000000000000000" decrawtx = self.nodes[0].decoderawtransaction(encrawtx, False) # decode as non-witness transaction assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000')) # getrawtransaction tests # 1. 
valid parameters - only supply txid txId = rawTx["txid"] assert_equal(self.nodes[0].getrawtransaction(txId), rawTxSigned['hex']) # 2. valid parameters - supply txid and 0 for non-verbose assert_equal(self.nodes[0].getrawtransaction(txId, 0), rawTxSigned['hex']) # 3. valid parameters - supply txid and False for non-verbose assert_equal(self.nodes[0].getrawtransaction(txId, False), rawTxSigned['hex']) # 4. valid parameters - supply txid and 1 for verbose. # We only check the "hex" field of the output so we don't need to update this test every time the output format changes. assert_equal(self.nodes[0].getrawtransaction(txId, 1)["hex"], rawTxSigned['hex']) # 5. valid parameters - supply txid and True for non-verbose assert_equal(self.nodes[0].getrawtransaction(txId, True)["hex"], rawTxSigned['hex']) # 6. invalid parameters - supply txid and string "Flase" assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txId, "Flase") # 7. invalid parameters - supply txid and empty array assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txId, []) # 8. invalid parameters - supply txid and empty dict assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txId, {}) inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 1000}] outputs = { self.nodes[0].getnewaddress() : 1 } rawtx = self.nodes[0].createrawtransaction(inputs, outputs) decrawtx= self.nodes[0].decoderawtransaction(rawtx) assert_equal(decrawtx['vin'][0]['sequence'], 1000) # 9. invalid parameters - sequence number out of range inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : -1}] outputs = { self.nodes[0].getnewaddress() : 1 } assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs) # 10. invalid parameters - sequence number out of range inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967296}] outputs = { self.nodes[0].getnewaddress() : 1 } assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs) inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967294}] outputs = { self.nodes[0].getnewaddress() : 1 } rawtx = self.nodes[0].createrawtransaction(inputs, outputs) decrawtx= self.nodes[0].decoderawtransaction(rawtx) assert_equal(decrawtx['vin'][0]['sequence'], 4294967294) #################################### # TRANSACTION VERSION NUMBER TESTS # #################################### # Test the minimum transaction version number that fits in a signed 32-bit integer. tx = CTransaction() tx.nVersion = -0x80000000 rawtx = ToHex(tx) decrawtx = self.nodes[0].decoderawtransaction(rawtx) assert_equal(decrawtx['version'], -0x80000000) """ # Collides with rhombus version # Test the maximum transaction version number that fits in a signed 32-bit integer. tx = CTransaction() tx.nVersion = 0x7fffffff rawtx = ToHex(tx) decrawtx = self.nodes[0].decoderawtransaction(rawtx) assert_equal(decrawtx['version'], 0x7fffffff) """ self.log.info('sendrawtransaction/testmempoolaccept with maxfeerate') # Test a transaction with a small fee. 
txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0) rawTx = self.nodes[0].getrawtransaction(txId, True) vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('1.00000000')) self.sync_all() inputs = [{ "txid" : txId, "vout" : vout['n'] }] # Fee 10,000 satoshis, (1 - (10000 sat * 0.00000001 BTC/sat)) = 0.9999 outputs = { self.nodes[0].getnewaddress() : Decimal("0.99990000") } rawTx = self.nodes[2].createrawtransaction(inputs, outputs) rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx) assert_equal(rawTxSigned['complete'], True) # Fee 10,000 satoshis, ~100 b transaction, fee rate should land around 100 sat/byte = 0.00100000 BTC/kB # Thus, testmempoolaccept should reject testres = self.nodes[2].testmempoolaccept([rawTxSigned['hex']], 0.00001000)[0] assert_equal(testres['allowed'], False) assert_equal(testres['reject-reason'], 'absurdly-high-fee') # and sendrawtransaction should throw assert_raises_rpc_error(-26, "absurdly-high-fee", self.nodes[2].sendrawtransaction, rawTxSigned['hex'], 0.00001000) # and the following calls should both succeed testres = self.nodes[2].testmempoolaccept(rawtxs=[rawTxSigned['hex']])[0] assert_equal(testres['allowed'], True) self.nodes[2].sendrawtransaction(hexstring=rawTxSigned['hex']) # Test a transaction with a large fee. txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0) rawTx = self.nodes[0].getrawtransaction(txId, True) vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('1.00000000')) self.sync_all() inputs = [{ "txid" : txId, "vout" : vout['n'] }] # Fee 2,000,000 satoshis, (1 - (2000000 sat * 0.00000001 BTC/sat)) = 0.98 outputs = { self.nodes[0].getnewaddress() : Decimal("0.98000000") } rawTx = self.nodes[2].createrawtransaction(inputs, outputs) rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx) assert_equal(rawTxSigned['complete'], True) # Fee 2,000,000 satoshis, ~100 b transaction, fee rate should land around 20,000 sat/byte = 0.20000000 BTC/kB # Thus, testmempoolaccept should reject testres = self.nodes[2].testmempoolaccept([rawTxSigned['hex']])[0] assert_equal(testres['allowed'], False) assert_equal(testres['reject-reason'], 'absurdly-high-fee') # and sendrawtransaction should throw assert_raises_rpc_error(-26, "absurdly-high-fee", self.nodes[2].sendrawtransaction, rawTxSigned['hex']) # and the following calls should both succeed testres = self.nodes[2].testmempoolaccept(rawtxs=[rawTxSigned['hex']], maxfeerate='0.20000000')[0] assert_equal(testres['allowed'], True) self.nodes[2].sendrawtransaction(hexstring=rawTxSigned['hex'], maxfeerate='0.20000000')
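A back-of-the-envelope check of the feerate reasoning in the small-fee case above, using the test's own assumed ~100-vbyte size (illustration only):

from decimal import Decimal

fee = Decimal("1.0") - Decimal("0.99990000")           # 0.0001 BTC = 10,000 sat
assumed_vsize_kvb = Decimal("100") / Decimal("1000")   # ~100 vbytes, per the comment above
feerate = fee / assumed_vsize_kvb                      # ~0.001 BTC/kvB
assert feerate > Decimal("0.00001000")                 # exceeds the maxfeerate, so it is rejected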
def decoderawtransaction_asm_sighashtype(self): """Test decoding scripts via RPC command "decoderawtransaction". This test is in with the "decodescript" tests because they are testing the same "asm" script decodes. """ # this test case uses a random plain vanilla mainnet transaction with a single P2PKH input and output tx = '0100000001696a20784a2c70143f634e95227dbdfdf0ecd51647052e70854512235f5986ca010000008a47304402207174775824bec6c2700023309a168231ec80b82c6069282f5133e6f11cbb04460220570edc55c7c5da2ca687ebd0372d3546ebc3f810516a002350cac72dfe192dfb014104d3f898e6487787910a690410b7a917ef198905c27fb9d3b0a42da12aceae0544fc7088d239d9a48f2828a15a09e84043001f27cc80d162cb95404e1210161536ffffffff0100e1f505000000001976a914eb6c6e0cdb2d256a32d97b8df1fc75d1920d9bca88ac00000000' rpc_result = self.nodes[0].decoderawtransaction(tx) assert_equal( '304402207174775824bec6c2700023309a168231ec80b82c6069282f5133e6f11cbb04460220570edc55c7c5da2ca687ebd0372d3546ebc3f810516a002350cac72dfe192dfb[ALL] 04d3f898e6487787910a690410b7a917ef198905c27fb9d3b0a42da12aceae0544fc7088d239d9a48f2828a15a09e84043001f27cc80d162cb95404e1210161536', rpc_result['vin'][0]['scriptSig']['asm']) # this test case uses a mainnet transaction that has a P2SH input and both P2PKH and P2SH outputs. # it's from James D'Angelo's awesome introductory videos about multisig: https://www.youtube.com/watch?v=zIbUSaZBJgU and https://www.youtube.com/watch?v=OSA1pwlaypc # verify that we have not altered scriptPubKey decoding. tx = '01000000018d1f5635abd06e2c7e2ddf58dc85b3de111e4ad6e0ab51bb0dcf5e84126d927300000000fdfe0000483045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea01483045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75014c695221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53aeffffffff02611e0000000000001976a914dc863734a218bfe83ef770ee9d41a27f824a6e5688acee2a02000000000017a9142a5edea39971049a540474c6a99edf0aa4074c588700000000' rpc_result = self.nodes[0].decoderawtransaction(tx) assert_equal( '8e3730608c3b0bb5df54f09076e196bc292a8e39a78e73b44b6ba08c78f5cbb0', rpc_result['txid']) assert_equal( '0 3045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea[ALL] 3045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75[ALL] 5221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53ae', rpc_result['vin'][0]['scriptSig']['asm']) assert_equal( 'OP_DUP OP_HASH160 dc863734a218bfe83ef770ee9d41a27f824a6e56 OP_EQUALVERIFY OP_CHECKSIG', rpc_result['vout'][0]['scriptPubKey']['asm']) assert_equal( 'OP_HASH160 2a5edea39971049a540474c6a99edf0aa4074c58 OP_EQUAL', rpc_result['vout'][1]['scriptPubKey']['asm']) txSave = CTransaction() txSave.deserialize(BytesIO(hex_str_to_bytes(tx))) # make sure that a specifically crafted op_return value will not pass all the IsDERSignature checks and then get decoded as a sighash type tx = 
'01000000015ded05872fdbda629c7d3d02b194763ce3b9b1535ea884e3c8e765d42e316724020000006b48304502204c10d4064885c42638cbff3585915b322de33762598321145ba033fc796971e2022100bb153ad3baa8b757e30a2175bd32852d2e1cb9080f84d7e32fcdfd667934ef1b012103163c0ff73511ea1743fb5b98384a2ff09dd06949488028fd819f4d83f56264efffffffff0200000000000000000b6a0930060201000201000180380100000000001976a9141cabd296e753837c086da7a45a6c2fe0d49d7b7b88ac00000000' rpc_result = self.nodes[0].decoderawtransaction(tx) assert_equal('OP_RETURN 300602010002010001', rpc_result['vout'][0]['scriptPubKey']['asm']) # verify that we have not altered scriptPubKey processing even of a specially crafted P2PKH pubkeyhash and P2SH redeem script hash that is made to pass the der signature checks tx = '01000000018d1f5635abd06e2c7e2ddf58dc85b3de111e4ad6e0ab51bb0dcf5e84126d927300000000fdfe0000483045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea01483045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75014c695221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53aeffffffff02611e0000000000001976a914301102070101010101010102060101010101010188acee2a02000000000017a91430110207010101010101010206010101010101018700000000' rpc_result = self.nodes[0].decoderawtransaction(tx) assert_equal( 'OP_DUP OP_HASH160 3011020701010101010101020601010101010101 OP_EQUALVERIFY OP_CHECKSIG', rpc_result['vout'][0]['scriptPubKey']['asm']) assert_equal( 'OP_HASH160 3011020701010101010101020601010101010101 OP_EQUAL', rpc_result['vout'][1]['scriptPubKey']['asm']) # some more full transaction tests of varying specific scriptSigs. used instead of # tests in decodescript_script_sig because the decodescript RPC is specifically # for working on scriptPubKeys (argh!). push_signature = bytes_to_hex_str( txSave.vin[0].scriptSig)[2:(0x48 * 2 + 4)] signature = push_signature[2:] der_signature = signature[:-2] signature_sighash_decoded = der_signature + '[ALL]' signature_2 = der_signature + '82' push_signature_2 = '48' + signature_2 signature_2_sighash_decoded = der_signature + '[NONE|ANYONECANPAY]' # 1) P2PK scriptSig txSave.vin[0].scriptSig = hex_str_to_bytes(push_signature) rpc_result = self.nodes[0].decoderawtransaction( bytes_to_hex_str(txSave.serialize())) assert_equal(signature_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm']) # make sure that the sighash decodes come out correctly for a more complex / lesser used case. txSave.vin[0].scriptSig = hex_str_to_bytes(push_signature_2) rpc_result = self.nodes[0].decoderawtransaction( bytes_to_hex_str(txSave.serialize())) assert_equal(signature_2_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm']) # 2) multisig scriptSig txSave.vin[0].scriptSig = hex_str_to_bytes('00' + push_signature + push_signature_2) rpc_result = self.nodes[0].decoderawtransaction( bytes_to_hex_str(txSave.serialize())) assert_equal( '0 ' + signature_sighash_decoded + ' ' + signature_2_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm']) # 3) test a scriptSig that contains more than push operations. # in fact, it contains an OP_RETURN with data specially crafted to cause improper decode if the code does not catch it. 
txSave.vin[0].scriptSig = hex_str_to_bytes('6a143011020701010101010101020601010101010101')
rpc_result = self.nodes[0].decoderawtransaction(bytes_to_hex_str(txSave.serialize()))
assert_equal('OP_RETURN 3011020701010101010101020601010101010101', rpc_result['vin'][0]['scriptSig']['asm'])
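The decoded suffixes asserted in this test come from the trailing sighash byte of each signature. A simplified sketch of the mapping (not the node's actual decoder), covering the two cases used here:

SIGHASH_NAMES = {0x01: "ALL", 0x02: "NONE", 0x03: "SINGLE"}
SIGHASH_ANYONECANPAY = 0x80

def sighash_suffix(sighash_byte):
    base = SIGHASH_NAMES[sighash_byte & 0x1f]      # low bits select the base type
    if sighash_byte & SIGHASH_ANYONECANPAY:
        return "[{}|ANYONECANPAY]".format(base)
    return "[{}]".format(base)

assert sighash_suffix(0x01) == "[ALL]"
assert sighash_suffix(0x82) == "[NONE|ANYONECANPAY]"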
def p2sh_address_to_script(self, v):
    bare = CScript(hex_str_to_bytes(v['hex']))
    p2sh = CScript(hex_str_to_bytes(v['scriptPubKey']))
    p2wsh = CScript([OP_0, sha256(bare)])
    p2sh_p2wsh = CScript([OP_HASH160, hash160(p2wsh), OP_EQUAL])
    return [bare, p2sh, p2wsh, p2sh_p2wsh]
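A usage sketch for the helper above (node, p2sh_addr and test are hypothetical placeholders): getaddressinfo on a P2SH address known to the wallet returns 'hex' (the redeem script) and 'scriptPubKey', which the helper expands into the bare script and its P2SH, P2WSH and P2SH-P2WSH wrappings:

v = node.getaddressinfo(p2sh_addr)                               # hypothetical node and address
bare, p2sh, p2wsh, p2sh_p2wsh = test.p2sh_address_to_script(v)   # 'test' is the test instance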
def run_test(self): self.nodes[0].add_p2p_connection(P2PDataStore()) self.log.info("Generate blocks in the past for coinbase outputs.") start_time = 1510247077 + 600 * 1000 + 101 long_past_time = start_time - 600 * 1000 # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future self.nodes[0].setmocktime( long_past_time - 100 ) # enough so that the generated blocks will still all be before long_past_time self.coinbase_blocks = self.nodes[0].generate( 1 + 16 + 2 * 32 + 1) # 82 blocks generated for inputs self.nodes[0].setmocktime( 0 ) # set time back to present so yielded blocks aren't in the future as we advance last_block_time self.tipheight = 82 # height of the next block to build self.last_block_time = long_past_time self.tip = int(self.nodes[0].getbestblockhash(), 16) self.nodeaddress = self.nodes[0].getnewaddress() self.log.info("Test that the csv softfork is DEFINED") assert_equal( get_bip9_status(self.nodes[0], 'csv')['status'], 'defined') test_blocks = self.generate_blocks(61, 4) # Fail to achieve LOCKED_IN 100 out of 144 signal bit 0 # using a variety of bits to simulate multiple parallel softforks test_blocks = self.generate_blocks( 50, 536870913, test_blocks) # 0x20000001 (signalling ready) test_blocks = self.generate_blocks( 20, 4, test_blocks) # 0x00000004 (signalling not) test_blocks = self.generate_blocks( 50, 536871169, test_blocks) # 0x20000101 (signalling ready) test_blocks = self.generate_blocks( 24, 536936448, test_blocks) # 0x20010000 (signalling not) # 108 out of 144 signal bit 0 to achieve lock-in # using a variety of bits to simulate multiple parallel softforks test_blocks = self.generate_blocks( 58, 536870913, test_blocks) # 0x20000001 (signalling ready) test_blocks = self.generate_blocks( 26, 4, test_blocks) # 0x00000004 (signalling not) test_blocks = self.generate_blocks( 50, 536871169, test_blocks) # 0x20000101 (signalling ready) test_blocks = self.generate_blocks( 10, 536936448, test_blocks) # 0x20010000 (signalling not) # 140 more version 4 blocks test_blocks = self.generate_blocks(130, 4, test_blocks) extend_txs = [] # split 50 coinbases into 2 unspents so we have enough unspent txs for coinbase_block in self.coinbase_blocks[0:50]: amount = (INITIAL_BLOCK_REWARD - 0.01) / 2.0 addr_a = self.nodes[0].getnewaddress() addr_b = self.nodes[0].getnewaddress() inputs = [{ 'txid': self.nodes[0].getblock(coinbase_block)['tx'][0], 'vout': 0 }] outputs = {addr_a: amount, addr_b: amount} rawtx = self.nodes[0].createrawtransaction(inputs, outputs) res = self.nodes[0].signrawtransactionwithwallet(rawtx) rawtx = res['hex'] tx = CTransaction() f = BytesIO(hex_str_to_bytes(rawtx)) tx.deserialize(f) extend_txs.append(tx) test_blocks = self.generate_blocks(10, 4, test_blocks, extend_txs=extend_txs) self.sync_blocks(test_blocks[0:61], request_block=True) # Advanced from DEFINED to STARTED, height = 143 print(self.nodes[0].getblockcount()) #self.log.info("Advance from DEFINED to STARTED, height = 143") assert_equal( get_bip9_status(self.nodes[0], 'csv')['status'], 'started') #self.log.info("Fail to achieve LOCKED_IN") self.sync_blocks(test_blocks[61:61 + 144]) # 2 # Failed to advance past STARTED, height = 287 assert_equal( get_bip9_status(self.nodes[0], 'csv')['status'], 'started') self.sync_blocks(test_blocks[61 + 144:61 + 144 + 144]) # 3 # Advanced from STARTED to LOCKED_IN, height = 431 assert_equal( get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in') self.sync_blocks(test_blocks[61 + 144 + 144:61 + 144 + 144 + 130]) # 4 
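The raw version numbers used for signalling above, spelled out (illustrative arithmetic only): BIP9 blocks set the 0x20000000 top bits, and the csv deployment here is watched on bit 0.

VERSIONBITS_TOP_BITS = 0x20000000
assert 536870913 == VERSIONBITS_TOP_BITS | (1 << 0)              # 0x20000001: signals bit 0
assert 536871169 == VERSIONBITS_TOP_BITS | (1 << 0) | (1 << 8)   # 0x20000101: signals bits 0 and 8
assert 536936448 == VERSIONBITS_TOP_BITS | (1 << 16)             # 0x20010000: bit 0 not signalled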
self.sync_blocks(test_blocks[61 + 144 + 144 + 130:61 + 144 + 144 + 130 + 10]) # 4 self.nodes[0].generate(1) self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0) self.tipheight += 1 self.last_block_time += 600 self.unspents = [] for unspent in self.nodes[0].listunspent(): if unspent['spendable']: self.unspents.append( (unspent['txid'], unspent['vout'], unspent['amount'])) # Inputs at height = 572 # # Put inputs for all tests in the chain at height 572 (tip now = 571) (time increases by 600s per block) # Note we reuse inputs for v1 and v2 txs so must test these separately # 16 normal inputs bip68inputs = [] for i in range(16): bip68inputs.append( send_generic_unspent_input_tx(self.nodes[0], self.unspents.pop(), self.nodeaddress)) # 2 sets of 16 inputs with 10 OP_CSV OP_DROP (actually will be prepended to spending scriptSig) bip112basicinputs = [] for j in range(2): inputs = [] for i in range(16): inputs.append( send_generic_unspent_input_tx(self.nodes[0], self.unspents.pop(), self.nodeaddress)) bip112basicinputs.append(inputs) # 2 sets of 16 varied inputs with (relative_lock_time) OP_CSV OP_DROP (actually will be prepended to spending scriptSig) bip112diverseinputs = [] for j in range(2): inputs = [] for i in range(16): inputs.append( send_generic_unspent_input_tx(self.nodes[0], self.unspents.pop(), self.nodeaddress)) bip112diverseinputs.append(inputs) # 1 special input with -1 OP_CSV OP_DROP (actually will be prepended to spending scriptSig) bip112specialinput = send_generic_unspent_input_tx( self.nodes[0], self.unspents.pop(), self.nodeaddress) # 1 normal input bip113input = send_generic_unspent_input_tx(self.nodes[0], self.unspents.pop(), self.nodeaddress) self.nodes[0].setmocktime(self.last_block_time + 600) inputblockhash = self.nodes[0].generate(1)[ 0] # 1 block generated for inputs to be in chain at height 572 self.nodes[0].setmocktime(0) self.tip = int(inputblockhash, 16) self.tipheight += 1 self.last_block_time += 600 assert_equal(len(self.nodes[0].getblock(inputblockhash, True)["tx"]), 82 + 1) # 2 more version 4 blocks test_blocks = self.generate_blocks(1, 4) self.sync_blocks(test_blocks) self.log.info( "Not yet advanced to ACTIVE, height = 574 (will activate for block 576, not 575)" ) assert_equal( get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in') # Test both version 1 and version 2 transactions for all tests # BIP113 test transaction will be modified before each use to put in appropriate block time bip113tx_v1 = create_transaction(self.nodes[0], bip113input, self.nodeaddress, amount=Decimal("49.98")) bip113tx_v1.vin[0].nSequence = 0xFFFFFFFE bip113tx_v1.nVersion = 1 bip113tx_v2 = create_transaction(self.nodes[0], bip113input, self.nodeaddress, amount=Decimal("49.98")) bip113tx_v2.vin[0].nSequence = 0xFFFFFFFE bip113tx_v2.nVersion = 2 # For BIP68 test all 16 relative sequence locktimes bip68txs_v1 = create_bip68txs(self.nodes[0], bip68inputs, 1, self.nodeaddress) bip68txs_v2 = create_bip68txs(self.nodes[0], bip68inputs, 2, self.nodeaddress) # For BIP112 test: # 16 relative sequence locktimes of 10 against 10 OP_CSV OP_DROP inputs bip112txs_vary_nSequence_v1 = create_bip112txs(self.nodes[0], bip112basicinputs[0], False, 1, self.nodeaddress) bip112txs_vary_nSequence_v2 = create_bip112txs(self.nodes[0], bip112basicinputs[0], False, 2, self.nodeaddress) # 16 relative sequence locktimes of 9 against 10 OP_CSV OP_DROP inputs bip112txs_vary_nSequence_9_v1 = create_bip112txs( self.nodes[0], bip112basicinputs[1], False, 1, self.nodeaddress, -1) 
bip112txs_vary_nSequence_9_v2 = create_bip112txs( self.nodes[0], bip112basicinputs[1], False, 2, self.nodeaddress, -1) # sequence lock time of 10 against 16 (relative_lock_time) OP_CSV OP_DROP inputs bip112txs_vary_OP_CSV_v1 = create_bip112txs(self.nodes[0], bip112diverseinputs[0], True, 1, self.nodeaddress) bip112txs_vary_OP_CSV_v2 = create_bip112txs(self.nodes[0], bip112diverseinputs[0], True, 2, self.nodeaddress) # sequence lock time of 9 against 16 (relative_lock_time) OP_CSV OP_DROP inputs bip112txs_vary_OP_CSV_9_v1 = create_bip112txs(self.nodes[0], bip112diverseinputs[1], True, 1, self.nodeaddress, -1) bip112txs_vary_OP_CSV_9_v2 = create_bip112txs(self.nodes[0], bip112diverseinputs[1], True, 2, self.nodeaddress, -1) # -1 OP_CSV OP_DROP input bip112tx_special_v1 = create_bip112special(self.nodes[0], bip112specialinput, 1, self.nodeaddress) bip112tx_special_v2 = create_bip112special(self.nodes[0], bip112specialinput, 2, self.nodeaddress) self.log.info("TESTING") self.log.info("Pre-Soft Fork Tests. All txs should pass.") self.log.info("Test version 1 txs") success_txs = [] # add BIP113 tx and -1 CSV tx bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1) success_txs.append(bip113signed1) success_txs.append(bip112tx_special_v1) # add BIP 68 txs success_txs.extend(all_rlt_txs(bip68txs_v1)) # add BIP 112 with seq=10 txs success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1)) success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v1)) # try BIP 112 with seq=9 txs success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1)) success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1)) self.sync_blocks([self.create_test_block(success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) self.log.info("Test version 2 txs") success_txs = [] # add BIP113 tx and -1 CSV tx bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2) success_txs.append(bip113signed2) success_txs.append(bip112tx_special_v2) # add BIP 68 txs success_txs.extend(all_rlt_txs(bip68txs_v2)) # add BIP 112 with seq=10 txs success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v2)) success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v2)) # try BIP 112 with seq=9 txs success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2)) success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2)) self.sync_blocks([self.create_test_block(success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # 1 more version 4 block to get us to height 575 so the fork should now be active for the next block test_blocks = self.generate_blocks(1, 4) self.sync_blocks(test_blocks) assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'active') self.log.info("Post-Soft Fork Tests.") self.log.info("BIP 113 tests") # BIP 113 tests should now fail regardless of version number if nLockTime isn't satisfied by new rules bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1) bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2) for bip113tx in [bip113signed1, bip113signed2]: 
self.sync_blocks([self.create_test_block([bip113tx])], success=False) # BIP 113 tests should now pass if the locktime is < MTP bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1) bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2) for bip113tx in [bip113signed1, bip113signed2]: self.sync_blocks([self.create_test_block([bip113tx])]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # Next block height = 580 after 4 blocks of random version test_blocks = self.generate_blocks(4, 1234) self.sync_blocks(test_blocks) self.log.info("BIP 68 tests") self.log.info("Test version 1 txs - all should still pass") success_txs = [] success_txs.extend(all_rlt_txs(bip68txs_v1)) self.sync_blocks([self.create_test_block(success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) self.log.info("Test version 2 txs") # All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass bip68success_txs = [tx['tx'] for tx in bip68txs_v2 if tx['sdf']] self.sync_blocks([self.create_test_block(bip68success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # All txs without flag fail as we are at delta height = 8 < 10 and delta time = 8 * 600 < 10 * 512 bip68timetxs = [ tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and tx['stf'] ] for tx in bip68timetxs: self.sync_blocks([self.create_test_block([tx])], success=False) bip68heighttxs = [ tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and not tx['stf'] ] for tx in bip68heighttxs: self.sync_blocks([self.create_test_block([tx])], success=False) # Advance one block to 581 test_blocks = self.generate_blocks(1, 1234) self.sync_blocks(test_blocks) # Height txs should fail and time txs should now pass 9 * 600 > 10 * 512 bip68success_txs.extend(bip68timetxs) self.sync_blocks([self.create_test_block(bip68success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) for tx in bip68heighttxs: self.sync_blocks([self.create_test_block([tx])], success=False) # Advance one block to 583 test_blocks = self.generate_blocks(2, 1234) self.sync_blocks(test_blocks) # All BIP 68 txs should pass bip68success_txs.extend(bip68heighttxs) self.sync_blocks([self.create_test_block(bip68success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) self.log.info("BIP 112 tests") self.log.info("Test version 1 txs") # -1 OP_CSV tx should fail self.sync_blocks([self.create_test_block([bip112tx_special_v1])], success=False) # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass success_txs = [ tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if tx['sdf'] ] success_txs += [ tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if tx['sdf'] ] self.sync_blocks([self.create_test_block(success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, version 1 txs should now fail fail_txs = all_rlt_txs(bip112txs_vary_nSequence_v1) fail_txs += all_rlt_txs(bip112txs_vary_nSequence_9_v1) fail_txs += [ tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if not tx['sdf'] ] fail_txs += [ tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if not tx['sdf'] ] for tx in fail_txs: self.sync_blocks([self.create_test_block([tx])], success=False) self.log.info("Test version 2 txs") # -1 OP_CSV tx should fail self.sync_blocks([self.create_test_block([bip112tx_special_v2])], 
success=False) # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met) success_txs = [ tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if tx['sdf'] ] success_txs += [ tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if tx['sdf'] ] self.sync_blocks([self.create_test_block(success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs ## # All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check fail_txs = all_rlt_txs(bip112txs_vary_nSequence_9_v2) fail_txs += [ tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if not tx['sdf'] ] for tx in fail_txs: self.sync_blocks([self.create_test_block([tx])], success=False) # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail fail_txs = [ tx['tx'] for tx in bip112txs_vary_nSequence_v2 if tx['sdf'] ] for tx in fail_txs: self.sync_blocks([self.create_test_block([tx])], success=False) # If sequencelock types mismatch, tx should fail fail_txs = [ tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and tx['stf'] ] fail_txs += [ tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf'] ] for tx in fail_txs: self.sync_blocks([self.create_test_block([tx])], success=False) # Remaining txs should pass, just test masking works properly success_txs = [ tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and not tx['stf'] ] success_txs += [ tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and not tx['stf'] ] self.sync_blocks([self.create_test_block(success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # Additional test, of checking that comparison of two time types works properly time_txs = [] for tx in [ tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf'] ]: tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME | SEQ_TYPE_FLAG signtx = sign_transaction(self.nodes[0], tx) time_txs.append(signtx) self.sync_blocks([self.create_test_block(time_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
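The 'sdf' and 'stf' selections used throughout the BIP68/BIP112 checks above correspond to the BIP68 nSequence flags. A short sketch of the consensus encoding (well-known constants, not framework code):

SEQUENCE_LOCKTIME_DISABLE_FLAG = 1 << 31   # 'sdf': relative lock-time not enforced
SEQUENCE_LOCKTIME_TYPE_FLAG = 1 << 22      # 'stf': time-based (512-second units) instead of height-based
SEQUENCE_LOCKTIME_MASK = 0x0000ffff        # low 16 bits carry the lock-time value

def relative_locktime(value, time_based=False, disabled=False):
    n_sequence = value & SEQUENCE_LOCKTIME_MASK
    if time_based:
        n_sequence |= SEQUENCE_LOCKTIME_TYPE_FLAG
    if disabled:
        n_sequence |= SEQUENCE_LOCKTIME_DISABLE_FLAG
    return n_sequence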
def test_2of2_multisig(self): """ Tests that holding a name in a 2-of-2 multisig address works as expected. One key holder alone cannot sign, but both together can update the name. This verifies basic usage of P2SH multisig for names. """ self.log.info("Testing name held by 2-of-2 multisig...") # Construct a 2-of-2 multisig address shared between two nodes. pubkeyA = self.getNewPubkey(0) pubkeyB = self.getNewPubkey(1) multisig = self.nodes[0].addmultisigaddress(2, [pubkeyA, pubkeyB]) multisig_ = self.nodes[1].addmultisigaddress(2, [pubkeyA, pubkeyB]) assert_equal(multisig["address"], multisig_["address"]) p2sh = multisig['address'] # Register a new name to that address. self.nodes[0].name_register("x/name", val("value"), {"destAddress": p2sh}) self.nodes[0].generate(10) data = self.checkName(0, "x/name", val("value")) assert_equal(data['address'], p2sh) # Straight-forward name updating should fail (for both nodes). assert_raises_rpc_error(-6, None, self.nodes[0].name_update, "x/name", val("new value")) assert_raises_rpc_error(-6, None, self.nodes[1].name_update, "x/name", val("new value")) # Find some other input to add as fee. unspents = self.nodes[0].listunspent() assert len(unspents) > 0 feeInput = unspents[0] changeAddr = self.nodes[0].getnewaddress() nameAmount = Decimal("0.01") changeAmount = feeInput['amount'] - nameAmount # Construct the name update as raw transaction. addr = self.nodes[1].getnewaddress() inputs = [{"txid": data['txid'], "vout": data['vout']}, feeInput] outputs = {changeAddr: changeAmount, addr: nameAmount} txRaw = self.nodes[0].createrawtransaction(inputs, outputs) op = {"op": "name_update", "name": "x/name", "value": val("it worked")} nameInd = self.rawtxOutputIndex(0, txRaw, addr) txRaw = self.nodes[0].namerawtransaction(txRaw, nameInd, op) # Sign it partially. partial = self.nodes[0].signrawtransactionwithwallet(txRaw['hex']) assert not partial['complete'] assert_raises_rpc_error(-26, None, self.nodes[0].sendrawtransaction, partial['hex']) # Sign it fully. signed = self.nodes[1].signrawtransactionwithwallet(partial['hex']) assert signed['complete'] tx = signed['hex'] # Manipulate the signature to invalidate it. This checks whether or # not the OP_MULTISIG is actually verified (vs just the script hash # compared to the redeem script). txData = bytearray(hex_str_to_bytes(tx)) txData[44] = (txData[44] + 10) % 256 txManipulated = txData.hex() # Send the tx. The manipulation should be caught (independently of # when strict P2SH checks are enabled, since they are enforced # mandatorily in the mempool). assert_raises_rpc_error(-26, None, self.nodes[0].sendrawtransaction, txManipulated) self.nodes[0].sendrawtransaction(tx) self.nodes[0].generate(1) self.sync_blocks() # Check that it was transferred correctly. self.checkName(1, "x/name", val("it worked")) self.nodes[1].name_update("x/name", val("changed")) self.nodes[1].generate(1) self.checkName(1, "x/name", val("changed"))
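The rawtxOutputIndex helper used above is defined elsewhere in the name-test framework; one plausible shape, shown for context only, decodes the raw transaction on the given node and returns the index of the output paying to the address:

def rawtxOutputIndex(self, ind, txHex, addr):
    decoded = self.nodes[ind].decoderawtransaction(txHex)
    for i, vout in enumerate(decoded['vout']):
        if addr in vout['scriptPubKey'].get('addresses', []):
            return i
    return None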
def run_test(self): self.url = urllib.parse.urlparse(self.nodes[0].url) self.log.info("Mine blocks and send Bitcoin to node 1") # Random address so node1's balance doesn't increase not_related_address = "2MxqoHEdNQTyYeX1mHcbrrpzgojbosTpCvJ" self.nodes[0].generate(1) self.sync_all() self.nodes[1].generatetoaddress(100, not_related_address) self.sync_all() assert_equal(self.nodes[0].getbalance(), 50) txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1) self.sync_all() self.log.info("Test the /tx URI") json_obj = self.test_rest_request("/tx/{}".format(txid)) assert_equal(json_obj['txid'], txid) # Check hex format response hex_response = self.test_rest_request("/tx/{}".format(txid), req_type=ReqType.HEX, ret_type=RetType.OBJ) assert_greater_than_or_equal(int(hex_response.getheader('content-length')), json_obj['size']*2) spent = (json_obj['vin'][0]['txid'], json_obj['vin'][0]['vout']) # get the vin to later check for utxo (should be spent by then) # get n of 0.1 outpoint n, = filter_output_indices_by_value(json_obj['vout'], Decimal('0.1')) spending = (txid, n) self.log.info("Query an unspent TXO using the /getutxos URI") self.nodes[1].generatetoaddress(1, not_related_address) self.sync_all() bb_hash = self.nodes[0].getbestblockhash() assert_equal(self.nodes[1].getbalance(), Decimal("0.1")) # Check chainTip response json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending)) assert_equal(json_obj['chaintipHash'], bb_hash) # Make sure there is one utxo assert_equal(len(json_obj['utxos']), 1) assert_equal(json_obj['utxos'][0]['value'], Decimal('0.1')) self.log.info("Query a spent TXO using the /getutxos URI") json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spent)) # Check chainTip response assert_equal(json_obj['chaintipHash'], bb_hash) # Make sure there is no utxo in the response because this outpoint has been spent assert_equal(len(json_obj['utxos']), 0) # Check bitmap assert_equal(json_obj['bitmap'], "0") self.log.info("Query two TXOs using the /getutxos URI") json_obj = self.test_rest_request("/getutxos/{}-{}/{}-{}".format(*(spending + spent))) assert_equal(len(json_obj['utxos']), 1) assert_equal(json_obj['bitmap'], "10") self.log.info("Query the TXOs using the /getutxos URI with a binary response") bin_request = b'\x01\x02' for txid, n in [spending, spent]: bin_request += hex_str_to_bytes(txid) bin_request += pack("i", n) bin_response = self.test_rest_request("/getutxos", http_method='POST', req_type=ReqType.BIN, body=bin_request, ret_type=RetType.BYTES) output = BytesIO(bin_response) chain_height, = unpack("i", output.read(4)) response_hash = output.read(32)[::-1].hex() assert_equal(bb_hash, response_hash) # check if getutxo's chaintip during calculation was fine assert_equal(chain_height, 102) # chain height must be 102 self.log.info("Test the /getutxos URI with and without /checkmempool") # Create a transaction, check that it's found with /checkmempool, but # not found without. Then confirm the transaction and check that it's # found with or without /checkmempool. 
# do a tx and don't sync txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1) json_obj = self.test_rest_request("/tx/{}".format(txid)) # get the spent output to later check for utxo (should be spent by then) spent = (json_obj['vin'][0]['txid'], json_obj['vin'][0]['vout']) # get n of 0.1 outpoint n, = filter_output_indices_by_value(json_obj['vout'], Decimal('0.1')) spending = (txid, n) json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending)) assert_equal(len(json_obj['utxos']), 0) json_obj = self.test_rest_request("/getutxos/checkmempool/{}-{}".format(*spending)) assert_equal(len(json_obj['utxos']), 1) json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spent)) assert_equal(len(json_obj['utxos']), 1) json_obj = self.test_rest_request("/getutxos/checkmempool/{}-{}".format(*spent)) assert_equal(len(json_obj['utxos']), 0) self.nodes[0].generate(1) self.sync_all() json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending)) assert_equal(len(json_obj['utxos']), 1) json_obj = self.test_rest_request("/getutxos/checkmempool/{}-{}".format(*spending)) assert_equal(len(json_obj['utxos']), 1) # Do some invalid requests self.test_rest_request("/getutxos", http_method='POST', req_type=ReqType.JSON, body='{"checkmempool', status=400, ret_type=RetType.OBJ) self.test_rest_request("/getutxos", http_method='POST', req_type=ReqType.BIN, body='{"checkmempool', status=400, ret_type=RetType.OBJ) self.test_rest_request("/getutxos/checkmempool", http_method='POST', req_type=ReqType.JSON, status=400, ret_type=RetType.OBJ) # Test limits long_uri = '/'.join(["{}-{}".format(txid, n_) for n_ in range(20)]) self.test_rest_request("/getutxos/checkmempool/{}".format(long_uri), http_method='POST', status=400, ret_type=RetType.OBJ) long_uri = '/'.join(['{}-{}'.format(txid, n_) for n_ in range(15)]) self.test_rest_request("/getutxos/checkmempool/{}".format(long_uri), http_method='POST', status=200) self.nodes[0].generate(1) # generate block to not affect upcoming tests self.sync_all() self.log.info("Test the /block and /headers URIs") bb_hash = self.nodes[0].getbestblockhash() # Check binary format response = self.test_rest_request("/block/{}".format(bb_hash), req_type=ReqType.BIN, ret_type=RetType.OBJ) assert_greater_than(int(response.getheader('content-length')), BLOCK_HEADER_SIZE) response_bytes = response.read() # Compare with block header response_header = self.test_rest_request("/headers/1/{}".format(bb_hash), req_type=ReqType.BIN, ret_type=RetType.OBJ) assert_equal(int(response_header.getheader('content-length')), BLOCK_HEADER_SIZE) response_header_bytes = response_header.read() assert_equal(response_bytes[:BLOCK_HEADER_SIZE], response_header_bytes) # Check block hex format response_hex = self.test_rest_request("/block/{}".format(bb_hash), req_type=ReqType.HEX, ret_type=RetType.OBJ) assert_greater_than(int(response_hex.getheader('content-length')), BLOCK_HEADER_SIZE*2) response_hex_bytes = response_hex.read().strip(b'\n') assert_equal(binascii.hexlify(response_bytes), response_hex_bytes) # Compare with hex block header response_header_hex = self.test_rest_request("/headers/1/{}".format(bb_hash), req_type=ReqType.HEX, ret_type=RetType.OBJ) assert_greater_than(int(response_header_hex.getheader('content-length')), BLOCK_HEADER_SIZE*2) response_header_hex_bytes = response_header_hex.read(BLOCK_HEADER_SIZE*2) assert_equal(binascii.hexlify(response_bytes[:BLOCK_HEADER_SIZE]), response_header_hex_bytes) # Check json format block_json_obj = 
self.test_rest_request("/block/{}".format(bb_hash)) assert_equal(block_json_obj['hash'], bb_hash)
def run_test(self): node = self.nodes[0] self.log.info('Start with empty mempool, and 200 blocks') self.mempool_size = 0 assert_equal(node.getblockcount(), 200) assert_equal(node.getmempoolinfo()['size'], self.mempool_size) coins = node.listunspent() self.log.info('Should not accept garbage to testmempoolaccept') assert_raises_rpc_error(-3, 'Expected type array, got string', lambda: node.testmempoolaccept(rawtxs='ff00baar')) assert_raises_rpc_error(-8, 'Array must contain exactly one raw transaction for now', lambda: node.testmempoolaccept(rawtxs=['ff00baar', 'ff22'])) assert_raises_rpc_error(-22, 'TX decode failed', lambda: node.testmempoolaccept(rawtxs=['ff00baar'])) self.log.info('A transaction already in the blockchain') coin = coins.pop() # Pick a random coin(base) to spend raw_tx_in_block = node.signrawtransactionwithwallet(node.createrawtransaction( inputs=[{'txid': coin['txid'], 'vout': coin['vout']}], outputs=[{node.getnewaddress(): 0.3}, {node.getnewaddress(): 49}], ))['hex'] txid_in_block = node.sendrawtransaction(hexstring=raw_tx_in_block, maxfeerate=0) node.generate(1) self.mempool_size = 0 self.check_mempool_result( result_expected=[{'txid': txid_in_block, 'allowed': False, 'reject-reason': '18: txn-already-known'}], rawtxs=[raw_tx_in_block], ) self.log.info('A transaction not in the mempool') fee = 0.00000700 raw_tx_0 = node.signrawtransactionwithwallet(node.createrawtransaction( inputs=[{"txid": txid_in_block, "vout": 0, "sequence": BIP125_SEQUENCE_NUMBER}], # RBF is used later outputs=[{node.getnewaddress(): 0.3 - fee}], ))['hex'] tx = CTransaction() tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0))) txid_0 = tx.rehash() self.check_mempool_result( result_expected=[{'txid': txid_0, 'allowed': True}], rawtxs=[raw_tx_0], ) self.log.info('A final transaction not in the mempool') coin = coins.pop() # Pick a random coin(base) to spend raw_tx_final = node.signrawtransactionwithwallet(node.createrawtransaction( inputs=[{'txid': coin['txid'], 'vout': coin['vout'], "sequence": 0xffffffff}], # SEQUENCE_FINAL outputs=[{node.getnewaddress(): 0.025}], locktime=node.getblockcount() + 2000, # Can be anything ))['hex'] tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_final))) self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': True}], rawtxs=[tx.serialize().hex()], maxfeerate=0, ) node.sendrawtransaction(hexstring=raw_tx_final, maxfeerate=0) self.mempool_size += 1 self.log.info('A transaction in the mempool') node.sendrawtransaction(hexstring=raw_tx_0) self.mempool_size += 1 self.check_mempool_result( result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': '18: txn-already-in-mempool'}], rawtxs=[raw_tx_0], ) self.log.info('A transaction that replaces a mempool transaction') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0))) tx.vout[0].nValue -= int(fee * COIN) # Double the fee tx.vin[0].nSequence = BIP125_SEQUENCE_NUMBER + 1 # Now, opt out of RBF raw_tx_0 = node.signrawtransactionwithwallet(tx.serialize().hex())['hex'] tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0))) txid_0 = tx.rehash() self.check_mempool_result( result_expected=[{'txid': txid_0, 'allowed': True}], rawtxs=[raw_tx_0], ) self.log.info('A transaction that conflicts with an unconfirmed tx') # Send the transaction that replaces the mempool transaction and opts out of replaceability node.sendrawtransaction(hexstring=tx.serialize().hex(), maxfeerate=0) # take original raw_tx_0 tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0))) tx.vout[0].nValue -= int(4 * fee * COIN) # Set more fee # 
skip re-signing the tx self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '18: txn-mempool-conflict'}], rawtxs=[tx.serialize().hex()], maxfeerate=0, ) self.log.info('A transaction with missing inputs, that never existed') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0))) tx.vin[0].prevout = COutPoint(hash=int('ff' * 32, 16), n=14) # skip re-signing the tx self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'missing-inputs'}], rawtxs=[tx.serialize().hex()], ) self.log.info('A transaction with missing inputs, that existed once in the past') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0))) tx.vin[0].prevout.n = 1 # Set vout to 1, to spend the other outpoint (49 coins) of the in-chain-tx we want to double spend raw_tx_1 = node.signrawtransactionwithwallet(tx.serialize().hex())['hex'] txid_1 = node.sendrawtransaction(hexstring=raw_tx_1, maxfeerate=0) # Now spend both to "clearly hide" the outputs, ie. remove the coins from the utxo set by spending them raw_tx_spend_both = node.signrawtransactionwithwallet(node.createrawtransaction( inputs=[ {'txid': txid_0, 'vout': 0}, {'txid': txid_1, 'vout': 0}, ], outputs=[{node.getnewaddress(): 0.1}] ))['hex'] txid_spend_both = node.sendrawtransaction(hexstring=raw_tx_spend_both, maxfeerate=0) node.generate(1) self.mempool_size = 0 # Now see if we can add the coins back to the utxo set by sending the exact txs again self.check_mempool_result( result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': 'missing-inputs'}], rawtxs=[raw_tx_0], ) self.check_mempool_result( result_expected=[{'txid': txid_1, 'allowed': False, 'reject-reason': 'missing-inputs'}], rawtxs=[raw_tx_1], ) self.log.info('Create a signed "reference" tx for later use') raw_tx_reference = node.signrawtransactionwithwallet(node.createrawtransaction( inputs=[{'txid': txid_spend_both, 'vout': 0}], outputs=[{node.getnewaddress(): 0.05}], ))['hex'] tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) # Reference tx should be valid on itself self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': True}], rawtxs=[tx.serialize().hex()], ) self.log.info('A transaction with no outputs') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vout = [] # Skip re-signing the transaction for context independent checks from now on # tx.deserialize(BytesIO(hex_str_to_bytes(node.signrawtransactionwithwallet(tx.serialize().hex())['hex']))) self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-vout-empty'}], rawtxs=[tx.serialize().hex()], ) self.log.info('A really large transaction') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vin = [tx.vin[0]] * math.ceil(MAX_BLOCK_BASE_SIZE / len(tx.vin[0].serialize())) self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-oversize'}], rawtxs=[tx.serialize().hex()], ) self.log.info('A transaction with negative output value') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vout[0].nValue *= -1 self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-vout-negative'}], rawtxs=[tx.serialize().hex()], ) self.log.info('A transaction with too large output value') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vout[0].nValue = 21000000 * COIN + 1 self.check_mempool_result( result_expected=[{'txid': 
tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-vout-toolarge'}], rawtxs=[tx.serialize().hex()], ) self.log.info('A transaction with too large sum of output values') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vout = [tx.vout[0]] * 2 tx.vout[0].nValue = 21000000 * COIN self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-txouttotal-toolarge'}], rawtxs=[tx.serialize().hex()], ) self.log.info('A transaction with duplicate inputs') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vin = [tx.vin[0]] * 2 self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-inputs-duplicate'}], rawtxs=[tx.serialize().hex()], ) self.log.info('A coinbase transaction') # Pick the input of the first tx we signed, so it has to be a coinbase tx raw_tx_coinbase_spent = node.getrawtransaction(txid=node.decoderawtransaction(hexstring=raw_tx_in_block)['vin'][0]['txid']) tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_coinbase_spent))) self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: coinbase'}], rawtxs=[tx.serialize().hex()], ) self.log.info('Some nonstandard transactions') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.nVersion = 3 # A version currently non-standard self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: version'}], rawtxs=[tx.serialize().hex()], ) tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vout[0].scriptPubKey = CScript([OP_0]) # Some non-standard script self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: scriptpubkey'}], rawtxs=[tx.serialize().hex()], ) tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vin[0].scriptSig = CScript([OP_HASH160]) # Some not-pushonly scriptSig self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: scriptsig-not-pushonly'}], rawtxs=[tx.serialize().hex()], ) tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) output_p2sh_burn = CTxOut(nValue=540, scriptPubKey=CScript([OP_HASH160, hash160(b'burn'), OP_EQUAL])) num_scripts = 100000 // len(output_p2sh_burn.serialize()) # Use enough outputs to make the tx too large for our policy tx.vout = [output_p2sh_burn] * num_scripts self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: tx-size'}], rawtxs=[tx.serialize().hex()], ) tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vout[0] = output_p2sh_burn tx.vout[0].nValue -= 1 # Make output smaller, such that it is dust for our policy self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: dust'}], rawtxs=[tx.serialize().hex()], ) tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'\xff']) tx.vout = [tx.vout[0]] * 2 self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: multi-op-return'}], rawtxs=[tx.serialize().hex()], ) self.log.info('A timelocked transaction') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vin[0].nSequence -= 1 # Should be non-max, so locktime is not ignored tx.nLockTime = node.getblockcount() + 1 self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: 
non-final'}], rawtxs=[tx.serialize().hex()], ) self.log.info('A transaction that is locked by BIP68 sequence logic') tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference))) tx.vin[0].nSequence = 2 # We could include it in the second block mined from now, but not the very next one # Can skip re-signing the tx because of early rejection self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: non-BIP68-final'}], rawtxs=[tx.serialize().hex()], maxfeerate=0, )
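# check_mempool_result() is called throughout the cases above but is not part of this
# excerpt. A minimal sketch of what such a helper would do, inferred purely from how
# it is invoked here (expected result list, testmempoolaccept kwargs, and the
# self.mempool_size bookkeeping); the real helper may differ:
def check_mempool_result(self, result_expected, *args, **kwargs):
    """Run testmempoolaccept and verify both its result and that the mempool is untouched."""
    result_test = self.nodes[0].testmempoolaccept(*args, **kwargs)
    assert_equal(result_expected, result_test)
    # testmempoolaccept must never actually add the transaction to the mempool
    assert_equal(self.nodes[0].getmempoolinfo()['size'], self.mempool_size)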
def run_test(self): self.log.info("Mining blocks...") self.nodes[0].generate(1) self.nodes[1].generate(1) timestamp = self.nodes[1].getblock( self.nodes[1].getbestblockhash())['mediantime'] node0_address1 = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress()) #Check only one address assert_equal(node0_address1['ismine'], True) #Node 1 sync test assert_equal(self.nodes[1].getblockcount(), 1) #Address Test - before import address_info = self.nodes[1].getaddressinfo(node0_address1['address']) assert_equal(address_info['iswatchonly'], False) assert_equal(address_info['ismine'], False) # RPC importmulti ----------------------------------------------- # MyOriginalCoin Address self.log.info("Should import an address") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": address['address'] }, "timestamp": "now", }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], True) assert_equal(address_assert['ismine'], False) assert_equal(address_assert['timestamp'], timestamp) watchonly_address = address['address'] watchonly_timestamp = timestamp self.log.info("Should not import an invalid address") result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": "not valid address", }, "timestamp": "now", }]) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -5) assert_equal(result[0]['error']['message'], 'Invalid address') # ScriptPubKey + internal self.log.info("Should import a scriptPubKey with internal flag") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": address['scriptPubKey'], "timestamp": "now", "internal": True }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], True) assert_equal(address_assert['ismine'], False) assert_equal(address_assert['timestamp'], timestamp) # ScriptPubKey + internal + label self.log.info( "Should not allow a label to be specified when internal is true") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": address['scriptPubKey'], "timestamp": "now", "internal": True, "label": "Example label" }]) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -8) assert_equal(result[0]['error']['message'], 'Internal addresses should not have a label') # Nonstandard scriptPubKey + !internal self.log.info( "Should not import a nonstandard scriptPubKey without internal flag" ) nonstandardScriptPubKey = address['scriptPubKey'] + bytes_to_hex_str( script.CScript([script.OP_NOP])) address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": nonstandardScriptPubKey, "timestamp": "now", }]) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -8) assert_equal( result[0]['error']['message'], 'Internal must be set to true for nonstandard scriptPubKey imports.' 
) address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], False) assert_equal('timestamp' in address_assert, False) # Address + Public key + !Internal self.log.info("Should import an address with public key") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": address['address'] }, "timestamp": "now", "pubkeys": [address['pubkey']] }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], True) assert_equal(address_assert['ismine'], False) assert_equal(address_assert['timestamp'], timestamp) # ScriptPubKey + Public key + internal self.log.info( "Should import a scriptPubKey with internal and with public key") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) request = [{ "scriptPubKey": address['scriptPubKey'], "timestamp": "now", "pubkeys": [address['pubkey']], "internal": True }] result = self.nodes[1].importmulti(request) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], True) assert_equal(address_assert['ismine'], False) assert_equal(address_assert['timestamp'], timestamp) # Nonstandard scriptPubKey + Public key + !internal self.log.info( "Should not import a nonstandard scriptPubKey without internal and with public key" ) address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) request = [{ "scriptPubKey": nonstandardScriptPubKey, "timestamp": "now", "pubkeys": [address['pubkey']] }] result = self.nodes[1].importmulti(request) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -8) assert_equal( result[0]['error']['message'], 'Internal must be set to true for nonstandard scriptPubKey imports.' 
) address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], False) assert_equal('timestamp' in address_assert, False) # Address + Private key + !watchonly self.log.info("Should import an address with private key") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": address['address'] }, "timestamp": "now", "keys": [self.nodes[0].dumpprivkey(address['address'])] }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], True) assert_equal(address_assert['timestamp'], timestamp) self.log.info( "Should not import an address with private key if is already imported" ) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": address['address'] }, "timestamp": "now", "keys": [self.nodes[0].dumpprivkey(address['address'])] }]) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -4) assert_equal( result[0]['error']['message'], 'The wallet already contains the private key for this address or script' ) # Address + Private key + watchonly self.log.info( "Should not import an address with private key and with watchonly") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": address['address'] }, "timestamp": "now", "keys": [self.nodes[0].dumpprivkey(address['address'])], "watchonly": True }]) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -8) assert_equal(result[0]['error']['message'], 'Watch-only addresses should not include private keys') address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], False) assert_equal('timestamp' in address_assert, False) # ScriptPubKey + Private key + internal self.log.info( "Should import a scriptPubKey with internal and with private key") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": address['scriptPubKey'], "timestamp": "now", "keys": [self.nodes[0].dumpprivkey(address['address'])], "internal": True }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], True) assert_equal(address_assert['timestamp'], timestamp) # Nonstandard scriptPubKey + Private key + !internal self.log.info( "Should not import a nonstandard scriptPubKey without internal and with private key" ) address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": nonstandardScriptPubKey, "timestamp": "now", "keys": [self.nodes[0].dumpprivkey(address['address'])] }]) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -8) assert_equal( result[0]['error']['message'], 'Internal must be set to true for nonstandard scriptPubKey imports.' 
) address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], False) assert_equal('timestamp' in address_assert, False) # P2SH address sig_address_1 = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress()) sig_address_2 = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress()) sig_address_3 = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress()) multi_sig_script = self.nodes[0].createmultisig( 2, [ sig_address_1['pubkey'], sig_address_2['pubkey'], sig_address_3['pubkey'] ]) self.nodes[1].generate(100) self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00) self.nodes[1].generate(1) timestamp = self.nodes[1].getblock( self.nodes[1].getbestblockhash())['mediantime'] self.log.info("Should import a p2sh") result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": multi_sig_script['address'] }, "timestamp": "now", }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo( multi_sig_script['address']) assert_equal(address_assert['isscript'], True) assert_equal(address_assert['iswatchonly'], True) assert_equal(address_assert['timestamp'], timestamp) p2shunspent = self.nodes[1].listunspent( 0, 999999, [multi_sig_script['address']])[0] assert_equal(p2shunspent['spendable'], False) assert_equal(p2shunspent['solvable'], False) # P2SH + Redeem script sig_address_1 = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress()) sig_address_2 = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress()) sig_address_3 = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress()) multi_sig_script = self.nodes[0].createmultisig( 2, [ sig_address_1['pubkey'], sig_address_2['pubkey'], sig_address_3['pubkey'] ]) self.nodes[1].generate(100) self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00) self.nodes[1].generate(1) timestamp = self.nodes[1].getblock( self.nodes[1].getbestblockhash())['mediantime'] self.log.info("Should import a p2sh with respective redeem script") result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": multi_sig_script['address'] }, "timestamp": "now", "redeemscript": multi_sig_script['redeemScript'] }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo( multi_sig_script['address']) assert_equal(address_assert['timestamp'], timestamp) p2shunspent = self.nodes[1].listunspent( 0, 999999, [multi_sig_script['address']])[0] assert_equal(p2shunspent['spendable'], False) assert_equal(p2shunspent['solvable'], True) # P2SH + Redeem script + Private Keys + !Watchonly sig_address_1 = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress()) sig_address_2 = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress()) sig_address_3 = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress()) multi_sig_script = self.nodes[0].createmultisig( 2, [ sig_address_1['pubkey'], sig_address_2['pubkey'], sig_address_3['pubkey'] ]) self.nodes[1].generate(100) self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00) self.nodes[1].generate(1) timestamp = self.nodes[1].getblock( self.nodes[1].getbestblockhash())['mediantime'] self.log.info( "Should import a p2sh with respective redeem script and private keys" ) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": multi_sig_script['address'] }, "timestamp": "now", "redeemscript": multi_sig_script['redeemScript'], "keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), 
self.nodes[0].dumpprivkey(sig_address_2['address']) ] }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo( multi_sig_script['address']) assert_equal(address_assert['timestamp'], timestamp) p2shunspent = self.nodes[1].listunspent( 0, 999999, [multi_sig_script['address']])[0] assert_equal(p2shunspent['spendable'], False) assert_equal(p2shunspent['solvable'], True) # P2SH + Redeem script + Private Keys + Watchonly sig_address_1 = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress()) sig_address_2 = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress()) sig_address_3 = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress()) multi_sig_script = self.nodes[0].createmultisig( 2, [ sig_address_1['pubkey'], sig_address_2['pubkey'], sig_address_3['pubkey'] ]) self.nodes[1].generate(100) self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00) self.nodes[1].generate(1) timestamp = self.nodes[1].getblock( self.nodes[1].getbestblockhash())['mediantime'] self.log.info( "Should import a p2sh with respective redeem script and private keys" ) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": multi_sig_script['address'] }, "timestamp": "now", "redeemscript": multi_sig_script['redeemScript'], "keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address']) ], "watchonly": True }]) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -8) assert_equal(result[0]['error']['message'], 'Watch-only addresses should not include private keys') # Address + Public key + !Internal + Wrong pubkey self.log.info("Should not import an address with a wrong public key") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) address2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": address['address'] }, "timestamp": "now", "pubkeys": [address2['pubkey']] }]) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -5) assert_equal(result[0]['error']['message'], 'Key does not match address destination') address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], False) assert_equal('timestamp' in address_assert, False) # ScriptPubKey + Public key + internal + Wrong pubkey self.log.info( "Should not import a scriptPubKey with internal and with a wrong public key" ) address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) address2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) request = [{ "scriptPubKey": address['scriptPubKey'], "timestamp": "now", "pubkeys": [address2['pubkey']], "internal": True }] result = self.nodes[1].importmulti(request) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -5) assert_equal(result[0]['error']['message'], 'Key does not match address destination') address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], False) assert_equal('timestamp' in address_assert, False) # Address + Private key + !watchonly + Wrong private key self.log.info("Should not import an address with a wrong private key") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) address2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ 
"scriptPubKey": { "address": address['address'] }, "timestamp": "now", "keys": [self.nodes[0].dumpprivkey(address2['address'])] }]) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -5) assert_equal(result[0]['error']['message'], 'Key does not match address destination') address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], False) assert_equal('timestamp' in address_assert, False) # ScriptPubKey + Private key + internal + Wrong private key self.log.info( "Should not import a scriptPubKey with internal and with a wrong private key" ) address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) address2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": address['scriptPubKey'], "timestamp": "now", "keys": [self.nodes[0].dumpprivkey(address2['address'])], "internal": True }]) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -5) assert_equal(result[0]['error']['message'], 'Key does not match address destination') address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], False) assert_equal('timestamp' in address_assert, False) # Importing existing watch only address with new timestamp should replace saved timestamp. assert_greater_than(timestamp, watchonly_timestamp) self.log.info("Should replace previously saved watch only timestamp.") result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": watchonly_address, }, "timestamp": "now", }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(watchonly_address) assert_equal(address_assert['iswatchonly'], True) assert_equal(address_assert['ismine'], False) assert_equal(address_assert['timestamp'], timestamp) watchonly_timestamp = timestamp # restart nodes to check for proper serialization/deserialization of watch only address self.stop_nodes() self.start_nodes() address_assert = self.nodes[1].getaddressinfo(watchonly_address) assert_equal(address_assert['iswatchonly'], True) assert_equal(address_assert['ismine'], False) assert_equal(address_assert['timestamp'], watchonly_timestamp) # Bad or missing timestamps self.log.info("Should throw on invalid or missing timestamp values") assert_raises_rpc_error(-3, 'Missing required timestamp field for key', self.nodes[1].importmulti, [{ "scriptPubKey": address['scriptPubKey'], }]) assert_raises_rpc_error( -3, 'Expected number or "now" timestamp value for key. 
got type string', self.nodes[1].importmulti, [{ "scriptPubKey": address['scriptPubKey'], "timestamp": "", }]) # Import P2WPKH address as watch only self.log.info("Should import a P2WPKH address as watch only") address = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress(address_type="bech32")) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": address['address'] }, "timestamp": "now", }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], True) assert_equal(address_assert['solvable'], False) # Import P2WPKH address with public key but no private key self.log.info( "Should import a P2WPKH address and public key as solvable but not spendable" ) address = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress(address_type="bech32")) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": address['address'] }, "timestamp": "now", "pubkeys": [address['pubkey']] }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['ismine'], False) assert_equal(address_assert['solvable'], True) # Import P2WPKH address with key and check it is spendable self.log.info("Should import a P2WPKH address with key") address = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress(address_type="bech32")) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": address['address'] }, "timestamp": "now", "keys": [self.nodes[0].dumpprivkey(address['address'])] }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], True) # P2WSH multisig address without scripts or keys sig_address_1 = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress()) sig_address_2 = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress()) multi_sig_script = self.nodes[0].addmultisigaddress( 2, [sig_address_1['pubkey'], sig_address_2['pubkey']], "", "bech32") self.log.info( "Should import a p2wsh multisig as watch only without respective redeem script and private keys" ) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": multi_sig_script['address'] }, "timestamp": "now" }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo( multi_sig_script['address']) assert_equal(address_assert['solvable'], False) # Same P2WSH multisig address as above, but now with witnessscript + private keys self.log.info( "Should import a p2wsh with respective redeem script and private keys" ) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": multi_sig_script['address'] }, "timestamp": "now", "witnessscript": multi_sig_script['redeemScript'], "keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address']) ] }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo( multi_sig_script['address']) assert_equal(address_assert['solvable'], True) assert_equal(address_assert['ismine'], True) assert_equal(address_assert['sigsrequired'], 2) # P2SH-P2WPKH address with no redeemscript or public or private key sig_address_1 = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress(address_type="p2sh-segwit")) pubkeyhash = hash160(hex_str_to_bytes(sig_address_1['pubkey'])) pkscript = CScript([OP_0, pubkeyhash]) self.log.info( "Should import a p2sh-p2wpkh 
without redeem script or keys") result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": sig_address_1['address'] }, "timestamp": "now" }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(sig_address_1['address']) assert_equal(address_assert['solvable'], False) assert_equal(address_assert['ismine'], False) # P2SH-P2WPKH address + redeemscript + public key with no private key self.log.info( "Should import a p2sh-p2wpkh with respective redeem script and pubkey as solvable" ) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": sig_address_1['address'] }, "timestamp": "now", "redeemscript": bytes_to_hex_str(pkscript), "pubkeys": [sig_address_1['pubkey']] }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(sig_address_1['address']) assert_equal(address_assert['solvable'], True) assert_equal(address_assert['ismine'], False) # P2SH-P2WPKH address + redeemscript + private key sig_address_1 = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress(address_type="p2sh-segwit")) pubkeyhash = hash160(hex_str_to_bytes(sig_address_1['pubkey'])) pkscript = CScript([OP_0, pubkeyhash]) self.log.info( "Should import a p2sh-p2wpkh with respective redeem script and private keys" ) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": sig_address_1['address'] }, "timestamp": "now", "redeemscript": bytes_to_hex_str(pkscript), "keys": [self.nodes[0].dumpprivkey(sig_address_1['address'])] }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(sig_address_1['address']) assert_equal(address_assert['solvable'], True) assert_equal(address_assert['ismine'], True) # P2SH-P2WSH 1-of-1 multisig + redeemscript with no private key sig_address_1 = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress()) multi_sig_script = self.nodes[0].addmultisigaddress( 1, [sig_address_1['pubkey']], "", "p2sh-segwit") scripthash = sha256(hex_str_to_bytes(multi_sig_script['redeemScript'])) redeem_script = CScript([OP_0, scripthash]) self.log.info( "Should import a p2sh-p2wsh with respective redeem script but no private key" ) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": multi_sig_script['address'] }, "timestamp": "now", "redeemscript": bytes_to_hex_str(redeem_script), "witnessscript": multi_sig_script['redeemScript'] }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo( multi_sig_script['address']) assert_equal(address_assert['solvable'], True)
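# Every importmulti call in the test above uses the same request shape. A compact
# reference sketch of the fields exercised here (only scriptPubKey and timestamp are
# required; scriptPubKey may be either an address object or a raw hex script; the
# placeholder values below are illustrative, not taken from the test):
example_request = {
    "scriptPubKey": {"address": "<address>"},  # or a hex scriptPubKey string
    "timestamp": "now",                        # or a UNIX time used for rescanning
    "redeemscript": "<hex redeem script>",     # makes a P2SH / P2SH-P2WPKH import solvable
    "witnessscript": "<hex witness script>",   # makes a P2WSH / P2SH-P2WSH import solvable
    "pubkeys": ["<hex pubkey>"],               # solvable but still watch-only
    "keys": ["<WIF private key>"],             # spendable; rejected together with "watchonly"
    "internal": False,                         # must be True for nonstandard scriptPubKey imports
    "watchonly": False,                        # watch-only imports must not include private keys
    "label": "optional label",                 # rejected when "internal" is True
}
# node.importmulti([example_request]) returns one {"success": ...} entry per request,
# with an "error" object describing the failure when success is false.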
def run_test (self): self.log.info("Mining blocks...") self.nodes[0].generate(1) self.nodes[1].generate(1) timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime'] node0_address1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) #Check only one address assert_equal(node0_address1['ismine'], True) #Node 1 sync test assert_equal(self.nodes[1].getblockcount(),1) #Address Test - before import address_info = self.nodes[1].getaddressinfo(node0_address1['address']) assert_equal(address_info['iswatchonly'], False) assert_equal(address_info['ismine'], False) # RPC importmulti ----------------------------------------------- # Bitcoin Address (implicit non-internal) self.log.info("Should import an address") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": address['address'] }, "timestamp": "now", }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], True) assert_equal(address_assert['ismine'], False) assert_equal(address_assert['timestamp'], timestamp) assert_equal(address_assert['ischange'], False) watchonly_address = address['address'] watchonly_timestamp = timestamp self.log.info("Should not import an invalid address") result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": "not valid address", }, "timestamp": "now", }]) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -5) assert_equal(result[0]['error']['message'], 'Invalid address') # ScriptPubKey + internal self.log.info("Should import a scriptPubKey with internal flag") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": address['scriptPubKey'], "timestamp": "now", "internal": True }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], True) assert_equal(address_assert['ismine'], False) assert_equal(address_assert['timestamp'], timestamp) assert_equal(address_assert['ischange'], True) # ScriptPubKey + internal + label self.log.info("Should not allow a label to be specified when internal is true") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": address['scriptPubKey'], "timestamp": "now", "internal": True, "label": "Example label" }]) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -8) assert_equal(result[0]['error']['message'], 'Internal addresses should not have a label') # Nonstandard scriptPubKey + !internal self.log.info("Should not import a nonstandard scriptPubKey without internal flag") nonstandardScriptPubKey = address['scriptPubKey'] + bytes_to_hex_str(script.CScript([script.OP_NOP])) address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": nonstandardScriptPubKey, "timestamp": "now", }]) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -8) assert_equal(result[0]['error']['message'], 'Internal must be set to true for nonstandard scriptPubKey imports.') address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], False) assert_equal('timestamp' in address_assert, False) # Address + Public key + 
!Internal(explicit) self.log.info("Should import an address with public key") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": address['address'] }, "timestamp": "now", "pubkeys": [ address['pubkey'] ], "internal": False }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], True) assert_equal(address_assert['ismine'], False) assert_equal(address_assert['timestamp'], timestamp) # ScriptPubKey + Public key + internal self.log.info("Should import a scriptPubKey with internal and with public key") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) request = [{ "scriptPubKey": address['scriptPubKey'], "timestamp": "now", "pubkeys": [ address['pubkey'] ], "internal": True }] result = self.nodes[1].importmulti(requests=request) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], True) assert_equal(address_assert['ismine'], False) assert_equal(address_assert['timestamp'], timestamp) # Nonstandard scriptPubKey + Public key + !internal self.log.info("Should not import a nonstandard scriptPubKey without internal and with public key") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) request = [{ "scriptPubKey": nonstandardScriptPubKey, "timestamp": "now", "pubkeys": [ address['pubkey'] ] }] result = self.nodes[1].importmulti(requests=request) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -8) assert_equal(result[0]['error']['message'], 'Internal must be set to true for nonstandard scriptPubKey imports.') address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], False) assert_equal('timestamp' in address_assert, False) # Address + Private key + !watchonly self.log.info("Should import an address with private key") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": address['address'] }, "timestamp": "now", "keys": [ self.nodes[0].dumpprivkey(address['address']) ] }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], True) assert_equal(address_assert['timestamp'], timestamp) self.log.info("Should not import an address with private key if is already imported") result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": address['address'] }, "timestamp": "now", "keys": [ self.nodes[0].dumpprivkey(address['address']) ] }]) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -4) assert_equal(result[0]['error']['message'], 'The wallet already contains the private key for this address or script') # Address + Private key + watchonly self.log.info("Should not import an address with private key and with watchonly") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": address['address'] }, "timestamp": "now", "keys": [ self.nodes[0].dumpprivkey(address['address']) ], "watchonly": True }]) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -8) 
assert_equal(result[0]['error']['message'], 'Watch-only addresses should not include private keys') address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], False) assert_equal('timestamp' in address_assert, False) # ScriptPubKey + Private key + internal self.log.info("Should import a scriptPubKey with internal and with private key") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": address['scriptPubKey'], "timestamp": "now", "keys": [ self.nodes[0].dumpprivkey(address['address']) ], "internal": True }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], True) assert_equal(address_assert['timestamp'], timestamp) # Nonstandard scriptPubKey + Private key + !internal self.log.info("Should not import a nonstandard scriptPubKey without internal and with private key") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": nonstandardScriptPubKey, "timestamp": "now", "keys": [ self.nodes[0].dumpprivkey(address['address']) ] }]) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -8) assert_equal(result[0]['error']['message'], 'Internal must be set to true for nonstandard scriptPubKey imports.') address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], False) assert_equal('timestamp' in address_assert, False) # P2SH address sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) sig_address_2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) sig_address_3 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['pubkey'], sig_address_2['pubkey'], sig_address_3['pubkey']]) self.nodes[1].generate(100) self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00) self.nodes[1].generate(1) timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime'] self.log.info("Should import a p2sh") result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": multi_sig_script['address'] }, "timestamp": "now", }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(multi_sig_script['address']) assert_equal(address_assert['isscript'], True) assert_equal(address_assert['iswatchonly'], True) assert_equal(address_assert['timestamp'], timestamp) p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0] assert_equal(p2shunspent['spendable'], False) assert_equal(p2shunspent['solvable'], False) # P2SH + Redeem script sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) sig_address_2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) sig_address_3 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['pubkey'], sig_address_2['pubkey'], sig_address_3['pubkey']]) self.nodes[1].generate(100) self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00) self.nodes[1].generate(1) timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime'] self.log.info("Should import a p2sh with respective redeem 
script") result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": multi_sig_script['address'] }, "timestamp": "now", "redeemscript": multi_sig_script['redeemScript'] }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(multi_sig_script['address']) assert_equal(address_assert['timestamp'], timestamp) p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0] assert_equal(p2shunspent['spendable'], False) assert_equal(p2shunspent['solvable'], True) # P2SH + Redeem script + Private Keys + !Watchonly sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) sig_address_2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) sig_address_3 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['pubkey'], sig_address_2['pubkey'], sig_address_3['pubkey']]) self.nodes[1].generate(100) self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00) self.nodes[1].generate(1) timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime'] self.log.info("Should import a p2sh with respective redeem script and private keys") result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": multi_sig_script['address'] }, "timestamp": "now", "redeemscript": multi_sig_script['redeemScript'], "keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address'])] }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(multi_sig_script['address']) assert_equal(address_assert['timestamp'], timestamp) p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0] assert_equal(p2shunspent['spendable'], False) assert_equal(p2shunspent['solvable'], True) # P2SH + Redeem script + Private Keys + Watchonly sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) sig_address_2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) sig_address_3 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['pubkey'], sig_address_2['pubkey'], sig_address_3['pubkey']]) self.nodes[1].generate(100) self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00) self.nodes[1].generate(1) timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime'] self.log.info("Should import a p2sh with respective redeem script and private keys") result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": multi_sig_script['address'] }, "timestamp": "now", "redeemscript": multi_sig_script['redeemScript'], "keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address'])], "watchonly": True }]) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -8) assert_equal(result[0]['error']['message'], 'Watch-only addresses should not include private keys') # Address + Public key + !Internal + Wrong pubkey self.log.info("Should not import an address with a wrong public key") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) address2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": address['address'] }, "timestamp": "now", "pubkeys": [ address2['pubkey'] ] }]) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -5) 
assert_equal(result[0]['error']['message'], 'Key does not match address destination') address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], False) assert_equal('timestamp' in address_assert, False) # ScriptPubKey + Public key + internal + Wrong pubkey self.log.info("Should not import a scriptPubKey with internal and with a wrong public key") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) address2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) request = [{ "scriptPubKey": address['scriptPubKey'], "timestamp": "now", "pubkeys": [ address2['pubkey'] ], "internal": True }] result = self.nodes[1].importmulti(request) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -5) assert_equal(result[0]['error']['message'], 'Key does not match address destination') address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], False) assert_equal('timestamp' in address_assert, False) # Address + Private key + !watchonly + Wrong private key self.log.info("Should not import an address with a wrong private key") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) address2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": address['address'] }, "timestamp": "now", "keys": [ self.nodes[0].dumpprivkey(address2['address']) ] }]) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -5) assert_equal(result[0]['error']['message'], 'Key does not match address destination') address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], False) assert_equal('timestamp' in address_assert, False) # ScriptPubKey + Private key + internal + Wrong private key self.log.info("Should not import a scriptPubKey with internal and with a wrong private key") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) address2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) result = self.nodes[1].importmulti([{ "scriptPubKey": address['scriptPubKey'], "timestamp": "now", "keys": [ self.nodes[0].dumpprivkey(address2['address']) ], "internal": True }]) assert_equal(result[0]['success'], False) assert_equal(result[0]['error']['code'], -5) assert_equal(result[0]['error']['message'], 'Key does not match address destination') address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], False) assert_equal('timestamp' in address_assert, False) # Importing existing watch only address with new timestamp should replace saved timestamp. 
assert_greater_than(timestamp, watchonly_timestamp) self.log.info("Should replace previously saved watch only timestamp.") result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": watchonly_address, }, "timestamp": "now", }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(watchonly_address) assert_equal(address_assert['iswatchonly'], True) assert_equal(address_assert['ismine'], False) assert_equal(address_assert['timestamp'], timestamp) watchonly_timestamp = timestamp # restart nodes to check for proper serialization/deserialization of watch only address self.stop_nodes() self.start_nodes() address_assert = self.nodes[1].getaddressinfo(watchonly_address) assert_equal(address_assert['iswatchonly'], True) assert_equal(address_assert['ismine'], False) assert_equal(address_assert['timestamp'], watchonly_timestamp) # Bad or missing timestamps self.log.info("Should throw on invalid or missing timestamp values") assert_raises_rpc_error(-3, 'Missing required timestamp field for key', self.nodes[1].importmulti, [{ "scriptPubKey": address['scriptPubKey'], }]) assert_raises_rpc_error(-3, 'Expected number or "now" timestamp value for key. got type string', self.nodes[1].importmulti, [{ "scriptPubKey": address['scriptPubKey'], "timestamp": "", }]) # Import P2WPKH address as watch only self.log.info("Should import a P2WPKH address as watch only") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress(address_type="bech32")) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": address['address'] }, "timestamp": "now", }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], True) assert_equal(address_assert['solvable'], False) # Import P2WPKH address with public key but no private key self.log.info("Should import a P2WPKH address and public key as solvable but not spendable") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress(address_type="bech32")) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": address['address'] }, "timestamp": "now", "pubkeys": [ address['pubkey'] ] }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['ismine'], False) assert_equal(address_assert['solvable'], True) # Import P2WPKH address with key and check it is spendable self.log.info("Should import a P2WPKH address with key") address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress(address_type="bech32")) result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": address['address'] }, "timestamp": "now", "keys": [self.nodes[0].dumpprivkey(address['address'])] }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(address['address']) assert_equal(address_assert['iswatchonly'], False) assert_equal(address_assert['ismine'], True) # P2WSH multisig address without scripts or keys sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) sig_address_2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) multi_sig_script = self.nodes[0].addmultisigaddress(2, [sig_address_1['pubkey'], sig_address_2['pubkey']], "", "bech32") self.log.info("Should import a p2wsh multisig as watch only without respective redeem script and private keys") result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": multi_sig_script['address'] }, "timestamp": "now" }]) 
assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(multi_sig_script['address']) assert_equal(address_assert['solvable'], False) # Same P2WSH multisig address as above, but now with witnessscript + private keys self.log.info("Should import a p2wsh with respective redeem script and private keys") result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": multi_sig_script['address'] }, "timestamp": "now", "witnessscript": multi_sig_script['redeemScript'], "keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address']) ] }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(multi_sig_script['address']) assert_equal(address_assert['solvable'], True) assert_equal(address_assert['ismine'], True) assert_equal(address_assert['sigsrequired'], 2) # P2SH-P2WPKH address with no redeemscript or public or private key sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress(address_type="p2sh-segwit")) pubkeyhash = hash160(hex_str_to_bytes(sig_address_1['pubkey'])) pkscript = CScript([OP_0, pubkeyhash]) self.log.info("Should import a p2sh-p2wpkh without redeem script or keys") result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": sig_address_1['address'] }, "timestamp": "now" }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(sig_address_1['address']) assert_equal(address_assert['solvable'], False) assert_equal(address_assert['ismine'], False) # P2SH-P2WPKH address + redeemscript + public key with no private key self.log.info("Should import a p2sh-p2wpkh with respective redeem script and pubkey as solvable") result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": sig_address_1['address'] }, "timestamp": "now", "redeemscript": bytes_to_hex_str(pkscript), "pubkeys": [ sig_address_1['pubkey'] ] }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(sig_address_1['address']) assert_equal(address_assert['solvable'], True) assert_equal(address_assert['ismine'], False) # P2SH-P2WPKH address + redeemscript + private key sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress(address_type="p2sh-segwit")) pubkeyhash = hash160(hex_str_to_bytes(sig_address_1['pubkey'])) pkscript = CScript([OP_0, pubkeyhash]) self.log.info("Should import a p2sh-p2wpkh with respective redeem script and private keys") result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": sig_address_1['address'] }, "timestamp": "now", "redeemscript": bytes_to_hex_str(pkscript), "keys": [ self.nodes[0].dumpprivkey(sig_address_1['address'])] }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(sig_address_1['address']) assert_equal(address_assert['solvable'], True) assert_equal(address_assert['ismine'], True) # P2SH-P2WSH 1-of-1 multisig + redeemscript with no private key sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) multi_sig_script = self.nodes[0].addmultisigaddress(1, [sig_address_1['pubkey']], "", "p2sh-segwit") scripthash = sha256(hex_str_to_bytes(multi_sig_script['redeemScript'])) redeem_script = CScript([OP_0, scripthash]) self.log.info("Should import a p2sh-p2wsh with respective redeem script but no private key") result = self.nodes[1].importmulti([{ "scriptPubKey": { "address": multi_sig_script['address'] }, "timestamp": "now", "redeemscript": bytes_to_hex_str(redeem_script), "witnessscript": 
multi_sig_script['redeemScript'] }]) assert_equal(result[0]['success'], True) address_assert = self.nodes[1].getaddressinfo(multi_sig_script['address']) assert_equal(address_assert['solvable'], True)
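# The p2sh-p2wpkh and p2sh-p2wsh imports above all hinge on one derivation: the
# redeem script handed to importmulti is the version-0 witness program, and the
# imported address is the P2SH wrap of that program. A minimal sketch of that
# derivation, reusing the CScript/sha256/hash160 helpers already used in this
# file; the helper name itself is illustrative, not part of the framework.
def p2sh_p2wsh_scripts(witness_script_hex):
    witness_script = hex_str_to_bytes(witness_script_hex)
    # Redeem script revealed in the scriptSig: OP_0 <sha256(witness script)>.
    redeem_script = CScript([OP_0, sha256(witness_script)])
    # scriptPubKey of the imported address: P2SH of that redeem script.
    script_pub_key = CScript([OP_HASH160, hash160(redeem_script), OP_EQUAL])
    return bytes_to_hex_str(redeem_script), bytes_to_hex_str(script_pub_key)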
txid_0 = tx.rehash() self.check_mempool_result( result_expected=[{'txid': txid_0, 'allowed': True}], rawtxs=[raw_tx_0], ) self.log.info('A final transaction not in the mempool') coin = coins.pop() # Pick a random coin(base) to spend raw_tx_final = node.signrawtransactionwithwallet(node.createrawtransaction( inputs=[{'txid': coin['txid'], 'vout': coin['vout'], "sequence": 0xffffffff}], # SEQUENCE_FINAL outputs=[{node.getnewaddress(): 0.025}], locktime=node.getblockcount() + 2000, # Can be anything ))['hex'] tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_final))) self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': True}], rawtxs=[tx.serialize().hex()], maxfeerate=0, ) node.sendrawtransaction(hexstring=raw_tx_final, maxfeerate=0) self.mempool_size += 1 self.log.info('A transaction in the mempool') node.sendrawtransaction(hexstring=raw_tx_0) self.mempool_size += 1 self.check_mempool_result( result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': '18: txn-already-in-mempool'}], rawtxs=[raw_tx_0],
def run_test(self): print("Testing wallet secret recovery") self.test_wallet_recovery() print("General Confidential tests") # Running balances node0 = self.nodes[0].getbalance()["bitcoin"] assert_equal(node0, 21000000) # just making sure initialfreecoins is working node1 = 0 node2 = 0 self.nodes[0].generate(101) txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), node0, "", "", True) self.nodes[0].generate(101) self.sync_all() assert_equal(self.nodes[0].getbalance()["bitcoin"], node0) assert_equal(self.nodes[1].getbalance("*", 1, False, "bitcoin"), node1) assert_equal(self.nodes[2].getbalance("*", 1, False, "bitcoin"), node2) # Send 3 BTC from 0 to a new unconfidential address of 2 with # the sendtoaddress call address = self.nodes[2].getnewaddress() unconfidential_address = self.nodes[2].validateaddress(address)["unconfidential"] value0 = 3 self.nodes[0].sendtoaddress(unconfidential_address, value0) self.nodes[0].generate(101) self.sync_all() node0 = node0 - value0 node2 = node2 + value0 assert_equal(self.nodes[0].getbalance()["bitcoin"], node0) assert_equal(self.nodes[1].getbalance("*", 1, False, "bitcoin"), node1) assert_equal(self.nodes[2].getbalance()["bitcoin"], node2) # Send 5 BTC from 0 to a new address of 2 with the sendtoaddress call address2 = self.nodes[2].getnewaddress() unconfidential_address2 = self.nodes[2].validateaddress(address2)["unconfidential"] value1 = 5 confidential_tx_id = self.nodes[0].sendtoaddress(address2, value1) self.nodes[0].generate(101) self.sync_all() node0 = node0 - value1 node2 = node2 + value1 assert_equal(self.nodes[0].getbalance()["bitcoin"], node0) assert_equal(self.nodes[1].getbalance("*", 1, False, "bitcoin"), node1) assert_equal(self.nodes[2].getbalance()["bitcoin"], node2) # Send 7 BTC from 0 to the unconfidential address of 2 and 11 BTC to the # confidential address using the raw transaction interface change_address = self.nodes[0].getnewaddress() value2 = 7 value3 = 11 value23 = value2 + value3 unspent = self.nodes[0].listunspent(1, 9999999, [], True, {"asset": "bitcoin"}) unspent = [i for i in unspent if i['amount'] > value23] assert_equal(len(unspent), 1) fee = Decimal('0.0001') tx = self.nodes[0].createrawtransaction([{"txid": unspent[0]["txid"], "vout": unspent[0]["vout"], "nValue": unspent[0]["amount"]}], {unconfidential_address: value2, address2: value3, change_address: unspent[0]["amount"] - value2 - value3 - fee, "fee":fee}) tx = self.nodes[0].blindrawtransaction(tx) tx_signed = self.nodes[0].signrawtransactionwithwallet(tx) raw_tx_id = self.nodes[0].sendrawtransaction(tx_signed['hex']) self.nodes[0].generate(101) self.sync_all() node0 -= (value2 + value3) node2 += value2 + value3 assert_equal(self.nodes[0].getbalance()["bitcoin"], node0) assert_equal(self.nodes[1].getbalance("*", 1, False, "bitcoin"), node1) assert_equal(self.nodes[2].getbalance()["bitcoin"], node2) # Check 2's listreceivedbyaddress received_by_address = self.nodes[2].listreceivedbyaddress(0, False, False, "", "bitcoin") validate_by_address = [(address2, {"bitcoin": value1 + value3}), (address, {"bitcoin": value0 + value2})] assert_equal(sorted([(ele['address'], ele['amount']) for ele in received_by_address], key=lambda t: t[0]), sorted(validate_by_address, key = lambda t: t[0])) # Give an auditor (node 1) a blinding key to allow her to look at # transaction values self.nodes[1].importaddress(address2) received_by_address = self.nodes[1].listreceivedbyaddress(1, False, True) #Node sees nothing unless it understands the values 
assert_equal(len(received_by_address), 0) assert_equal(len(self.nodes[1].listunspent(1, 9999999, [], True, {"asset": "bitcoin"})), 0) # Import the blinding key blindingkey = self.nodes[2].dumpblindingkey(address2) self.nodes[1].importblindingkey(address2, blindingkey) # Check the auditor's gettransaction and listreceivedbyaddress # Needs rescan to update wallet txns conf_tx = self.nodes[1].gettransaction(confidential_tx_id, True) assert_equal(conf_tx['amount']["bitcoin"], value1) # Make sure wallet can now deblind part of transaction deblinded_tx = self.nodes[1].unblindrawtransaction(conf_tx['hex'])['hex'] found_unblinded = False for output in self.nodes[1].decoderawtransaction(deblinded_tx)["vout"]: if "value" in output and output["scriptPubKey"]["type"] != "fee": assert_equal(output["scriptPubKey"]["addresses"][0], self.nodes[1].validateaddress(address2)['unconfidential']) found_unblinded = True assert(found_unblinded) assert_equal(self.nodes[1].gettransaction(raw_tx_id, True)['amount']["bitcoin"], value3) list_unspent = self.nodes[1].listunspent(1, 9999999, [], True, {"asset": "bitcoin"}) assert_equal(list_unspent[0]['amount']+list_unspent[1]['amount'], value1+value3) received_by_address = self.nodes[1].listreceivedbyaddress(1, False, True) assert_equal(len(received_by_address), 1) assert_equal((received_by_address[0]['address'], received_by_address[0]['amount']['bitcoin']), (unconfidential_address2, value1 + value3)) # Spending a single confidential output and sending it to an # unconfidential output is not possible with CT. Test the # correct behavior of blindrawtransaction. unspent = self.nodes[0].listunspent(1, 9999999, [], True, {"asset": "bitcoin"}) unspent = [i for i in unspent if i['amount'] > value23] assert_equal(len(unspent), 1) tx = self.nodes[0].createrawtransaction([{"txid": unspent[0]["txid"], "vout": unspent[0]["vout"], "nValue": unspent[0]["amount"]}], {unconfidential_address: unspent[0]["amount"] - fee, "fee":fee}) # Test that blindrawtransaction adds an OP_RETURN output to balance blinders temptx = self.nodes[0].blindrawtransaction(tx) decodedtx = self.nodes[0].decoderawtransaction(temptx) assert_equal(decodedtx["vout"][-1]["scriptPubKey"]["asm"], "OP_RETURN") assert_equal(len(decodedtx["vout"]), 3) # Create same transaction but with a change/dummy output. # It should pass the blinding step.
value4 = 17 change_address = self.nodes[0].getrawchangeaddress() tx = self.nodes[0].createrawtransaction([{"txid": unspent[0]["txid"], "vout": unspent[0]["vout"], "nValue": unspent[0]["amount"]}], {unconfidential_address: value4, change_address: unspent[0]["amount"] - value4 - fee, "fee":fee}) tx = self.nodes[0].blindrawtransaction(tx) tx_signed = self.nodes[0].signrawtransactionwithwallet(tx) txid = self.nodes[0].sendrawtransaction(tx_signed['hex']) decodedtx = self.nodes[0].decoderawtransaction(tx_signed["hex"]) self.nodes[0].generate(101) self.sync_all() unblindfound = False for i in range(len(decodedtx["vout"])): txout = self.nodes[0].gettxout(txid, i) if txout is not None and "asset" in txout: unblindfound = True if unblindfound == False: raise Exception("No unconfidential output detected when one should exist") node0 -= value4 node2 += value4 assert_equal(self.nodes[0].getbalance()["bitcoin"], node0) assert_equal(self.nodes[1].getbalance("*", 1, False, "bitcoin"), node1) assert_equal(self.nodes[2].getbalance()["bitcoin"], node2) # Testing wallet's ability to deblind its own outputs addr = self.nodes[0].getnewaddress() addr2 = self.nodes[0].getnewaddress() # We add two to-blind outputs, fundraw adds an already-blinded change output # If we only add one, the newly blinded will be 0-blinded because input = -output raw = self.nodes[0].createrawtransaction([], {addr:Decimal('1.1'), addr2:1}) funded = self.nodes[0].fundrawtransaction(raw) # fund again to make sure no blinded outputs were created (would fail) funded = self.nodes[0].fundrawtransaction(funded["hex"]) blinded = self.nodes[0].blindrawtransaction(funded["hex"]) # blind again to make sure we know output blinders blinded2 = self.nodes[0].blindrawtransaction(blinded) # then sign and send signed = self.nodes[0].signrawtransactionwithwallet(blinded2) self.nodes[0].sendrawtransaction(signed["hex"]) # Aside: Check all outputs after fundraw are properly marked for blinding fund_decode = self.nodes[0].decoderawtransaction(funded["hex"]) for output in fund_decode["vout"][:-1]: assert "asset" in output assert "value" in output assert output["scriptPubKey"]["type"] != "fee" assert output["commitmentnonce_fully_valid"] assert fund_decode["vout"][-1]["scriptPubKey"]["type"] == "fee" assert not fund_decode["vout"][-1]["commitmentnonce_fully_valid"] # Also check that all fundraw outputs marked for blinding are blinded later for blind_tx in [blinded, blinded2]: blind_decode = self.nodes[0].decoderawtransaction(blind_tx) for output in blind_decode["vout"][:-1]: assert "asset" not in output assert "value" not in output assert output["scriptPubKey"]["type"] != "fee" assert output["commitmentnonce_fully_valid"] assert blind_decode["vout"][-1]["scriptPubKey"]["type"] == "fee" assert "asset" in blind_decode["vout"][-1] assert "value" in blind_decode["vout"][-1] assert not blind_decode["vout"][-1]["commitmentnonce_fully_valid"] # Check createblindedaddress functionality blinded_addr = self.nodes[0].getnewaddress() validated_addr = self.nodes[0].validateaddress(blinded_addr) blinding_pubkey = self.nodes[0].validateaddress(blinded_addr)["confidential_key"] blinding_key = self.nodes[0].dumpblindingkey(blinded_addr) assert_equal(blinded_addr, self.nodes[1].createblindedaddress(validated_addr["unconfidential"], blinding_pubkey)) # If a blinding key is over-ridden by a newly imported one, funds may be unaccounted for new_addr = self.nodes[0].getnewaddress() new_validated = self.nodes[0].validateaddress(new_addr) self.nodes[2].sendtoaddress(new_addr, 1) 
self.sync_all() diff_blind = self.nodes[1].createblindedaddress(new_validated["unconfidential"], blinding_pubkey) assert_equal(len(self.nodes[0].listunspent(0, 0, [new_validated["unconfidential"]])), 1) self.nodes[0].importblindingkey(diff_blind, blinding_key) # CT values for this wallet transaction have been cached via importblindingkey # therefore result will be same even though we change blinding keys assert_equal(len(self.nodes[0].listunspent(0, 0, [new_validated["unconfidential"]])), 1) # Confidential Assets Tests print("Assets tests...") # Bitcoin is the first issuance assert_equal(self.nodes[0].listissuances()[0]["assetlabel"], "bitcoin") assert_equal(len(self.nodes[0].listissuances()), 1) # Unblinded issuance of asset issued = self.nodes[0].issueasset(1, 1, False) self.nodes[0].reissueasset(issued["asset"], 1) # Compare resulting fields with getrawtransaction raw_details = self.nodes[0].getrawtransaction(issued["txid"], 1) assert_equal(issued["entropy"], raw_details["vin"][issued["vin"]]["issuance"]["assetEntropy"]) assert_equal(issued["asset"], raw_details["vin"][issued["vin"]]["issuance"]["asset"]) assert_equal(issued["token"], raw_details["vin"][issued["vin"]]["issuance"]["token"]) self.nodes[0].generate(1) self.sync_all() issued2 = self.nodes[0].issueasset(2, 1) test_asset = issued2["asset"] assert_equal(self.nodes[0].getwalletinfo()['balance'][test_asset], Decimal(2)) assert(test_asset not in self.nodes[1].getwalletinfo()['balance']) # Assets balance checking, note that accounts are completely ignored because # balance queries with accounts are horrifically broken upstream assert_equal(self.nodes[0].getbalance("*", 0, False, "bitcoin"), self.nodes[0].getbalance("*", 0, False, "bitcoin")) assert_equal(self.nodes[0].getwalletinfo()['balance']['bitcoin'], self.nodes[0].getbalance("*", 0, False, "bitcoin")) # Send some bitcoin and other assets over as well to fund wallet addr = self.nodes[2].getnewaddress() self.nodes[0].sendtoaddress(addr, 5) self.nodes[0].sendmany("", {addr: 1, self.nodes[2].getnewaddress(): 13}, 0, "", [], False, 1, "UNSET", {addr: test_asset}) self.sync_all() # Should have exactly 1 in change(trusted, though not confirmed) after sending one off assert_equal(self.nodes[0].getbalance("*", 0, False, test_asset), 1) assert_equal(self.nodes[2].getunconfirmedbalance()[test_asset], Decimal(1)) b_utxos = self.nodes[2].listunspent(0, 0, [], True, {"asset": "bitcoin"}) t_utxos = self.nodes[2].listunspent(0, 0, [], True, {"asset": test_asset}) assert_equal(len(self.nodes[2].listunspent(0, 0, [])), len(b_utxos)+len(t_utxos)) # Now craft a blinded transaction via raw api rawaddrs = [] for i in range(2): rawaddrs.append(self.nodes[1].getnewaddress()) raw_assets = self.nodes[2].createrawtransaction([{"txid":b_utxos[0]['txid'], "vout":b_utxos[0]['vout'], "nValue":b_utxos[0]['amount']}, {"txid":b_utxos[1]['txid'], "vout":b_utxos[1]['vout'], "nValue":b_utxos[1]['amount'], "asset":b_utxos[1]['asset']}, {"txid":t_utxos[0]['txid'], "vout":t_utxos[0]['vout'], "nValue":t_utxos[0]['amount'], "asset":t_utxos[0]['asset']}], {rawaddrs[1]:Decimal(t_utxos[0]['amount']), rawaddrs[0]:Decimal(b_utxos[0]['amount']+b_utxos[1]['amount']-Decimal("0.01")), "fee":Decimal("0.01")}, 0, False, {rawaddrs[0]:b_utxos[0]['asset'], rawaddrs[1]:t_utxos[0]['asset'], "fee":b_utxos[0]['asset']}) # Sign unblinded, then blinded signed_assets = self.nodes[2].signrawtransactionwithwallet(raw_assets) blind_assets = self.nodes[2].blindrawtransaction(raw_assets) signed_assets = 
self.nodes[2].signrawtransactionwithwallet(blind_assets) # And finally send self.nodes[2].sendrawtransaction(signed_assets['hex']) self.nodes[2].generate(101) self.sync_all() issuancedata = self.nodes[2].issueasset(0, Decimal('0.00000006')) #0 of asset, 6 reissuance token # Node 2 will send node 1 a reissuance token, both will generate assets self.nodes[2].sendtoaddress(self.nodes[1].getnewaddress(), Decimal('0.00000001'), "", "", False, False, 1, "UNSET", issuancedata["token"]) # node 1 needs to know about a (re)issuance to reissue itself self.nodes[1].importaddress(self.nodes[2].gettransaction(issuancedata["txid"])["details"][0]["address"]) # also send some bitcoin self.nodes[2].generate(1) self.sync_all() self.nodes[1].reissueasset(issuancedata["asset"], Decimal('0.05')) self.nodes[2].reissueasset(issuancedata["asset"], Decimal('0.025')) self.nodes[1].generate(1) self.sync_all() # Check for value accounting when asset issuance is null but token not, ie unblinded # HACK: Self-send to sweep up bitcoin inputs into blinded output. # We were hitting https://github.com/ElementsProject/elements/issues/473 for the following issuance self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[0].getwalletinfo()["balance"]["bitcoin"], "", "", True) issued = self.nodes[0].issueasset(0, 1, False) walletinfo = self.nodes[0].getwalletinfo() assert(issued["asset"] not in walletinfo["balance"]) assert_equal(walletinfo["balance"][issued["token"]], Decimal(1)) assert(issued["asset"] not in walletinfo["unconfirmed_balance"]) assert(issued["token"] not in walletinfo["unconfirmed_balance"]) # Check for value when receiving different assets by same address. self.nodes[0].sendtoaddress(unconfidential_address2, Decimal('0.00000001'), "", "", False, False, 1, "UNSET", test_asset) self.nodes[0].sendtoaddress(unconfidential_address2, Decimal('0.00000002'), "", "", False, False, 1, "UNSET", test_asset) self.nodes[0].generate(1) self.sync_all() received_by_address = self.nodes[1].listreceivedbyaddress(0, False, True) multi_asset_amount = [x for x in received_by_address if x['address'] == unconfidential_address2][0]['amount'] assert_equal(multi_asset_amount['bitcoin'], value1 + value3) assert_equal(multi_asset_amount[test_asset], Decimal('0.00000003')) # Check blinded multisig functionality and partial blinding functionality # Get two pubkeys blinded_addr = self.nodes[0].getnewaddress() pubkey = self.nodes[0].getaddressinfo(blinded_addr)["pubkey"] blinded_addr2 = self.nodes[1].getnewaddress() pubkey2 = self.nodes[1].getaddressinfo(blinded_addr2)["pubkey"] pubkeys = [pubkey, pubkey2] # Add multisig address unconfidential_addr = self.nodes[0].addmultisigaddress(2, pubkeys)["address"] self.nodes[1].addmultisigaddress(2, pubkeys) self.nodes[0].importaddress(unconfidential_addr) self.nodes[1].importaddress(unconfidential_addr) # Use blinding key from node 0's original getnewaddress call blinding_pubkey = self.nodes[0].getaddressinfo(blinded_addr)["confidential_key"] blinding_key = self.nodes[0].dumpblindingkey(blinded_addr) # Create blinded address from p2sh address and import corresponding privkey blinded_multisig_addr = self.nodes[0].createblindedaddress(unconfidential_addr, blinding_pubkey) self.nodes[0].importblindingkey(blinded_multisig_addr, blinding_key) # Issue new asset, to use different assets in one transaction when doing # partial blinding. 
Just to make these tests a bit more elaborate :-) issued3 = self.nodes[2].issueasset(1, 0) self.nodes[2].generate(1) self.sync_all() node2_balance = self.nodes[2].getbalance() assert(issued3['asset'] in node2_balance) assert_equal(node2_balance[issued3['asset']], Decimal(1)) # Send asset to blinded multisig address and check that it was received self.nodes[2].sendtoaddress(address=blinded_multisig_addr, amount=1, assetlabel=issued3['asset']) self.sync_all() # We will use this multisig UTXO in our partially-blinded transaction, # and will also check that multisig UTXO can be successfully spent # after the transaction is signed by node1 and node0 in succession. unspent_asset = self.nodes[0].listunspent(0, 0, [unconfidential_addr], True, {"asset":issued3['asset']}) assert_equal(len(unspent_asset), 1) assert(issued3['asset'] not in self.nodes[2].getbalance()) # Create new UTXO on node0 to be used in our partially-blinded transaction blinded_addr = self.nodes[0].getnewaddress() addr = self.nodes[0].validateaddress(blinded_addr)["unconfidential"] self.nodes[0].sendtoaddress(blinded_addr, 0.1) unspent = self.nodes[0].listunspent(0, 0, [addr]) assert_equal(len(unspent), 1) # Create new UTXO on node1 to be used in our partially-blinded transaction blinded_addr2 = self.nodes[1].getnewaddress() addr2 = self.nodes[1].validateaddress(blinded_addr2)["unconfidential"] self.nodes[1].sendtoaddress(blinded_addr2, 0.11) unspent2 = self.nodes[1].listunspent(0, 0, [addr2]) assert_equal(len(unspent2), 1) # The transaction will have three non-fee outputs dst_addr = self.nodes[0].getnewaddress() dst_addr2 = self.nodes[1].getnewaddress() dst_addr3 = self.nodes[2].getnewaddress() # Inputs are selected up front inputs = [{"txid": unspent2[0]["txid"], "vout": unspent2[0]["vout"]}, {"txid": unspent[0]["txid"], "vout": unspent[0]["vout"]}, {"txid": unspent_asset[0]["txid"], "vout": unspent_asset[0]["vout"]}] # Create one part of the transaction to partially blind rawtx = self.nodes[0].createrawtransaction( inputs, {dst_addr2: Decimal("0.01")}) # Create another part of the transaction to partially blind rawtx2 = self.nodes[0].createrawtransaction( inputs, {dst_addr: Decimal("0.1"), dst_addr3: Decimal("1.0")}, 0, False, {dst_addr: unspent[0]['asset'], dst_addr3: unspent_asset[0]['asset']}) sum_i = unspent2[0]["amount"] + unspent[0]["amount"] sum_o = 0.01 + 0.10 + 0.1 assert_equal(int(round(sum_i*COIN)), int(round(sum_o*COIN))) # Blind the first part of the transaction - we need to supply the # assetcommmitments for all of the inputs, for the surjectionproof # to be valid after we combine the transactions blindtx = self.nodes[1].blindrawtransaction( rawtx, True, [ unspent2[0]['assetcommitment'], unspent[0]['assetcommitment'], unspent_asset[0]['assetcommitment'] ]) # Combine the transactions # Blinded, but incomplete transaction. # 3 inputs and 1 output, but no fee output, and # it was blinded with 3 asset commitments, that means # the final transaction should have 3 inputs. btx = CTransaction() btx.deserialize(io.BytesIO(hex_str_to_bytes(blindtx))) # Unblinded transaction, with 3 inputs and 2 outputs. # We will add them to the other transaction to make it complete. ubtx = CTransaction() ubtx.deserialize(io.BytesIO(hex_str_to_bytes(rawtx2))) # We will add outputs of unblinded transaction # on top of inputs and outputs of the blinded, but incomplete transaction. 
# We also append empty witness instances to make witness arrays match # vin/vout arrays btx.wit.vtxinwit.append(CTxInWitness()) btx.vout.append(ubtx.vout[0]) btx.wit.vtxoutwit.append(CTxOutWitness()) btx.wit.vtxinwit.append(CTxInWitness()) btx.vout.append(ubtx.vout[1]) btx.wit.vtxoutwit.append(CTxOutWitness()) # Add explicit fee output btx.vout.append(CTxOut(nValue=CTxOutValue(10000000), nAsset=CTxOutAsset(BITCOIN_ASSET_OUT))) btx.wit.vtxoutwit.append(CTxOutWitness()) # Input 0 is bitcoin asset (already blinded) # Input 1 is also bitcoin asset # Input 2 is our new asset # Blind with wrong order of assetcommitments - such transaction should be rejected blindtx = self.nodes[0].blindrawtransaction( bytes_to_hex_str(btx.serialize()), True, [ unspent_asset[0]['assetcommitment'], unspent[0]['assetcommitment'], unspent2[0]['assetcommitment'] ]) stx2 = self.nodes[1].signrawtransactionwithwallet(blindtx) stx = self.nodes[0].signrawtransactionwithwallet(stx2['hex']) self.sync_all() assert_raises_rpc_error(-26, "bad-txns-in-ne-out", self.nodes[2].sendrawtransaction, stx['hex']) # Blind with correct order of assetcommitments blindtx = self.nodes[0].blindrawtransaction( bytes_to_hex_str(btx.serialize()), True, [ unspent2[0]['assetcommitment'], unspent[0]['assetcommitment'], unspent_asset[0]['assetcommitment'] ]) stx2 = self.nodes[1].signrawtransactionwithwallet(blindtx) stx = self.nodes[0].signrawtransactionwithwallet(stx2['hex']) txid = self.nodes[2].sendrawtransaction(stx['hex']) self.nodes[2].generate(1) assert self.nodes[2].getrawtransaction(txid, 1)['confirmations'] == 1 self.sync_all() # Check that the sent asset has reached its destination unconfidential_dst_addr3 = self.nodes[2].validateaddress(dst_addr3)["unconfidential"] unspent_asset2 = self.nodes[2].listunspent(1, 1, [unconfidential_dst_addr3], True, {"asset":issued3['asset']}) assert_equal(len(unspent_asset2), 1) assert_equal(unspent_asset2[0]['amount'], Decimal(1)) # And that the balance was correctly updated assert_equal(self.nodes[2].getbalance()[issued3['asset']], Decimal(1)) # Basic checks of rawblindrawtransaction functionality blinded_addr = self.nodes[0].getnewaddress() addr = self.nodes[0].validateaddress(blinded_addr)["unconfidential"] self.nodes[0].sendtoaddress(blinded_addr, 1) self.nodes[0].sendtoaddress(blinded_addr, 3) unspent = self.nodes[0].listunspent(0, 0) rawtx = self.nodes[0].createrawtransaction([{"txid":unspent[0]["txid"], "vout":unspent[0]["vout"]}, {"txid":unspent[1]["txid"], "vout":unspent[1]["vout"]}], {addr:unspent[0]["amount"]+unspent[1]["amount"]-Decimal("0.2"), "fee":Decimal("0.2")}) # Blinding will fail with 2 blinded inputs and 0 blinded outputs # since it has no notion of a wallet to fill in a 0-value OP_RETURN output try: self.nodes[0].rawblindrawtransaction(rawtx, [unspent[0]["amountblinder"], unspent[1]["amountblinder"]], [unspent[0]["amount"], unspent[1]["amount"]], [unspent[0]["asset"], unspent[1]["asset"]], [unspent[0]["assetblinder"], unspent[1]["assetblinder"]]) raise AssertionError("Shouldn't be able to blind 2 input 0 output transaction via rawblindraw") except JSONRPCException: pass # Blinded destination added, can blind, sign and send rawtx = self.nodes[0].createrawtransaction([{"txid":unspent[0]["txid"], "vout":unspent[0]["vout"]}, {"txid":unspent[1]["txid"], "vout":unspent[1]["vout"]}], {blinded_addr:unspent[0]["amount"]+unspent[1]["amount"]-Decimal("0.002"), "fee":Decimal("0.002")}) signtx = self.nodes[0].signrawtransactionwithwallet(rawtx) try: 
self.nodes[0].sendrawtransaction(signtx["hex"]) raise AssertionError("Shouldn't be able to send unblinded tx with emplaced pubkey in output without additional argument") except JSONRPCException: pass blindtx = self.nodes[0].rawblindrawtransaction(rawtx, [unspent[0]["amountblinder"], unspent[1]["amountblinder"]], [unspent[0]["amount"], unspent[1]["amount"]], [unspent[0]["asset"], unspent[1]["asset"]], [unspent[0]["assetblinder"], unspent[1]["assetblinder"]]) signtx = self.nodes[0].signrawtransactionwithwallet(blindtx) txid = self.nodes[0].sendrawtransaction(signtx["hex"]) for output in self.nodes[0].decoderawtransaction(blindtx)["vout"]: if "asset" in output and output["scriptPubKey"]["type"] != "fee": raise AssertionError("An unblinded output exists") # Test fundrawtransaction with multiple assets issue = self.nodes[0].issueasset(1, 0) assetaddr = self.nodes[0].getnewaddress() rawtx = self.nodes[0].createrawtransaction([], {assetaddr:1, self.nodes[0].getnewaddress():2}, 0, False, {assetaddr:issue["asset"]}) funded = self.nodes[0].fundrawtransaction(rawtx) blinded = self.nodes[0].blindrawtransaction(funded["hex"]) signed = self.nodes[0].signrawtransactionwithwallet(blinded) txid = self.nodes[0].sendrawtransaction(signed["hex"]) # Test fundrawtransaction with multiple inputs, creating > vout.size change rawtx = self.nodes[0].createrawtransaction([{"txid":txid, "vout":0}, {"txid":txid, "vout":1}], {self.nodes[0].getnewaddress():5}) funded = self.nodes[0].fundrawtransaction(rawtx) blinded = self.nodes[0].blindrawtransaction(funded["hex"]) signed = self.nodes[0].signrawtransactionwithwallet(blinded) txid = self.nodes[0].sendrawtransaction(signed["hex"]) # Test corner case where wallet appends a OP_RETURN output, yet doesn't blind it # due to the fact that the output value is 0-value and input pedersen commitments # self-balance. This is rare corner case, but ok. 
unblinded = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())["unconfidential"] self.nodes[0].sendtoaddress(unblinded, self.nodes[0].getbalance()["bitcoin"], "", "", True) # Make tx with blinded destination and change outputs only self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[0].getbalance()["bitcoin"]/2) # Send back again, this transaction should have 3 outputs, all unblinded txid = self.nodes[0].sendtoaddress(unblinded, self.nodes[0].getbalance()["bitcoin"], "", "", True) outputs = self.nodes[0].getrawtransaction(txid, 1)["vout"] assert_equal(len(outputs), 3) assert("value" in outputs[0] and "value" in outputs[1] and "value" in outputs[2]) assert_equal(outputs[2]["scriptPubKey"]["type"], 'nulldata') # Test burn argument in createrawtransaction raw_burn1 = self.nodes[0].createrawtransaction([], {self.nodes[0].getnewaddress():1, "burn":2}) decode_burn1 = self.nodes[0].decoderawtransaction(raw_burn1) assert_equal(len(decode_burn1["vout"]), 2) found_pay = False found_burn = False for output in decode_burn1["vout"]: if output["scriptPubKey"]["asm"] == "OP_RETURN": found_burn = True if output["asset"] != self.nodes[0].dumpassetlabels()["bitcoin"]: raise Exception("Burn should have been bitcoin(policyAsset)") if output["scriptPubKey"]["type"] == "scripthash": found_pay = True assert(found_pay and found_burn) raw_burn2 = self.nodes[0].createrawtransaction([], {self.nodes[0].getnewaddress():1, "burn":2}, 101, False, {"burn":"deadbeef"*8}) decode_burn2 = self.nodes[0].decoderawtransaction(raw_burn2) assert_equal(len(decode_burn2["vout"]), 2) found_pay = False found_burn = False for output in decode_burn2["vout"]: if output["scriptPubKey"]["asm"] == "OP_RETURN": found_burn = True if output["asset"] != "deadbeef"*8: raise Exception("Burn should have been deadbeef") if output["scriptPubKey"]["type"] == "scripthash": found_pay = True assert(found_pay and found_burn)
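# Both burn checks above walk the decoded outputs looking for an OP_RETURN
# carrying the expected asset. A small helper sketch of that scan, assuming the
# same decoderawtransaction output layout used above (unblinded vouts expose
# "asset" and "scriptPubKey"); the function name is hypothetical.
def find_burn_output(node, raw_tx_hex, expected_asset):
    for output in node.decoderawtransaction(raw_tx_hex)["vout"]:
        # A burn is an OP_RETURN output denominated in the burned asset.
        if output["scriptPubKey"]["asm"] == "OP_RETURN" and output.get("asset") == expected_asset:
            return output
    return None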
def run_test(self): self.description = "Covers the reorg with a zc public spend in vtx" self.init_test() DENOM_TO_USE = 10 # zc denomination INITAL_MINED_BLOCKS = 321 # First mined blocks (rewards collected to mint) MORE_MINED_BLOCKS = 105 # More blocks mined before spending zerocoins # 1) Starting mining blocks self.log.info("Mining %d blocks.." % INITAL_MINED_BLOCKS) self.node.generate(INITAL_MINED_BLOCKS) # 2) Mint 2 zerocoins self.node.mintzerocoin(DENOM_TO_USE) self.node.generate(1) self.node.mintzerocoin(DENOM_TO_USE) self.node.generate(1) # 3) Mine additional blocks and collect the mints self.log.info("Mining %d blocks.." % MORE_MINED_BLOCKS) self.node.generate(MORE_MINED_BLOCKS) self.log.info("Collecting mints...") mints = self.node.listmintedzerocoins(True, False) assert len(mints) == 2, "mints list has len %d (!= 2)" % len(mints) # 4) Get unspent coins and chain tip self.unspent = self.node.listunspent() block_count = self.node.getblockcount() pastBlockHash = self.node.getblockhash(block_count) self.log.info( "Block count: %d - Current best: %s..." % (self.node.getblockcount(), self.node.getbestblockhash()[:5])) pastBlock = CBlock() pastBlock.deserialize( BytesIO(hex_str_to_bytes(self.node.getblock(pastBlockHash, False)))) checkpoint = pastBlock.nAccumulatorCheckpoint # 5) get the raw zerocoin spend txes self.log.info("Getting the raw zerocoin public spends...") public_spend_A = self.node.createrawzerocoinpublicspend( mints[0].get("serial hash")) tx_A = CTransaction() tx_A.deserialize(BytesIO(hex_str_to_bytes(public_spend_A))) tx_A.rehash() public_spend_B = self.node.createrawzerocoinpublicspend( mints[1].get("serial hash")) tx_B = CTransaction() tx_B.deserialize(BytesIO(hex_str_to_bytes(public_spend_B))) tx_B.rehash() # Spending same coins to different recipients to get different txids my_addy = "yAVWM5urwaTyhiuFQHP2aP47rdZsLUG5PH" public_spend_A2 = self.node.createrawzerocoinpublicspend( mints[0].get("serial hash"), my_addy) tx_A2 = CTransaction() tx_A2.deserialize(BytesIO(hex_str_to_bytes(public_spend_A2))) tx_A2.rehash() public_spend_B2 = self.node.createrawzerocoinpublicspend( mints[1].get("serial hash"), my_addy) tx_B2 = CTransaction() tx_B2.deserialize(BytesIO(hex_str_to_bytes(public_spend_B2))) tx_B2.rehash() self.log.info("tx_A id: %s" % str(tx_A.hash)) self.log.info("tx_B id: %s" % str(tx_B.hash)) self.log.info("tx_A2 id: %s" % str(tx_A2.hash)) self.log.info("tx_B2 id: %s" % str(tx_B2.hash)) self.test_nodes[0].handle_connect() # 6) create block_A --> main chain self.log.info("") self.log.info("*** block_A ***") self.log.info("Creating block_A [%d] with public spend tx_A in it." % (block_count + 1)) block_A = self.new_block(block_count, pastBlock, checkpoint, tx_A) self.log.info("Hash of block_A: %s..." % block_A.hash[:5]) self.log.info("sending block_A...") var = self.node.submitblock(bytes_to_hex_str(block_A.serialize())) if var is not None: self.log.info("result: %s" % str(var)) raise Exception("block_A not accepted") time.sleep(2) assert_equal(self.node.getblockcount(), block_count + 1) assert_equal(self.node.getbestblockhash(), block_A.hash) self.log.info(" >> block_A connected <<") self.log.info( "Current chain: ... --> block_0 [%d] --> block_A [%d]\n" % (block_count, block_count + 1)) # 7) create block_B --> forked chain self.log.info("*** block_B ***") self.log.info("Creating block_B [%d] with public spend tx_B in it." % (block_count + 1)) block_B = self.new_block(block_count, pastBlock, checkpoint, tx_B) self.log.info("Hash of block_B: %s..." 
% block_B.hash[:5]) self.log.info("sending block_B...") var = self.node.submitblock(bytes_to_hex_str(block_B.serialize())) self.log.info("result of block_B submission: %s" % str(var)) time.sleep(2) assert_equal(self.node.getblockcount(), block_count + 1) assert_equal(self.node.getbestblockhash(), block_A.hash) # block_B is not added. Chain remains the same self.log.info(" >> block_B not connected <<") self.log.info( "Current chain: ... --> block_0 [%d] --> block_A [%d]\n" % (block_count, block_count + 1)) # 8) Create new block block_C on the forked chain (block_B) block_count += 1 self.log.info("*** block_C ***") self.log.info( "Creating block_C [%d] on top of block_B triggering the reorg" % (block_count + 1)) block_C = self.new_block(block_count, block_B, checkpoint) self.log.info("Hash of block_C: %s..." % block_C.hash[:5]) self.log.info("sending block_C...") var = self.node.submitblock(bytes_to_hex_str(block_C.serialize())) if var is not None: self.log.info("result: %s" % str(var)) raise Exception("block_C not accepted") time.sleep(2) assert_equal(self.node.getblockcount(), block_count + 1) assert_equal(self.node.getbestblockhash(), block_C.hash) self.log.info( " >> block_A disconnected / block_B and block_C connected <<") self.log.info( "Current chain: ... --> block_0 [%d] --> block_B [%d] --> block_C [%d]\n" % (block_count - 1, block_count, block_count + 1)) # 7) Now create block_D which tries to spend same coin of tx_B again on the (new) main chain # (this block will be rejected) block_count += 1 self.log.info("*** block_D ***") self.log.info( "Creating block_D [%d] trying to double spend the coin of tx_B" % (block_count + 1)) block_D = self.new_block(block_count, block_C, checkpoint, tx_B2) self.log.info("Hash of block_D: %s..." % block_D.hash[:5]) self.log.info("sending block_D...") var = self.node.submitblock(bytes_to_hex_str(block_D.serialize())) self.log.info("result of block_D submission: %s" % str(var)) time.sleep(2) assert_equal(self.node.getblockcount(), block_count) assert_equal(self.node.getbestblockhash(), block_C.hash) # block_D is not added. Chain remains the same self.log.info(" >> block_D rejected <<") self.log.info( "Current chain: ... --> block_0 [%d] --> block_B [%d] --> block_C [%d]\n" % (block_count - 2, block_count - 1, block_count)) # 8) Now create block_E which spends tx_A again on main chain # (this block will be accepted and connected since tx_A was spent on block_A now disconnected) self.log.info("*** block_E ***") self.log.info("Creating block_E [%d] trying spend tx_A on main chain" % (block_count + 1)) block_E = self.new_block(block_count, block_C, checkpoint, tx_A) self.log.info("Hash of block_E: %s..." % block_E.hash[:5]) self.log.info("sending block_E...") var = self.node.submitblock(bytes_to_hex_str(block_E.serialize())) if var is not None: self.log.info("result: %s" % str(var)) raise Exception("block_E not accepted") time.sleep(2) assert_equal(self.node.getblockcount(), block_count + 1) assert_equal(self.node.getbestblockhash(), block_E.hash) self.log.info(" >> block_E connected <<") self.log.info( "Current chain: ... 
--> block_0 [%d] --> block_B [%d] --> block_C [%d] --> block_E [%d]\n" % (block_count - 2, block_count - 1, block_count, block_count + 1)) # 9) Now create block_F which tries to double spend the coin in tx_A # # (this block will be rejected) block_count += 1 self.log.info("*** block_F ***") self.log.info( "Creating block_F [%d] trying to double spend the coin in tx_A" % (block_count + 1)) block_F = self.new_block(block_count, block_E, checkpoint, tx_A2) self.log.info("Hash of block_F: %s..." % block_F.hash[:5]) self.log.info("sending block_F...") var = self.node.submitblock(bytes_to_hex_str(block_F.serialize())) self.log.info("result of block_F submission: %s" % str(var)) time.sleep(2) assert_equal(self.node.getblockcount(), block_count) assert_equal(self.node.getbestblockhash(), block_E.hash) self.log.info(" >> block_F rejected <<") self.log.info( "Current chain: ... --> block_0 [%d] --> block_B [%d] --> block_C [%d] --> block_E [%d]\n" % (block_count - 3, block_count - 2, block_count - 1, block_count)) self.log.info("All good.")
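# Every block submission above repeats the same steps: submit the serialized
# block, give the node a moment to process it, then check whether the tip moved.
# A compact sketch of that pattern, using the same submitblock/getbestblockhash
# RPCs exercised above; the helper name and the fixed 2-second wait are
# illustrative choices, not framework API.
def submit_and_check_tip(node, block, expect_new_tip):
    result = node.submitblock(bytes_to_hex_str(block.serialize()))
    time.sleep(2)  # allow the node to (dis)connect blocks
    if expect_new_tip:
        assert result is None, "block %s... not accepted: %s" % (block.hash[:5], result)
        assert_equal(node.getbestblockhash(), block.hash)
    else:
        assert node.getbestblockhash() != block.hash, "block %s... unexpectedly became tip" % block.hash[:5]
    return result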
def test_coinbase_witness(self): def WitToHex(obj): return bytes_to_hex_str(obj.serialize(with_witness=True)) block = self.nodes[0].getnewblockhex() block_struct = FromHex(CBlock(), block) # Test vanilla block round-trip self.nodes[0].testproposedblock(WitToHex(block_struct)) # Assert there's scriptWitness in the coinbase input that is the witness nonce and nothing else assert_equal(block_struct.vtx[0].wit.vtxinwit[0].scriptWitness.stack, [b'\x00' * 32]) assert_equal( block_struct.vtx[0].wit.vtxinwit[0].vchIssuanceAmountRangeproof, b'') assert_equal( block_struct.vtx[0].wit.vtxinwit[0].vchInflationKeysRangeproof, b'') assert_equal(block_struct.vtx[0].wit.vtxinwit[0].peginWitness.stack, []) # Add extra witness that isn't covered by witness merkle root, make sure blocks are still valid block_witness_stuffed = copy.deepcopy(block_struct) block_witness_stuffed.vtx[0].wit.vtxinwit[ 0].vchIssuanceAmountRangeproof = b'\x00' assert_raises_rpc_error(-25, "bad-cb-witness", self.nodes[0].testproposedblock, WitToHex(block_witness_stuffed)) block_witness_stuffed = copy.deepcopy(block_struct) block_witness_stuffed.vtx[0].wit.vtxinwit[ 0].vchInflationKeysRangeproof = b'\x00' assert_raises_rpc_error(-25, "bad-cb-witness", self.nodes[0].testproposedblock, WitToHex(block_witness_stuffed)) block_witness_stuffed = copy.deepcopy(block_struct) # Let's blow out block weight limit by adding 4MW here block_witness_stuffed.vtx[0].wit.vtxinwit[0].peginWitness.stack = [ b'\x00' * 4000000 ] assert_raises_rpc_error(-25, "bad-cb-witness", self.nodes[0].testproposedblock, WitToHex(block_witness_stuffed)) # Test that node isn't blinded to the block # Previously an over-stuffed block >4MW would have been marked permanently bad # as it already passes witness merkle and regular merkle root checks block_height = self.nodes[0].getblockcount() assert_equal( self.nodes[0].submitblock(WitToHex(block_witness_stuffed)), "bad-cb-witness") assert_equal(block_height, self.nodes[0].getblockcount()) assert_equal(self.nodes[0].submitblock(WitToHex(block_struct)), None) assert_equal(block_height + 1, self.nodes[0].getblockcount()) # New block since we used the first one block_struct = FromHex(CBlock(), self.nodes[0].getnewblockhex()) block_witness_stuffed = copy.deepcopy(block_struct) # Add extra witness data that is covered by witness merkle root, make sure invalid assert_equal( block_witness_stuffed.vtx[0].wit.vtxoutwit[0].vchSurjectionproof, b'') assert_equal( block_witness_stuffed.vtx[0].wit.vtxoutwit[0].vchRangeproof, b'') block_witness_stuffed.vtx[0].wit.vtxoutwit[ 0].vchRangeproof = b'\x00' * 100000 block_witness_stuffed.vtx[0].wit.vtxoutwit[ 0].vchSurjectionproof = b'\x00' * 100000 assert_raises_rpc_error(-25, "bad-witness-merkle-match", self.nodes[0].testproposedblock, WitToHex(block_witness_stuffed)) witness_root_hex = block_witness_stuffed.calc_witness_merkle_root() witness_root = uint256_from_str( hex_str_to_bytes(witness_root_hex)[::-1]) block_witness_stuffed.vtx[0].vout[-1] = CTxOut( 0, get_witness_script(witness_root, 0)) block_witness_stuffed.vtx[0].rehash() block_witness_stuffed.hashMerkleRoot = block_witness_stuffed.calc_merkle_root( ) block_witness_stuffed.rehash() assert_raises_rpc_error(-25, "bad-cb-amount", self.nodes[0].testproposedblock, WitToHex(block_witness_stuffed)) assert_greater_than( len(WitToHex(block_witness_stuffed)), 100000 * 4) # Make sure the witness data is actually serialized # A CTxIn that always serializes the asset issuance, even for coinbases. 
class AlwaysIssuanceCTxIn(CTxIn): def serialize(self): r = b'' outpoint = COutPoint() outpoint.hash = self.prevout.hash outpoint.n = self.prevout.n outpoint.n |= OUTPOINT_ISSUANCE_FLAG r += outpoint.serialize() r += ser_string(self.scriptSig) r += struct.pack("<I", self.nSequence) r += self.assetIssuance.serialize() return r # Test that issuance inputs in coinbase don't survive a serialization round-trip # (even though this can't cause issuance to occur either way due to VerifyCoinbaseAmount semantics) block_witness_stuffed = copy.deepcopy(block_struct) coinbase_orig = copy.deepcopy(block_witness_stuffed.vtx[0].vin[0]) coinbase_ser_size = len( block_witness_stuffed.vtx[0].vin[0].serialize()) block_witness_stuffed.vtx[0].vin[0] = AlwaysIssuanceCTxIn() block_witness_stuffed.vtx[0].vin[0].prevout = coinbase_orig.prevout block_witness_stuffed.vtx[0].vin[0].scriptSig = coinbase_orig.scriptSig block_witness_stuffed.vtx[0].vin[0].nSequence = coinbase_orig.nSequence block_witness_stuffed.vtx[0].vin[0].assetIssuance.nAmount.setToAmount( 1) bad_coinbase_ser_size = len( block_witness_stuffed.vtx[0].vin[0].serialize()) # 32+32+9+1 should be serialized for each assetIssuance field assert_equal(bad_coinbase_ser_size, coinbase_ser_size + 32 + 32 + 9 + 1) assert (not block_witness_stuffed.vtx[0].vin[0].assetIssuance.isNull()) assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].decoderawtransaction, ToHex(block_witness_stuffed.vtx[0]))
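# AlwaysIssuanceCTxIn above forces issuance serialization by OR-ing
# OUTPOINT_ISSUANCE_FLAG into the outpoint index. A minimal sketch of reading
# that bit back out of a packed index, assuming the flag constant comes from
# the framework's messages module as used above; helper names are illustrative.
def outpoint_has_issuance(packed_n):
    # True when the issuance bit is set on the 32-bit outpoint index.
    return bool(packed_n & OUTPOINT_ISSUANCE_FLAG)

def outpoint_plain_index(packed_n):
    # Clear the issuance bit to recover the ordinary output index.
    return packed_n & ~OUTPOINT_ISSUANCE_FLAG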
def run_test(self): url = urllib.parse.urlparse(self.nodes[0].url) self.log.info("Mining blocks...") self.nodes[0].generate(1) self.sync_all() self.nodes[2].generate(100) self.sync_all() assert_equal(self.nodes[0].getbalance(), 5000) txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1) self.sync_all() self.nodes[2].generate(1) self.sync_all() bb_hash = self.nodes[0].getbestblockhash() assert_equal(self.nodes[1].getbalance(), Decimal("0.1")) #balance now should be 0.1 on node 1 # load the latest 0.1 tx over the REST API json_string = http_get_call( url.hostname, url.port, '/rest/tx/' + txid + self.FORMAT_SEPARATOR + "json") json_obj = json.loads(json_string) vintx = json_obj['vin'][0][ 'txid'] # get the vin to later check for utxo (should be spent by then) # get n of 0.1 outpoint n = 0 for vout in json_obj['vout']: if vout['value'] == 0.1: n = vout['n'] ####################################### # GETUTXOS: query an unspent outpoint # ####################################### json_request = '/checkmempool/' + txid + '-' + str(n) json_string = http_get_call( url.hostname, url.port, '/rest/getutxos' + json_request + self.FORMAT_SEPARATOR + 'json') json_obj = json.loads(json_string) #check chainTip response assert_equal(json_obj['chaintipHash'], bb_hash) #make sure there is one utxo assert_equal(len(json_obj['utxos']), 1) assert_equal(json_obj['utxos'][0]['value'], 0.1) ################################################# # GETUTXOS: now query an already spent outpoint # ################################################# json_request = '/checkmempool/' + vintx + '-0' json_string = http_get_call( url.hostname, url.port, '/rest/getutxos' + json_request + self.FORMAT_SEPARATOR + 'json') json_obj = json.loads(json_string) #check chainTip response assert_equal(json_obj['chaintipHash'], bb_hash) # make sure there is no utxo in the response because this oupoint has been spent assert_equal(len(json_obj['utxos']), 0) #check bitmap assert_equal(json_obj['bitmap'], "0") ################################################## # GETUTXOS: now check both with the same request # ################################################## json_request = '/checkmempool/' + txid + '-' + str( n) + '/' + vintx + '-0' json_string = http_get_call( url.hostname, url.port, '/rest/getutxos' + json_request + self.FORMAT_SEPARATOR + 'json') json_obj = json.loads(json_string) assert_equal(len(json_obj['utxos']), 1) assert_equal(json_obj['bitmap'], "10") #test binary response bb_hash = self.nodes[0].getbestblockhash() binaryRequest = b'\x01\x02' binaryRequest += hex_str_to_bytes(txid) binaryRequest += pack("i", n) binaryRequest += hex_str_to_bytes(vintx) binaryRequest += pack("i", 0) bin_response = http_post_call( url.hostname, url.port, '/rest/getutxos' + self.FORMAT_SEPARATOR + 'bin', binaryRequest) output = BytesIO() output.write(bin_response) output.seek(0) chainHeight = unpack("i", output.read(4))[0] hashFromBinResponse = hex(deser_uint256(output))[2:].zfill(64) assert_equal( bb_hash, hashFromBinResponse ) #check if getutxo's chaintip during calculation was fine assert_equal(chainHeight, 102) #chain height must be 102 ############################ # GETUTXOS: mempool checks # ############################ # do a tx and don't sync txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1) json_string = http_get_call( url.hostname, url.port, '/rest/tx/' + txid + self.FORMAT_SEPARATOR + "json") json_obj = json.loads(json_string) #vintx = json_obj['vin'][0]['txid'] # get the vin to later check for utxo (should be 
spent by then)
    # get n of 0.1 outpoint
    n = 0
    for vout in json_obj['vout']:
        if vout['value'] == 0.1:
            n = vout['n']

    json_request = '/' + txid + '-' + str(n)
    json_string = http_get_call(url.hostname, url.port, '/rest/getutxos' + json_request + self.FORMAT_SEPARATOR + 'json')
    json_obj = json.loads(json_string)
    # no outpoint expected: the tx is still only in the mempool and we did not ask getutxos to check the mempool
    assert_equal(len(json_obj['utxos']), 0)

    json_request = '/checkmempool/' + txid + '-' + str(n)
    json_string = http_get_call(url.hostname, url.port, '/rest/getutxos' + json_request + self.FORMAT_SEPARATOR + 'json')
    json_obj = json.loads(json_string)
    # one outpoint expected: the tx has just been added to the mempool and we asked getutxos to check the mempool
    assert_equal(len(json_obj['utxos']), 1)

    # do some invalid requests
    json_request = '{"checkmempool'
    response = http_post_call(url.hostname, url.port, '/rest/getutxos' + self.FORMAT_SEPARATOR + 'json', json_request, True)
    assert_equal(response.status, 400)  # must be a 400 because we sent an invalid json request

    json_request = '{"checkmempool'
    response = http_post_call(url.hostname, url.port, '/rest/getutxos' + self.FORMAT_SEPARATOR + 'bin', json_request, True)
    assert_equal(response.status, 400)  # must be a 400 because we sent an invalid bin request

    response = http_post_call(url.hostname, url.port, '/rest/getutxos/checkmempool' + self.FORMAT_SEPARATOR + 'bin', '', True)
    assert_equal(response.status, 400)  # must be a 400 because we sent an invalid bin request

    # test limits
    json_request = '/checkmempool/'
    for _ in range(0, 20):
        json_request += txid + '-' + str(n) + '/'
    json_request = json_request.rstrip("/")
    response = http_post_call(url.hostname, url.port, '/rest/getutxos' + json_request + self.FORMAT_SEPARATOR + 'json', '', True)
    assert_equal(response.status, 400)  # must be a 400 because we are exceeding the limits

    json_request = '/checkmempool/'
    for _ in range(0, 15):
        json_request += txid + '-' + str(n) + '/'
    json_request = json_request.rstrip("/")
    response = http_post_call(url.hostname, url.port, '/rest/getutxos' + json_request + self.FORMAT_SEPARATOR + 'json', '', True)
    assert_equal(response.status, 200)  # must be a 200 because we are within the limits

    self.nodes[0].generate(1)  # generate block to not affect upcoming tests
    self.sync_all()

    ################
    # /rest/block/ #
    ################

    # check binary format
    response = http_get_call(url.hostname, url.port, '/rest/block/' + bb_hash + self.FORMAT_SEPARATOR + "bin", True)
    assert_equal(response.status, 200)
    assert_greater_than(int(response.getheader('content-length')), 80)
    response_str = response.read()

    # compare with block header
    response_header = http_get_call(url.hostname, url.port, '/rest/headers/1/' + bb_hash + self.FORMAT_SEPARATOR + "bin", True)
    assert_equal(response_header.status, 200)
    assert_equal(int(response_header.getheader('content-length')), 80)
    response_header_str = response_header.read()
    assert_equal(response_str[0:80], response_header_str)

    # check block hex format
    response_hex = http_get_call(url.hostname, url.port, '/rest/block/' + bb_hash + self.FORMAT_SEPARATOR + "hex", True)
    assert_equal(response_hex.status, 200)
    assert_greater_than(int(response_hex.getheader('content-length')), 160)
    response_hex_str = response_hex.read()
    assert_equal(encode(response_str, "hex_codec")[0:160], response_hex_str[0:160])

    # compare with hex block header
    response_header_hex = http_get_call(url.hostname, url.port, '/rest/headers/1/' + bb_hash + self.FORMAT_SEPARATOR + "hex", True)
    assert_equal(response_header_hex.status, 200)
    assert_greater_than(int(response_header_hex.getheader('content-length')), 160)
    response_header_hex_str = response_header_hex.read()
    assert_equal(response_hex_str[0:160], response_header_hex_str[0:160])
    assert_equal(encode(response_header_str, "hex_codec")[0:160], response_header_hex_str[0:160])

    # check json format
    block_json_string = http_get_call(url.hostname, url.port, '/rest/block/' + bb_hash + self.FORMAT_SEPARATOR + 'json')
    block_json_obj = json.loads(block_json_string)
    assert_equal(block_json_obj['hash'], bb_hash)

    # compare with json block header
    response_header_json = http_get_call(url.hostname, url.port, '/rest/headers/1/' + bb_hash + self.FORMAT_SEPARATOR + "json", True)
    assert_equal(response_header_json.status, 200)
    response_header_json_str = response_header_json.read().decode('utf-8')
    json_obj = json.loads(response_header_json_str, parse_float=Decimal)
    assert_equal(len(json_obj), 1)  # ensure that there is exactly one header in the json response
    assert_equal(json_obj[0]['hash'], bb_hash)  # request and response hash should be the same

    # compare with normal RPC block response
    rpc_block_json = self.nodes[0].getblock(bb_hash)
    assert_equal(json_obj[0]['hash'], rpc_block_json['hash'])
    assert_equal(json_obj[0]['confirmations'], rpc_block_json['confirmations'])
    assert_equal(json_obj[0]['height'], rpc_block_json['height'])
    assert_equal(json_obj[0]['version'], rpc_block_json['version'])
    assert_equal(json_obj[0]['merkleroot'], rpc_block_json['merkleroot'])
    assert_equal(json_obj[0]['time'], rpc_block_json['time'])
    assert_equal(json_obj[0]['nonce'], rpc_block_json['nonce'])
    assert_equal(json_obj[0]['bits'], rpc_block_json['bits'])
    assert_equal(json_obj[0]['difficulty'], rpc_block_json['difficulty'])
    assert_equal(json_obj[0]['chainwork'], rpc_block_json['chainwork'])
    assert_equal(json_obj[0]['previousblockhash'], rpc_block_json['previousblockhash'])

    # see if we can get 5 headers in one response
    self.nodes[1].generate(5)
    self.sync_all()
    response_header_json = http_get_call(url.hostname, url.port, '/rest/headers/5/' + bb_hash + self.FORMAT_SEPARATOR + "json", True)
    assert_equal(response_header_json.status, 200)
    response_header_json_str = response_header_json.read().decode('utf-8')
    json_obj = json.loads(response_header_json_str)
    assert_equal(len(json_obj), 5)  # now we should have 5 header objects

    # do tx test
    tx_hash = block_json_obj['tx'][0]['txid']
    json_string = http_get_call(url.hostname, url.port, '/rest/tx/' + tx_hash + self.FORMAT_SEPARATOR + "json")
    json_obj = json.loads(json_string)
    assert_equal(json_obj['txid'], tx_hash)

    # check hex format response
    hex_string = http_get_call(url.hostname, url.port, '/rest/tx/' + tx_hash + self.FORMAT_SEPARATOR + "hex", True)
    assert_equal(hex_string.status, 200)
    assert_greater_than(int(hex_string.getheader('content-length')), 10)

    # check block tx details
    # let's make 3 tx and mine them on node 1
    txs = [
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11),
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11),
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
    ]
    self.sync_all()

    # check that there are exactly 3 transactions in the TX memory pool before generating the block
    json_string = http_get_call(url.hostname, url.port, '/rest/mempool/info' + self.FORMAT_SEPARATOR + 'json')
    json_obj = json.loads(json_string)
    assert_equal(json_obj['size'], 3)
    # the size of the memory pool should be greater than 3x ~100 bytes
    assert_greater_than(json_obj['bytes'], 300)

    # check that our submitted transactions are in the TX memory pool
    json_string = http_get_call(url.hostname, url.port, '/rest/mempool/contents' + self.FORMAT_SEPARATOR + 'json')
    json_obj = json.loads(json_string)
    for tx in txs:
        assert_equal(tx in json_obj, True)

    # now mine the transactions
    newblockhash = self.nodes[1].generate(1)
    self.sync_all()

    # check if the 3 tx show up in the new block
    json_string = http_get_call(url.hostname, url.port, '/rest/block/' + newblockhash[0] + self.FORMAT_SEPARATOR + 'json')
    json_obj = json.loads(json_string)
    for tx in json_obj['tx']:
        if not 'coinbase' in tx['vin'][0]:  # exclude coinbase
            assert_equal(tx['txid'] in txs, True)

    # check the same but without tx details
    json_string = http_get_call(url.hostname, url.port, '/rest/block/notxdetails/' + newblockhash[0] + self.FORMAT_SEPARATOR + 'json')
    json_obj = json.loads(json_string)
    for tx in txs:
        assert_equal(tx in json_obj['tx'], True)

    # test rest bestblock
    bb_hash = self.nodes[0].getbestblockhash()

    json_string = http_get_call(url.hostname, url.port, '/rest/chaininfo.json')
    json_obj = json.loads(json_string)
    assert_equal(json_obj['bestblockhash'], bb_hash)
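The checks above go through the test framework's http_get_call/http_post_call helpers, but the same REST endpoints can be exercised with nothing beyond Python's standard library. The sketch below is a minimal illustration only; the helper name, host, port, and the idea of pointing it at a regtest node started with -rest are assumptions, not part of the test above.

import http.client
import json

def rest_get_json(host, port, path):
    # Issue a GET against the node's REST interface and decode the JSON body.
    conn = http.client.HTTPConnection(host, port)
    conn.request('GET', path)
    resp = conn.getresponse()
    assert resp.status == 200
    return json.loads(resp.read().decode('utf-8'))

# Hypothetical usage against a local node started with -rest
# (the port is a placeholder and depends on the chain/configuration):
# chaininfo = rest_get_json('127.0.0.1', 18443, '/rest/chaininfo.json')
# best = rest_get_json('127.0.0.1', 18443, '/rest/block/' + chaininfo['bestblockhash'] + '.json')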
def run_test(self):
    node = self.nodes[0]

    self.log.info('Start with empty mempool, and 200 blocks')
    self.mempool_size = 0
    assert_equal(node.getblockcount(), 200)
    assert_equal(node.getmempoolinfo()['size'], self.mempool_size)
    coins = node.listunspent()

    self.log.info('Should not accept garbage to testmempoolaccept')
    assert_raises_rpc_error(-3, 'Expected type array, got string',
                            lambda: node.testmempoolaccept(rawtxs='ff00baar'))
    assert_raises_rpc_error(-8, 'Array must contain exactly one raw transaction for now',
                            lambda: node.testmempoolaccept(rawtxs=['ff00baar', 'ff22']))
    assert_raises_rpc_error(-22, 'TX decode failed',
                            lambda: node.testmempoolaccept(rawtxs=['ff00baar']))

    self.log.info('A transaction already in the blockchain')
    coin = coins.pop()  # Pick a random coin(base) to spend
    raw_tx_in_block = node.signrawtransactionwithwallet(node.createrawtransaction(
        inputs=[{'txid': coin['txid'], 'vout': coin['vout']}],
        outputs=[{node.getnewaddress(): 0.3}, {node.getnewaddress(): 49}],
    ))['hex']
    txid_in_block = node.sendrawtransaction(hexstring=raw_tx_in_block, maxfeerate=0)
    node.generate(1)
    self.mempool_size = 0
    self.check_mempool_result(
        result_expected=[{'txid': txid_in_block, 'allowed': False, 'reject-reason': 'txn-already-known'}],
        rawtxs=[raw_tx_in_block],
    )

    self.log.info('A transaction not in the mempool')
    fee = 0.00000700
    raw_tx_0 = node.signrawtransactionwithwallet(node.createrawtransaction(
        inputs=[{"txid": txid_in_block, "vout": 0, "sequence": BIP125_SEQUENCE_NUMBER}],  # RBF is used later
        outputs=[{node.getnewaddress(): 0.3 - fee}],
    ))['hex']
    tx = CTransaction()
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
    txid_0 = tx.rehash()
    self.check_mempool_result(
        result_expected=[{'txid': txid_0, 'allowed': True, 'fee': fee * COIN}],
        rawtxs=[raw_tx_0],
    )

    self.log.info('A final transaction not in the mempool')
    coin = coins.pop()  # Pick a random coin(base) to spend
    raw_tx_final = node.signrawtransactionwithwallet(node.createrawtransaction(
        inputs=[{'txid': coin['txid'], 'vout': coin['vout'], "sequence": 0xffffffff}],  # SEQUENCE_FINAL
        outputs=[{node.getnewaddress(): 0.025}],
        locktime=node.getblockcount() + 2000,  # Can be anything
    ))['hex']
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_final)))
    self.check_mempool_result(
        result_expected=[{'txid': tx.rehash(), 'allowed': True, 'fee': (int(coin['amount']) - 0.025) * COIN}],
        rawtxs=[tx.serialize().hex()],
        maxfeerate=0,
    )
    node.sendrawtransaction(hexstring=raw_tx_final, maxfeerate=0)
    self.mempool_size += 1

    self.log.info('A transaction in the mempool')
    node.sendrawtransaction(hexstring=raw_tx_0)
    self.mempool_size += 1
    self.check_mempool_result(
        result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': 'txn-already-in-mempool'}],
        rawtxs=[raw_tx_0],
    )

    self.log.info('A transaction that replaces a mempool transaction')
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
    tx.vout[0].nValue -= int(fee * COIN)  # Double the fee
    tx.vin[0].nSequence = BIP125_SEQUENCE_NUMBER + 1  # Now, opt out of RBF
    raw_tx_0 = node.signrawtransactionwithwallet(tx.serialize().hex())['hex']
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
    txid_0 = tx.rehash()
    self.check_mempool_result(
        result_expected=[{'txid': txid_0, 'allowed': True, 'fee': 2 * fee * COIN}],
        rawtxs=[raw_tx_0],
    )

    self.log.info('A transaction that conflicts with an unconfirmed tx')
    # Send the transaction that replaces the mempool transaction and opts out of replaceability
    node.sendrawtransaction(hexstring=tx.serialize().hex(), maxfeerate=0)
    # take original raw_tx_0
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
    tx.vout[0].nValue -= int(4 * fee * COIN)  # Set more fee
    # skip re-signing the tx
    self.check_mempool_result(
        result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'txn-mempool-conflict'}],
        rawtxs=[tx.serialize().hex()],
        maxfeerate=0,
    )

    self.log.info('A transaction with missing inputs, that never existed')
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
    tx.vin[0].prevout = COutPoint(hash=int('ff' * 32, 16), n=14)
    # skip re-signing the tx
    self.check_mempool_result(
        result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'missing-inputs'}],
        rawtxs=[tx.serialize().hex()],
    )

    self.log.info('A transaction with missing inputs, that existed once in the past')
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
    tx.vin[0].prevout.n = 1  # Set vout to 1, to spend the other outpoint (49 coins) of the in-chain tx we want to double spend
    raw_tx_1 = node.signrawtransactionwithwallet(tx.serialize().hex())['hex']
    txid_1 = node.sendrawtransaction(hexstring=raw_tx_1, maxfeerate=0)
    # Now spend both to "clearly hide" the outputs, i.e. remove the coins from the utxo set by spending them
    raw_tx_spend_both = node.signrawtransactionwithwallet(node.createrawtransaction(
        inputs=[
            {'txid': txid_0, 'vout': 0},
            {'txid': txid_1, 'vout': 0},
        ],
        outputs=[{node.getnewaddress(): 0.1}]
    ))['hex']
    txid_spend_both = node.sendrawtransaction(hexstring=raw_tx_spend_both, maxfeerate=0)
    node.generate(1)
    self.mempool_size = 0
    # Now see if we can add the coins back to the utxo set by sending the exact txs again
    self.check_mempool_result(
        result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': 'missing-inputs'}],
        rawtxs=[raw_tx_0],
    )
    self.check_mempool_result(
        result_expected=[{'txid': txid_1, 'allowed': False, 'reject-reason': 'missing-inputs'}],
        rawtxs=[raw_tx_1],
    )

    self.log.info('Create a signed "reference" tx for later use')
    raw_tx_reference = node.signrawtransactionwithwallet(node.createrawtransaction(
        inputs=[{'txid': txid_spend_both, 'vout': 0}],
        outputs=[{node.getnewaddress(): 0.05}],
    ))['hex']
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
    # Reference tx should be valid on its own
    self.check_mempool_result(
        result_expected=[{'txid': tx.rehash(), 'allowed': True, 'fee': (0.1 - 0.05) * COIN}],
        rawtxs=[tx.serialize().hex()],
        maxfeerate=0,
    )

    self.log.info('A transaction with no outputs')
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
    tx.vout = []
    # Skip re-signing the transaction for context-independent checks from now on
    # tx.deserialize(BytesIO(hex_str_to_bytes(node.signrawtransactionwithwallet(tx.serialize().hex())['hex'])))
    self.check_mempool_result(
        result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-vout-empty'}],
        rawtxs=[tx.serialize().hex()],
    )

    self.log.info('A really large transaction')
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
    tx.vin = [tx.vin[0]] * math.ceil(MAX_BLOCK_BASE_SIZE / len(tx.vin[0].serialize()))
    self.check_mempool_result(
        result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-oversize'}],
        rawtxs=[tx.serialize().hex()],
    )

    self.log.info('A transaction with negative output value')
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
    tx.vout[0].nValue *= -1
    self.check_mempool_result(
        result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-vout-negative'}],
        rawtxs=[tx.serialize().hex()],
    )

    # The following two validations prevent overflow of the output amounts (see CVE-2010-5139).
    self.log.info('A transaction with too large output value')
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
    tx.vout[0].nValue = MAX_MONEY + 1
    self.check_mempool_result(
        result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-vout-toolarge'}],
        rawtxs=[tx.serialize().hex()],
    )

    self.log.info('A transaction with too large sum of output values')
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
    tx.vout = [tx.vout[0]] * 2
    tx.vout[0].nValue = MAX_MONEY
    self.check_mempool_result(
        result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-txouttotal-toolarge'}],
        rawtxs=[tx.serialize().hex()],
    )

    self.log.info('A transaction with duplicate inputs')
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
    tx.vin = [tx.vin[0]] * 2
    self.check_mempool_result(
        result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-inputs-duplicate'}],
        rawtxs=[tx.serialize().hex()],
    )

    self.log.info('A coinbase transaction')
    # Pick the input of the first tx we signed, so it has to be a coinbase tx
    raw_tx_coinbase_spent = node.getrawtransaction(txid=node.decoderawtransaction(hexstring=raw_tx_in_block)['vin'][0]['txid'])
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_coinbase_spent)))
    self.check_mempool_result(
        result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'coinbase'}],
        rawtxs=[tx.serialize().hex()],
    )

    self.log.info('Some nonstandard transactions')
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
    tx.nVersion = 3  # A version currently non-standard
    self.check_mempool_result(
        result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'version'}],
        rawtxs=[tx.serialize().hex()],
    )
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
    tx.vout[0].scriptPubKey = CScript([OP_0])  # Some non-standard script
    self.check_mempool_result(
        result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'scriptpubkey'}],
        rawtxs=[tx.serialize().hex()],
    )
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
    key = ECKey()
    key.generate()
    pubkey = key.get_pubkey().get_bytes()
    tx.vout[0].scriptPubKey = CScript([OP_2, pubkey, pubkey, pubkey, OP_3, OP_CHECKMULTISIG])  # Some bare multisig script (2-of-3)
    self.check_mempool_result(
        result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bare-multisig'}],
        rawtxs=[tx.serialize().hex()],
    )
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
    tx.vin[0].scriptSig = CScript([OP_HASH160])  # Some not-pushonly scriptSig
    self.check_mempool_result(
        result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'scriptsig-not-pushonly'}],
        rawtxs=[tx.serialize().hex()],
    )
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
    tx.vin[0].scriptSig = CScript([b'a' * 1648])  # Some too large scriptSig (>1650 bytes)
    self.check_mempool_result(
        result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'scriptsig-size'}],
        rawtxs=[tx.serialize().hex()],
    )
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
    output_p2sh_burn = CTxOut(nValue=540, scriptPubKey=CScript([OP_HASH160, hash160(b'burn'), OP_EQUAL]))
    num_scripts = 100000 // len(output_p2sh_burn.serialize())  # Use enough outputs to make the tx too large for our policy
    tx.vout = [output_p2sh_burn] * num_scripts
    self.check_mempool_result(
        result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'tx-size'}],
        rawtxs=[tx.serialize().hex()],
    )
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
    tx.vout[0] = output_p2sh_burn
    tx.vout[0].nValue -= 1  # Make output smaller, such that it is dust for our policy
    self.check_mempool_result(
        result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'dust'}],
        rawtxs=[tx.serialize().hex()],
    )
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
    tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'\xff'])
    tx.vout = [tx.vout[0]] * 2
    self.check_mempool_result(
        result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'multi-op-return'}],
        rawtxs=[tx.serialize().hex()],
    )

    self.log.info('A timelocked transaction')
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
    tx.vin[0].nSequence -= 1  # Should be non-max, so locktime is not ignored
    tx.nLockTime = node.getblockcount() + 1
    self.check_mempool_result(
        result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'non-final'}],
        rawtxs=[tx.serialize().hex()],
    )

    self.log.info('A transaction that is locked by BIP68 sequence logic')
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
    tx.vin[0].nSequence = 2  # We could include it in the second block mined from now, but not the very next one
    # Can skip re-signing the tx because of early rejection
    self.check_mempool_result(
        result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'non-BIP68-final'}],
        rawtxs=[tx.serialize().hex()],
        maxfeerate=0,
    )
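Every negative case above repeats the same deserialize-mutate-reserialize pattern on raw_tx_reference before handing the result to check_mempool_result. A small helper in this style could capture that pattern; the helper name and the lambda-based call are illustrative only, not part of the test, and the imports assume the same test_framework modules used elsewhere in these tests.

from io import BytesIO

from test_framework.messages import CTransaction
from test_framework.util import hex_str_to_bytes

def mutated_tx(raw_tx_hex, mutate):
    # Deserialize the reference hex, apply the mutation callback, and hand back
    # the re-serialized hex together with the new txid.
    tx = CTransaction()
    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_hex)))
    mutate(tx)
    return tx.serialize().hex(), tx.rehash()

# e.g. the 'no outputs' case could then read:
# raw, txid = mutated_tx(raw_tx_reference, lambda tx: setattr(tx, 'vout', []))
# self.check_mempool_result(
#     result_expected=[{'txid': txid, 'allowed': False, 'reject-reason': 'bad-txns-vout-empty'}],
#     rawtxs=[raw],
# )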
def run_test(self):
    self.log.info("test -blocknotify")
    block_count = 10
    blocks = self.nodes[1].generatetoaddress(block_count, self.nodes[1].getnewaddress() if self.is_wallet_compiled() else ADDRESS_BCRT1_UNSPENDABLE)

    # wait at most 10 seconds for expected number of files before reading the content
    self.wait_until(lambda: len(os.listdir(self.blocknotify_dir)) == block_count, timeout=10)

    # directory content should equal the generated blocks hashes
    assert_equal(sorted(blocks), sorted(os.listdir(self.blocknotify_dir)))

    if self.is_wallet_compiled():
        self.log.info("test -walletnotify")
        # wait at most 10 seconds for expected number of files before reading the content
        self.wait_until(lambda: len(os.listdir(self.walletnotify_dir)) == block_count, timeout=10)

        # directory content should equal the generated transaction hashes
        txids_rpc = list(map(lambda t: notify_outputname(self.wallet, t['txid']), self.nodes[1].listtransactions("*", block_count)))
        assert_equal(sorted(txids_rpc), sorted(os.listdir(self.walletnotify_dir)))
        self.stop_node(1)
        for tx_file in os.listdir(self.walletnotify_dir):
            os.remove(os.path.join(self.walletnotify_dir, tx_file))

        self.log.info("test -walletnotify after rescan")
        # restart node to rescan to force wallet notifications
        self.start_node(1)
        self.connect_nodes(0, 1)

        self.wait_until(lambda: len(os.listdir(self.walletnotify_dir)) == block_count, timeout=10)

        # directory content should equal the generated transaction hashes
        txids_rpc = list(map(lambda t: notify_outputname(self.wallet, t['txid']), self.nodes[1].listtransactions("*", block_count)))
        assert_equal(sorted(txids_rpc), sorted(os.listdir(self.walletnotify_dir)))

        for tx_file in os.listdir(self.walletnotify_dir):
            os.remove(os.path.join(self.walletnotify_dir, tx_file))

        # Conflicting transactions tests. Give node 0 the same wallet seed as
        # node 1, generate spends from node 0, and check notifications
        # triggered by node 1
        self.log.info("test -walletnotify with conflicting transactions")
        self.nodes[0].sethdseed(seed=self.nodes[1].dumpprivkey(keyhash_to_p2pkh(hex_str_to_bytes(self.nodes[1].getwalletinfo()['hdseedid'])[::-1])))
        self.nodes[0].rescanblockchain()
        self.nodes[0].generatetoaddress(100, ADDRESS_BCRT1_UNSPENDABLE)
        self.sync_blocks()

        # Generate transaction on node 0, sync mempools, and check for
        # notification on node 1.
        tx1 = self.nodes[0].sendtoaddress(address=ADDRESS_BCRT1_UNSPENDABLE, amount=1, replaceable=True)
        assert_equal(tx1 in self.nodes[0].getrawmempool(), True)
        self.sync_mempools()
        self.expect_wallet_notify([tx1])

        # Generate bump transaction, sync mempools, and check for bump1
        # notification. In the future, per
        # https://github.com/tokyocoin/tokyocoin/pull/9371, it might be better
        # to have notifications for both tx1 and bump1.
        bump1 = self.nodes[0].bumpfee(tx1)["txid"]
        assert_equal(bump1 in self.nodes[0].getrawmempool(), True)
        self.sync_mempools()
        self.expect_wallet_notify([bump1])

        # Add bump1 transaction to new block, checking for a notification
        # and the correct number of confirmations.
        self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
        self.sync_blocks()
        self.expect_wallet_notify([bump1])
        assert_equal(self.nodes[1].gettransaction(bump1)["confirmations"], 1)

        # Generate a second transaction to be bumped.
        tx2 = self.nodes[0].sendtoaddress(address=ADDRESS_BCRT1_UNSPENDABLE, amount=1, replaceable=True)
        assert_equal(tx2 in self.nodes[0].getrawmempool(), True)
        self.sync_mempools()
        self.expect_wallet_notify([tx2])

        # Bump tx2 as bump2 and generate a block on node 0 while
        # disconnected, then reconnect and check for notifications on node 1
        # about newly confirmed bump2 and newly conflicted tx2.
        self.disconnect_nodes(0, 1)
        bump2 = self.nodes[0].bumpfee(tx2)["txid"]
        self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
        assert_equal(self.nodes[0].gettransaction(bump2)["confirmations"], 1)
        assert_equal(tx2 in self.nodes[1].getrawmempool(), True)
        self.connect_nodes(0, 1)
        self.sync_blocks()
        self.expect_wallet_notify([bump2, tx2])
        assert_equal(self.nodes[1].gettransaction(bump2)["confirmations"], 1)
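The blocknotify_dir and walletnotify_dir polled above are populated by the node itself: -blocknotify and -walletnotify run a shell command with %s substituted by the block hash or wallet txid. The set_test_params wiring for this test is not part of this excerpt, so the snippet below is only a hedged sketch of how such directories are typically hooked up; the paths and the plain echo command are placeholders, and the real test additionally encodes the wallet name into the file name via notify_outputname.

import os

blocknotify_dir = '/tmp/notify/blocks'    # placeholder path, not from this test
walletnotify_dir = '/tmp/notify/wallet'   # placeholder path, not from this test
os.makedirs(blocknotify_dir, exist_ok=True)
os.makedirs(walletnotify_dir, exist_ok=True)

# The node substitutes %s with the block hash (or wallet txid), so `echo > <dir>/%s`
# leaves one empty file per notification, which the test can simply list.
extra_args = [
    '-blocknotify=echo > {}'.format(os.path.join(blocknotify_dir, '%s')),
    '-walletnotify=echo > {}'.format(os.path.join(walletnotify_dir, '%s')),
]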