    def create_tx(self, spend_tx, n, value, script=CScript([OP_TRUE])):
        tx = create_transaction(spend_tx, n, b"", value, script)
        return tx
Example #2
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test fee estimation code."""

from test_framework.test_framework import AgenorCoinTestFramework
from decimal import Decimal
from test_framework.util import *
from test_framework.script import CScript, OP_1, OP_DROP, OP_2, OP_HASH160, OP_EQUAL, hash160, OP_TRUE
from test_framework.mininode import CTransaction, CTxIn, CTxOut, COutPoint, ToHex, COIN

# Use as minTxFee
MIN_FEE = Decimal("0.0001")

# Construct two trivial P2SH scripts and the scriptSigs that spend them,
# so we can create many transactions without needing to spend time signing.
redeem_script_1 = CScript([OP_1, OP_DROP])
redeem_script_2 = CScript([OP_2, OP_DROP])
P2SH_1 = CScript([OP_HASH160, hash160(redeem_script_1), OP_EQUAL])
P2SH_2 = CScript([OP_HASH160, hash160(redeem_script_2), OP_EQUAL])

# The associated scriptSigs that satisfy P2SH_1 and P2SH_2
SCRIPT_SIG = [CScript([OP_TRUE, redeem_script_1]), CScript([OP_TRUE, redeem_script_2])]
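
# Illustrative sketch (not part of the original test): spending an output locked to
# P2SH_1 needs no ECDSA signing, because the scriptSig only has to push OP_TRUE and
# the redeem script. 'prev_txid', 'prev_vout' and 'amount' are hypothetical inputs.
def spend_trivial_p2sh(prev_txid, prev_vout, amount):
    spend = CTransaction()
    spend.vin.append(CTxIn(COutPoint(int(prev_txid, 16), prev_vout), SCRIPT_SIG[0]))
    spend.vout.append(CTxOut(amount, P2SH_2))  # forward the coins to the other trivial P2SH
    return spend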

global log

def small_txpuzzle_randfee(from_node, conflist, unconflist, amount, min_fee, fee_increment):
    """
    Create and send a transaction with a random fee.
    The transaction pays to a trivial P2SH script, and assumes that its inputs
    are of the same form.
    The function takes a list of confirmed outputs and unconfirmed outputs
Example #3
    def run_test(self):
        self.nodes[0].generate(161)  #block 161

        #         self.log.info("Verify sigops are counted in GBT with pre-BIP141 rules before the fork")
        #         txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
        #         tmpl = self.nodes[0].getblocktemplate({})
        #         assert(tmpl['sizelimit'] == 1000000)
        #         assert('weightlimit' not in tmpl)
        #         assert(tmpl['sigoplimit'] == 20000)
        #         assert(tmpl['transactions'][0]['hash'] == txid)
        #         assert(tmpl['transactions'][0]['sigops'] == 2)
        #         tmpl = self.nodes[0].getblocktemplate({'rules':['segwit']})
        #         assert(tmpl['sizelimit'] == 1000000)
        #         assert('weightlimit' not in tmpl)
        #         assert(tmpl['sigoplimit'] == 20000)
        #         assert(tmpl['transactions'][0]['hash'] == txid)
        #         assert(tmpl['transactions'][0]['sigops'] == 2)
        self.nodes[0].generate(1)  #block 162

        balance_presetup = self.nodes[0].getbalance()
        self.pubkey = []
        p2sh_ids = [
        ]  # p2sh_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE embedded in p2sh
        wit_ids = [
        ]  # wit_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE via bare witness
        for i in range(3):
            newaddress = self.nodes[i].getnewaddress()
            self.pubkey.append(
                self.nodes[i].validateaddress(newaddress)["pubkey"])
            multiaddress = self.nodes[i].addmultisigaddress(
                1, [self.pubkey[-1]])
            self.nodes[i].addwitnessaddress(newaddress)
            self.nodes[i].addwitnessaddress(multiaddress)
            p2sh_ids.append([])
            wit_ids.append([])
            for v in range(2):
                p2sh_ids[i].append([])
                wit_ids[i].append([])

        for i in range(5):
            for n in range(3):
                for v in range(2):
                    wit_ids[n][v].append(
                        send_to_witness(v, self.nodes[0],
                                        find_unspent(self.nodes[0], 5000),
                                        self.pubkey[n], False,
                                        Decimal("4999.999")))
                    p2sh_ids[n][v].append(
                        send_to_witness(v, self.nodes[0],
                                        find_unspent(self.nodes[0], 5000),
                                        self.pubkey[n], True,
                                        Decimal("4999.999")))

        self.nodes[0].generate(1)  #block 163
        sync_blocks(self.nodes)

        # Make sure all nodes recognize the transactions as theirs
        assert_equal(
            self.nodes[0].getbalance(),
            balance_presetup - 60 * 5000 + 20 * Decimal("4999.999") + 5000)
        assert_equal(self.nodes[1].getbalance(), 20 * Decimal("4999.999"))
        assert_equal(self.nodes[2].getbalance(), 20 * Decimal("4999.999"))
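        # Illustrative accounting: 5 rounds * 3 nodes * 2 witness versions * 2 script types
        # = 60 spends of 5000-value outputs; the 20 spends addressed to node 0's own pubkey
        # return 4999.999 each, and (presumably) one more 5000 coinbase matures when block
        # 163 is mined, which is the +5000 term above.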

        self.nodes[0].generate(260)  #block 423
        sync_blocks(self.nodes)

        # unsigned, no scriptsig
        self.fail_accept(self.nodes[0], "mandatory-script-verify-flag",
                         wit_ids[NODE_0][WIT_V0][0], False)
        self.fail_accept(self.nodes[0], "mandatory-script-verify-flag",
                         wit_ids[NODE_0][WIT_V1][0], False)
        self.fail_accept(self.nodes[0], "mandatory-script-verify-flag",
                         p2sh_ids[NODE_0][WIT_V0][0], False)
        self.fail_accept(self.nodes[0], "mandatory-script-verify-flag",
                         p2sh_ids[NODE_0][WIT_V1][0], False)
        # unsigned with redeem script
        self.fail_accept(self.nodes[0], "mandatory-script-verify-flag",
                         p2sh_ids[NODE_0][WIT_V0][0], False,
                         witness_script(False, self.pubkey[0]))
        self.fail_accept(self.nodes[0], "mandatory-script-verify-flag",
                         p2sh_ids[NODE_0][WIT_V1][0], False,
                         witness_script(True, self.pubkey[0]))
        # signed
        #         self.fail_accept(self.nodes[0], "no-witness-yet", wit_ids[NODE_0][WIT_V0][0], True)
        #         self.fail_accept(self.nodes[0], "no-witness-yet", wit_ids[NODE_0][WIT_V1][0], True)
        #         self.fail_accept(self.nodes[0], "no-witness-yet", p2sh_ids[NODE_0][WIT_V0][0], True)
        #         self.fail_accept(self.nodes[0], "no-witness-yet", p2sh_ids[NODE_0][WIT_V1][0], True)

        #         self.log.info("Verify witness txs are skipped for mining before the fork")
        #         self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][0], True) #block 424
        #         self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][0], True) #block 425
        #         self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][0], True) #block 426
        #         self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][0], True) #block 427

        # TODO: An old node would see these txs without witnesses and be able to mine them

        self.log.info(
            "Verify unsigned bare witness txs in versionbits-setting blocks are valid before the fork"
        )
        self.success_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][1],
                          False)  #block 428
        self.success_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][1],
                          False)  #block 429

        self.log.info(
            "Verify unsigned p2sh witness txs without a redeem script are invalid"
        )
        self.fail_accept(self.nodes[2], "mandatory-script-verify-flag",
                         p2sh_ids[NODE_2][WIT_V0][1], False)
        self.fail_accept(self.nodes[2], "mandatory-script-verify-flag",
                         p2sh_ids[NODE_2][WIT_V1][1], False)

        self.log.info(
            "Verify unsigned p2sh witness txs with a redeem script in versionbits-settings blocks are valid before the fork"
        )
        self.success_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][1], False,
                          witness_script(False, self.pubkey[2]))  #block 430
        self.success_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][1], False,
                          witness_script(True, self.pubkey[2]))  #block 431

        self.log.info(
            "Verify previous witness txs skipped for mining can now be mined")
        assert_equal(len(self.nodes[2].getrawmempool()), 4)
        block = self.nodes[2].generate(
            1)  #block 432 (first block with new rules; 432 = 144 * 3)
        sync_blocks(self.nodes)
        assert_equal(len(self.nodes[2].getrawmempool()), 0)
        segwit_tx_list = self.nodes[2].getblock(block[0])["tx"]
        assert_equal(len(segwit_tx_list), 5)
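        # 5 transactions = the coinbase plus the 4 witness txs that were waiting in the mempool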

        self.log.info(
            "Verify block and transaction serialization rpcs return differing serializations depending on rpc serialization flag"
        )
        assert (self.nodes[2].getblock(block[0], False) !=
                self.nodes[0].getblock(block[0], False))
        assert (self.nodes[1].getblock(block[0],
                                       False) == self.nodes[2].getblock(
                                           block[0], False))
        for i in range(len(segwit_tx_list)):
            tx = FromHex(
                CTransaction(),
                self.nodes[2].gettransaction(segwit_tx_list[i])["hex"])
            assert (self.nodes[2].getrawtransaction(segwit_tx_list[i]) !=
                    self.nodes[0].getrawtransaction(segwit_tx_list[i]))
            assert (self.nodes[1].getrawtransaction(
                segwit_tx_list[i],
                0) == self.nodes[2].getrawtransaction(segwit_tx_list[i]))
            assert (self.nodes[0].getrawtransaction(segwit_tx_list[i]) !=
                    self.nodes[2].gettransaction(segwit_tx_list[i])["hex"])
            assert (self.nodes[1].getrawtransaction(
                segwit_tx_list[i]) == self.nodes[2].gettransaction(
                    segwit_tx_list[i])["hex"])
            assert (self.nodes[0].getrawtransaction(
                segwit_tx_list[i]) == bytes_to_hex_str(
                    tx.serialize_without_witness()))

        self.log.info(
            "Verify witness txs without witness data are invalid after the fork"
        )
        self.fail_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][2], False)
        self.fail_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][2], False)
        self.fail_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][2], False,
                       witness_script(False, self.pubkey[2]))
        self.fail_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][2], False,
                       witness_script(True, self.pubkey[2]))

        self.log.info("Verify default node can now use witness txs")
        self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V0][0],
                          True)  #block 432
        self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V1][0],
                          True)  #block 433
        self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V0][0],
                          True)  #block 434
        self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V1][0],
                          True)  #block 435

        self.log.info(
            "Verify sigops are counted in GBT with BIP141 rules after the fork"
        )
        txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
        tmpl = self.nodes[0].getblocktemplate({'rules': ['segwit']})
        assert (
            tmpl['sizelimit'] >= 3999577
        )  # actual maximum size is lower due to minimum mandatory non-witness data
        assert (tmpl['weightlimit'] == 4000000)
        assert (tmpl['sigoplimit'] == 80000)
        assert (tmpl['transactions'][0]['txid'] == txid)
        assert (tmpl['transactions'][0]['sigops'] == 8)
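        # After the fork, GBT reports sigops in weight units (scaled by the witness factor
        # of 4), so the limit rises from 20000 to 80000 and a transaction that counted
        # 2 legacy sigops pre-fork is reported as 8.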

        self.nodes[0].generate(1)  # Mine a block to clear the gbt cache

        self.log.info(
            "Non-segwit miners are able to use GBT response after activation.")
        # Create a 3-tx chain: tx1 (non-segwit input, paying to a segwit output) ->
        #                      tx2 (segwit input, paying to a non-segwit output) ->
        #                      tx3 (non-segwit input, paying to a non-segwit output).
        # tx1 is allowed to appear in the block, but no others.
        txid1 = send_to_witness(1, self.nodes[0],
                                find_unspent(self.nodes[0], 50),
                                self.pubkey[0], False, Decimal("49.996"))
        hex_tx = self.nodes[0].gettransaction(txid1)['hex']
        tx = FromHex(CTransaction(), hex_tx)
        assert (tx.wit.is_null())  # This should not be a segwit input
        assert (txid1 in self.nodes[0].getrawmempool())

        # Now create tx2, which will spend from txid1.
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(int(txid1, 16), 0), b''))
        tx.vout.append(CTxOut(int(49.99 * COIN), CScript([OP_TRUE])))
        tx2_hex = self.nodes[0].signrawtransaction(ToHex(tx))['hex']
        txid2 = self.nodes[0].sendrawtransaction(tx2_hex)
        tx = FromHex(CTransaction(), tx2_hex)
        assert (not tx.wit.is_null())

        # Now create tx3, which will spend from txid2
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(int(txid2, 16), 0), b""))
        tx.vout.append(CTxOut(int(49.95 * COIN),
                              CScript([OP_TRUE])))  # Huge fee
        tx.calc_sha256()
        txid3 = self.nodes[0].sendrawtransaction(ToHex(tx))
        assert (tx.wit.is_null())
        assert (txid3 in self.nodes[0].getrawmempool())

        # Now try calling getblocktemplate() without segwit support.
        template = self.nodes[0].getblocktemplate()

        # Check that tx1 is the only transaction of the 3 in the template.
        template_txids = [t['txid'] for t in template['transactions']]
        assert (txid2 not in template_txids and txid3 not in template_txids)
        assert (txid1 in template_txids)

        # Check that running with segwit support results in all 3 being included.
        template = self.nodes[0].getblocktemplate({"rules": ["segwit"]})
        template_txids = [t['txid'] for t in template['transactions']]
        assert (txid1 in template_txids)
        assert (txid2 in template_txids)
        assert (txid3 in template_txids)

        # Check that wtxid is properly reported in mempool entry
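        # (calc_sha256(True) hashes the serialization including witness data, so it is
        # expected to return the wtxid rather than the txid.)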
        assert_equal(int(self.nodes[0].getmempoolentry(txid3)["wtxid"], 16),
                     tx.calc_sha256(True))

        # Mine a block to clear the gbt cache again.
        self.nodes[0].generate(1)

        self.log.info(
            "Verify behaviour of importaddress, addwitnessaddress and listunspent"
        )

        # Some public keys to be used later
        pubkeys = [
            "0363D44AABD0F1699138239DF2F042C3282C0671CC7A76826A55C8203D90E39242",  # cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb
            "02D3E626B3E616FC8662B489C123349FECBFC611E778E5BE739B257EAE4721E5BF",  # cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97
            "04A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538A62F5BD8EC85C2477F39650BD391EA6250207065B2A81DA8B009FC891E898F0E",  # 91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV
            "02A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538",  # cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd
            "036722F784214129FEB9E8129D626324F3F6716555B603FFE8300BBCB882151228",  # cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66
            "0266A8396EE936BF6D99D17920DB21C6C7B1AB14C639D5CD72B300297E416FD2EC",  # cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K
            "0450A38BD7F0AC212FEBA77354A9B036A32E0F7C81FC4E0C5ADCA7C549C4505D2522458C2D9AE3CEFD684E039194B72C8A10F9CB9D4764AB26FCC2718D421D3B84",  # 92h2XPssjBpsJN5CqSP7v9a7cf2kgDunBC6PDFwJHMACM1rrVBJ
        ]

        # Import a compressed key and an uncompressed key, generate some multisig addresses
        self.nodes[0].importprivkey(
            "92e6XLo5jVAVwrQKPNTs93oQco8f8sDNBcpv73Dsrs397fQtFQn")
        uncompressed_spendable_address = ["mvozP4UwyGD2mGZU4D2eMvMLPB9WkMmMQu"]
        self.nodes[0].importprivkey(
            "cNC8eQ5dg3mFAVePDX4ddmPYpPbw41r9bm2jd1nLJT77e6RrzTRR")
        compressed_spendable_address = ["mmWQubrDomqpgSYekvsU7HWEVjLFHAakLe"]
        assert not self.nodes[0].validateaddress(
            uncompressed_spendable_address[0])['iscompressed']
        assert self.nodes[0].validateaddress(
            compressed_spendable_address[0])['iscompressed']

        self.nodes[0].importpubkey(pubkeys[0])
        compressed_solvable_address = [key_to_p2pkh(pubkeys[0])]
        self.nodes[0].importpubkey(pubkeys[1])
        compressed_solvable_address.append(key_to_p2pkh(pubkeys[1]))
        self.nodes[0].importpubkey(pubkeys[2])
        uncompressed_solvable_address = [key_to_p2pkh(pubkeys[2])]

        spendable_anytime = [
        ]  # These outputs should be seen anytime after importprivkey and addmultisigaddress
        spendable_after_importaddress = [
        ]  # These outputs should be seen after importaddress
        solvable_after_importaddress = [
        ]  # These outputs should be seen after importaddress but not spendable
        unsolvable_after_importaddress = [
        ]  # These outputs should be unsolvable after importaddress
        solvable_anytime = [
        ]  # These outputs should be solvable after importpubkey
        unseen_anytime = []  # These outputs should never be seen

        uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(
            2, [
                uncompressed_spendable_address[0],
                compressed_spendable_address[0]
            ]))
        uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(
            2, [
                uncompressed_spendable_address[0],
                uncompressed_spendable_address[0]
            ]))
        compressed_spendable_address.append(self.nodes[0].addmultisigaddress(
            2,
            [compressed_spendable_address[0], compressed_spendable_address[0]
             ]))
        uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(
            2, [
                compressed_spendable_address[0],
                uncompressed_solvable_address[0]
            ]))
        compressed_solvable_address.append(self.nodes[0].addmultisigaddress(
            2,
            [compressed_spendable_address[0], compressed_solvable_address[0]]))
        compressed_solvable_address.append(self.nodes[0].addmultisigaddress(
            2,
            [compressed_solvable_address[0], compressed_solvable_address[1]]))
        unknown_address = [
            "mtKKyoHabkk6e4ppT7NaM7THqPUt7AzPrT",
            "2NDP3jLWAFT8NDAiUa9qiE6oBt2awmMq7Dx"
        ]

        # Test multisig_without_privkey
        # We have 2 public keys without private keys, use addmultisigaddress to add to wallet.
        # Money sent to P2SH of multisig of this should only be seen after importaddress with the BASE58 P2SH address.

        multisig_without_privkey_address = self.nodes[0].addmultisigaddress(
            2, [pubkeys[3], pubkeys[4]])
        script = CScript([
            OP_2,
            hex_str_to_bytes(pubkeys[3]),
            hex_str_to_bytes(pubkeys[4]), OP_2, OP_CHECKMULTISIG
        ])
        solvable_after_importaddress.append(
            CScript([OP_HASH160, hash160(script), OP_EQUAL]))

        for i in compressed_spendable_address:
            v = self.nodes[0].validateaddress(i)
            if (v['isscript']):
                [bare, p2sh, p2wsh,
                 p2sh_p2wsh] = self.p2sh_address_to_script(v)
                # bare and p2sh multisig with compressed keys should always be spendable
                spendable_anytime.extend([bare, p2sh])
                # P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after direct importaddress
                spendable_after_importaddress.extend([p2wsh, p2sh_p2wsh])
            else:
                [
                    p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh,
                    p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh
                ] = self.p2pkh_address_to_script(v)
                # normal P2PKH and P2PK with compressed keys should always be spendable
                spendable_anytime.extend([p2pkh, p2pk])
                # P2SH_P2PK, P2SH_P2PKH, and witness with compressed keys are spendable after direct importaddress
                spendable_after_importaddress.extend([
                    p2wpkh, p2sh_p2wpkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk,
                    p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh
                ])

        for i in uncompressed_spendable_address:
            v = self.nodes[0].validateaddress(i)
            if (v['isscript']):
                [bare, p2sh, p2wsh,
                 p2sh_p2wsh] = self.p2sh_address_to_script(v)
                # bare and p2sh multisig with uncompressed keys should always be spendable
                spendable_anytime.extend([bare, p2sh])
                # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
                unseen_anytime.extend([p2wsh, p2sh_p2wsh])
            else:
                [
                    p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh,
                    p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh
                ] = self.p2pkh_address_to_script(v)
                # normal P2PKH and P2PK with uncompressed keys should always be spendable
                spendable_anytime.extend([p2pkh, p2pk])
                # P2SH_P2PK and P2SH_P2PKH are spendable after direct importaddress
                spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh])
                # witness with uncompressed keys are never seen
                unseen_anytime.extend([
                    p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh,
                    p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh
                ])

        for i in compressed_solvable_address:
            v = self.nodes[0].validateaddress(i)
            if (v['isscript']):
                # Multisig without private keys is not seen after addmultisigaddress, but is seen after importaddress
                [bare, p2sh, p2wsh,
                 p2sh_p2wsh] = self.p2sh_address_to_script(v)
                solvable_after_importaddress.extend(
                    [bare, p2sh, p2wsh, p2sh_p2wsh])
            else:
                [
                    p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh,
                    p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh
                ] = self.p2pkh_address_to_script(v)
                # normal P2PKH and P2PK with compressed keys should always be seen
                solvable_anytime.extend([p2pkh, p2pk])
                # P2SH_P2PK, P2SH_P2PKH, and witness with compressed keys are seen after direct importaddress
                solvable_after_importaddress.extend([
                    p2wpkh, p2sh_p2wpkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk,
                    p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh
                ])

        for i in uncompressed_solvable_address:
            v = self.nodes[0].validateaddress(i)
            if (v['isscript']):
                [bare, p2sh, p2wsh,
                 p2sh_p2wsh] = self.p2sh_address_to_script(v)
                # Bare uncompressed multisig without private keys is not seen after addmultisigaddress, but is seen after importaddress
                solvable_after_importaddress.extend([bare, p2sh])
                # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
                unseen_anytime.extend([p2wsh, p2sh_p2wsh])
            else:
                [
                    p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh,
                    p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh
                ] = self.p2pkh_address_to_script(v)
                # normal P2PKH and P2PK with uncompressed keys should always be seen
                solvable_anytime.extend([p2pkh, p2pk])
                # P2SH_P2PK, P2SH_P2PKH with uncompressed keys are seen after direct importaddress
                solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh])
                # witness with uncompressed keys are never seen
                unseen_anytime.extend([
                    p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh,
                    p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh
                ])

        op1 = CScript([OP_1])
        op0 = CScript([OP_0])
        # 2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe is the P2SH(P2PKH) version of mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V
        unsolvable_address = [
            "mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V",
            "2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe",
            script_to_p2sh(op1),
            script_to_p2sh(op0)
        ]
        unsolvable_address_key = hex_str_to_bytes(
            "02341AEC7587A51CDE5279E0630A531AEA2615A9F80B17E8D9376327BAEAA59E3D"
        )
        unsolvablep2pkh = CScript([
            OP_DUP, OP_HASH160,
            hash160(unsolvable_address_key), OP_EQUALVERIFY, OP_CHECKSIG
        ])
        unsolvablep2wshp2pkh = CScript([OP_0, sha256(unsolvablep2pkh)])
        p2shop0 = CScript([OP_HASH160, hash160(op0), OP_EQUAL])
        p2wshop1 = CScript([OP_0, sha256(op1)])
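        # A version-0 P2WSH program is OP_0 followed by the sha256 of the witness script,
        # which is how unsolvablep2wshp2pkh and p2wshop1 are built above; P2WPKH programs
        # instead use OP_0 followed by hash160 of the pubkey, as in the importlist below.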
        unsolvable_after_importaddress.append(unsolvablep2pkh)
        unsolvable_after_importaddress.append(unsolvablep2wshp2pkh)
        unsolvable_after_importaddress.append(
            op1)  # OP_1 will be imported as script
        unsolvable_after_importaddress.append(p2wshop1)
        unseen_anytime.append(
            op0
        )  # OP_0 will be imported as P2SH address with no script provided
        unsolvable_after_importaddress.append(p2shop0)

        spendable_txid = []
        solvable_txid = []
        spendable_txid.append(
            self.mine_and_test_listunspent(spendable_anytime, 2))
        solvable_txid.append(
            self.mine_and_test_listunspent(solvable_anytime, 1))
        self.mine_and_test_listunspent(
            spendable_after_importaddress + solvable_after_importaddress +
            unseen_anytime + unsolvable_after_importaddress, 0)

        importlist = []
        for i in compressed_spendable_address + uncompressed_spendable_address + compressed_solvable_address + uncompressed_solvable_address:
            v = self.nodes[0].validateaddress(i)
            if (v['isscript']):
                bare = hex_str_to_bytes(v['hex'])
                importlist.append(bytes_to_hex_str(bare))
                importlist.append(
                    bytes_to_hex_str(CScript([OP_0, sha256(bare)])))
            else:
                pubkey = hex_str_to_bytes(v['pubkey'])
                p2pk = CScript([pubkey, OP_CHECKSIG])
                p2pkh = CScript([
                    OP_DUP, OP_HASH160,
                    hash160(pubkey), OP_EQUALVERIFY, OP_CHECKSIG
                ])
                importlist.append(bytes_to_hex_str(p2pk))
                importlist.append(bytes_to_hex_str(p2pkh))
                importlist.append(
                    bytes_to_hex_str(CScript([OP_0, hash160(pubkey)])))
                importlist.append(
                    bytes_to_hex_str(CScript([OP_0, sha256(p2pk)])))
                importlist.append(
                    bytes_to_hex_str(CScript([OP_0, sha256(p2pkh)])))

        importlist.append(bytes_to_hex_str(unsolvablep2pkh))
        importlist.append(bytes_to_hex_str(unsolvablep2wshp2pkh))
        importlist.append(bytes_to_hex_str(op1))
        importlist.append(bytes_to_hex_str(p2wshop1))

        for i in importlist:
            # import all generated addresses. The wallet already has the private keys for some of these, so catch JSON RPC
            # exceptions and continue.
            try_rpc(
                -4,
                "The wallet already contains the private key for this address or script",
                self.nodes[0].importaddress, i, "", False, True)

        self.nodes[0].importaddress(
            script_to_p2sh(op0))  # import OP_0 as address only
        self.nodes[0].importaddress(
            multisig_without_privkey_address)  # Test multisig_without_privkey

        spendable_txid.append(
            self.mine_and_test_listunspent(
                spendable_anytime + spendable_after_importaddress, 2))
        solvable_txid.append(
            self.mine_and_test_listunspent(
                solvable_anytime + solvable_after_importaddress, 1))
        self.mine_and_test_listunspent(unsolvable_after_importaddress, 1)
        self.mine_and_test_listunspent(unseen_anytime, 0)

        # addwitnessaddress should refuse to return a witness address if an uncompressed key is used
        # Note that no witness address should be returned for unsolvable addresses
        for i in uncompressed_spendable_address + uncompressed_solvable_address + unknown_address + unsolvable_address:
            assert_raises_rpc_error(
                -4,
                "Public key or redeemscript not known to wallet, or the key is uncompressed",
                self.nodes[0].addwitnessaddress, i)

        # addwitnessaddress should return a witness address even if the keys are not in the wallet
        self.nodes[0].addwitnessaddress(multisig_without_privkey_address)

        for i in compressed_spendable_address + compressed_solvable_address:
            witaddress = self.nodes[0].addwitnessaddress(i)
            # addwitnessaddress should return the same address if it is a known P2SH-witness address
            assert_equal(witaddress,
                         self.nodes[0].addwitnessaddress(witaddress))

        spendable_txid.append(
            self.mine_and_test_listunspent(
                spendable_anytime + spendable_after_importaddress, 2))
        solvable_txid.append(
            self.mine_and_test_listunspent(
                solvable_anytime + solvable_after_importaddress, 1))
        self.mine_and_test_listunspent(unsolvable_after_importaddress, 1)
        self.mine_and_test_listunspent(unseen_anytime, 0)

        # Repeat some tests. This time we don't add witness scripts with importaddress
        # Import a compressed key and an uncompressed key, generate some multisig addresses
        self.nodes[0].importprivkey(
            "927pw6RW8ZekycnXqBQ2JS5nPyo1yRfGNN8oq74HeddWSpafDJH")
        uncompressed_spendable_address = ["mguN2vNSCEUh6rJaXoAVwY3YZwZvEmf5xi"]
        self.nodes[0].importprivkey(
            "cMcrXaaUC48ZKpcyydfFo8PxHAjpsYLhdsp6nmtB3E2ER9UUHWnw")
        compressed_spendable_address = ["n1UNmpmbVUJ9ytXYXiurmGPQ3TRrXqPWKL"]

        self.nodes[0].importpubkey(pubkeys[5])
        compressed_solvable_address = [key_to_p2pkh(pubkeys[5])]
        self.nodes[0].importpubkey(pubkeys[6])
        uncompressed_solvable_address = [key_to_p2pkh(pubkeys[6])]

        spendable_after_addwitnessaddress = [
        ]  # These outputs should be seen after addwitnessaddress
        solvable_after_addwitnessaddress = [
        ]  # These outputs should be seen after addwitnessaddress but not spendable
        unseen_anytime = []  # These outputs should never be seen

        uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(
            2, [
                uncompressed_spendable_address[0],
                compressed_spendable_address[0]
            ]))
        uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(
            2, [
                uncompressed_spendable_address[0],
                uncompressed_spendable_address[0]
            ]))
        compressed_spendable_address.append(self.nodes[0].addmultisigaddress(
            2,
            [compressed_spendable_address[0], compressed_spendable_address[0]
             ]))
        uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(
            2,
            [compressed_solvable_address[0], uncompressed_solvable_address[0]
             ]))
        compressed_solvable_address.append(self.nodes[0].addmultisigaddress(
            2,
            [compressed_spendable_address[0], compressed_solvable_address[0]]))

        premature_witaddress = []

        for i in compressed_spendable_address:
            v = self.nodes[0].validateaddress(i)
            if (v['isscript']):
                [bare, p2sh, p2wsh,
                 p2sh_p2wsh] = self.p2sh_address_to_script(v)
                # P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after addwitnessaddress
                spendable_after_addwitnessaddress.extend([p2wsh, p2sh_p2wsh])
                premature_witaddress.append(script_to_p2sh(p2wsh))
            else:
                [
                    p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh,
                    p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh
                ] = self.p2pkh_address_to_script(v)
                # P2WPKH, P2SH_P2WPKH are spendable after addwitnessaddress
                spendable_after_addwitnessaddress.extend([p2wpkh, p2sh_p2wpkh])
                premature_witaddress.append(script_to_p2sh(p2wpkh))

        for i in uncompressed_spendable_address + uncompressed_solvable_address:
            v = self.nodes[0].validateaddress(i)
            if (v['isscript']):
                [bare, p2sh, p2wsh,
                 p2sh_p2wsh] = self.p2sh_address_to_script(v)
                # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
                unseen_anytime.extend([p2wsh, p2sh_p2wsh])
            else:
                [
                    p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh,
                    p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh
                ] = self.p2pkh_address_to_script(v)
                # P2WPKH, P2SH_P2WPKH with uncompressed keys are never seen
                unseen_anytime.extend([p2wpkh, p2sh_p2wpkh])

        for i in compressed_solvable_address:
            v = self.nodes[0].validateaddress(i)
            if (v['isscript']):
                # P2WSH multisig without private keys is seen after addwitnessaddress
                [bare, p2sh, p2wsh,
                 p2sh_p2wsh] = self.p2sh_address_to_script(v)
                solvable_after_addwitnessaddress.extend([p2wsh, p2sh_p2wsh])
                premature_witaddress.append(script_to_p2sh(p2wsh))
            else:
                [
                    p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh,
                    p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh
                ] = self.p2pkh_address_to_script(v)
                # P2WPKH and P2SH(P2WPKH) with compressed keys are seen after addwitnessaddress
                solvable_after_addwitnessaddress.extend([p2wpkh, p2sh_p2wpkh])
                premature_witaddress.append(script_to_p2sh(p2wpkh))

        self.mine_and_test_listunspent(
            spendable_after_addwitnessaddress +
            solvable_after_addwitnessaddress + unseen_anytime, 0)

        # addwitnessaddress should refuse to return a witness address if an uncompressed key is used
        # note that a multisig address returned by addmultisigaddress is not solvable until it is added with importaddress
        # premature_witaddress are not accepted until the script is added with addwitnessaddress first
        for i in uncompressed_spendable_address + uncompressed_solvable_address + premature_witaddress:
            # This will raise an exception
            assert_raises_rpc_error(
                -4,
                "Public key or redeemscript not known to wallet, or the key is uncompressed",
                self.nodes[0].addwitnessaddress, i)

        # after importaddress it should pass addwitnessaddress
        v = self.nodes[0].validateaddress(compressed_solvable_address[1])
        self.nodes[0].importaddress(v['hex'], "", False, True)
        for i in compressed_spendable_address + compressed_solvable_address + premature_witaddress:
            witaddress = self.nodes[0].addwitnessaddress(i)
            assert_equal(witaddress,
                         self.nodes[0].addwitnessaddress(witaddress))

        spendable_txid.append(
            self.mine_and_test_listunspent(spendable_after_addwitnessaddress,
                                           2))
        solvable_txid.append(
            self.mine_and_test_listunspent(solvable_after_addwitnessaddress,
                                           1))
        self.mine_and_test_listunspent(unseen_anytime, 0)

        # Check that spendable outputs are really spendable
        self.create_and_mine_tx_from_txids(spendable_txid)

        # import all the private keys so solvable addresses become spendable
        self.nodes[0].importprivkey(
            "cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb")
        self.nodes[0].importprivkey(
            "cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97")
        self.nodes[0].importprivkey(
            "91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV")
        self.nodes[0].importprivkey(
            "cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd")
        self.nodes[0].importprivkey(
            "cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66")
        self.nodes[0].importprivkey(
            "cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K")
        self.create_and_mine_tx_from_txids(solvable_txid)
Example #4
    def check_tx_relay(self):
        block_op_true = self.nodes[0].getblock(self.nodes[0].generatetoaddress(
            100, ADDRESS_BCRT1_P2WSH_OP_TRUE)[0])
        self.sync_all()

        self.log.debug(
            "Create a connection from a forcerelay peer that rebroadcasts raw txs"
        )
        # A test framework p2p connection is needed to send the raw transaction directly. If a full node was used, it could only
        # rebroadcast via the inv-getdata mechanism. However, even for forcerelay connections, a full node would
        # currently not request a txid that is already in the mempool.
        self.restart_node(1, extra_args=["-whitelist=forcerelay@127.0.0.1"])
        p2p_rebroadcast_wallet = self.nodes[1].add_p2p_connection(
            P2PDataStore())

        self.log.debug("Send a tx from the wallet initially")
        tx = tx_from_hex(
            self.nodes[0].createrawtransaction(
                inputs=[{
                    'txid': block_op_true['tx'][0],
                    'vout': 0,
                }],
                outputs=[{
                    ADDRESS_BCRT1_P2WSH_OP_TRUE: 5,
                }]))
        tx.wit.vtxinwit = [CTxInWitness()]
        tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([OP_TRUE])]
        txid = tx.rehash()

        self.log.debug("Wait until tx is in node[1]'s mempool")
        p2p_rebroadcast_wallet.send_txs_and_test([tx], self.nodes[1])

        self.log.debug(
            "Check that node[1] will send the tx to node[0] even though it is already in the mempool"
        )
        self.connect_nodes(1, 0)
        with self.nodes[1].assert_debug_log(
            ["Force relaying tx {} from peer=0".format(txid)]):
            p2p_rebroadcast_wallet.send_txs_and_test([tx], self.nodes[1])
            self.wait_until(lambda: txid in self.nodes[0].getrawmempool())

        self.log.debug(
            "Check that node[1] will not send an invalid tx to node[0]")
        tx.vout[0].nValue += 1
        txid = tx.rehash()
        # Send the transaction twice. The first time, it'll be rejected by ATMP because it conflicts
        # with a mempool transaction. The second time, it'll be in the m_recent_rejects filter.
        p2p_rebroadcast_wallet.send_txs_and_test(
            [tx],
            self.nodes[1],
            success=False,
            reject_reason='{} from peer=0 was not accepted: txn-mempool-conflict'
            .format(txid))

        p2p_rebroadcast_wallet.send_txs_and_test(
            [tx],
            self.nodes[1],
            success=False,
            reject_reason='Not relaying non-mempool transaction {} from forcerelay peer=0'.format(txid))
    def test_sequence_lock_confirmed_inputs(self):
        # Create lots of confirmed utxos, and use them to generate lots of random
        # transactions.
        max_outputs = 50
        addresses = []
        while len(addresses) < max_outputs:
            addresses.append(self.nodes[0].getnewaddress())
        import random
        while len(self.nodes[0].listunspent()) < 200:
            random.shuffle(addresses)
            num_outputs = random.randint(1, max_outputs)
            outputs = {}
            for i in range(num_outputs):
                outputs[addresses[i]] = random.randint(1, 20) * 0.01
            self.nodes[0].sendmany("", outputs)
            self.nodes[0].generate(1)

        utxos = self.nodes[0].listunspent()

        # Try creating a lot of random transactions.
        # Each time, choose a random number of inputs, and randomly set
        # some of those inputs to be sequence locked (and randomly choose
        # between height/time locking). Small random chance of making the locks
        # all pass.
        for i in range(400):
            # Randomly choose up to 10 inputs
            num_inputs = random.randint(1, 10)
            random.shuffle(utxos)

            # Track whether any sequence locks used should fail
            should_pass = True

            # Track whether this transaction was built with sequence locks
            using_sequence_locks = False

            tx = CTransaction()
            tx.nVersion = 2
            value = 0
            for j in range(num_inputs):
                sequence_value = 0xfffffffe  # this disables sequence locks

                # 50% chance we enable sequence locks
                if random.randint(0, 1):
                    using_sequence_locks = True

                    # 10% of the time, make the input sequence value pass
                    input_will_pass = (random.randint(1, 10) == 1)
                    sequence_value = utxos[j]["confirmations"]
                    if not input_will_pass:
                        sequence_value += 1
                        should_pass = False

                    # Figure out what the median-time-past was for the confirmed input
                    # Note that if an input has N confirmations, we're going back N blocks
                    # from the tip so that we're looking up MTP of the block
                    # PRIOR to the one the input appears in, as per the BIP68 spec.
                    orig_time = self.get_median_time_past(
                        utxos[j]["confirmations"])
                    cur_time = self.get_median_time_past(0)  # MTP of the tip
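                    # (get_median_time_past(k) is not shown in this excerpt; it is assumed to
                    # return the "mediantime" of the block k back from the current tip, e.g.
                    # getblockheader(getblockhash(getblockcount() - k))["mediantime"].)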

                    # can only timelock this input if it's not too old -- otherwise use height
                    can_time_lock = True
                    if ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY
                        ) >= SEQUENCE_LOCKTIME_MASK:
                        can_time_lock = False

                    # if time-lockable, then 50% chance we make this a time lock
                    if random.randint(0, 1) and can_time_lock:
                        # Find first time-lock value that fails, or latest one that succeeds
                        time_delta = sequence_value << SEQUENCE_LOCKTIME_GRANULARITY
                        if input_will_pass and time_delta > cur_time - orig_time:
                            sequence_value = ((cur_time - orig_time) >>
                                              SEQUENCE_LOCKTIME_GRANULARITY)
                        elif (not input_will_pass
                              and time_delta <= cur_time - orig_time):
                            sequence_value = (
                                (cur_time - orig_time) >>
                                SEQUENCE_LOCKTIME_GRANULARITY) + 1
                        sequence_value |= SEQUENCE_LOCKTIME_TYPE_FLAG
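                        # Worked example (illustrative): with SEQUENCE_LOCKTIME_GRANULARITY == 9,
                        # each sequence unit covers 512 seconds, so if the tip's MTP is 2048
                        # seconds past the input's MTP, the largest passing time-based value is
                        # 2048 >> 9 == 4, encoded as 4 | SEQUENCE_LOCKTIME_TYPE_FLAG.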
                tx.vin.append(
                    CTxIn(COutPoint(int(utxos[j]["txid"], 16),
                                    utxos[j]["vout"]),
                          nSequence=sequence_value))
                value += utxos[j]["amount"] * COIN
            # Overestimate the size of the tx - signatures should be less than 120 bytes, and leave 50 for the output
            tx_size = len(ToHex(tx)) // 2 + 120 * num_inputs + 50
            tx.vout.append(
                CTxOut(int(value - self.relayfee * tx_size * COIN / 1000),
                       CScript([b'a'])))
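            # Illustrative fee arithmetic: with self.relayfee == Decimal("0.00001") (coin/kB)
            # and a tx_size estimate of 1500 bytes, 0.00001 * 1500 * COIN / 1000 == 1500 base
            # units are withheld from the output value as fee.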
            rawtx = self.nodes[0].signrawtransactionwithwallet(
                ToHex(tx))["hex"]

            if (using_sequence_locks and not should_pass):
                # This transaction should be rejected
                assert_raises_rpc_error(-26, NOT_FINAL_ERROR,
                                        self.nodes[0].sendrawtransaction,
                                        rawtx)
            else:
                # This raw transaction should be accepted
                self.nodes[0].sendrawtransaction(rawtx)
                utxos = self.nodes[0].listunspent()
Example #6
    def run_test(self):
        p2p0 = self.nodes[0].add_p2p_connection(BaseNode())

        # Build the blockchain
        self.tip = int(self.nodes[0].getbestblockhash(), 16)
        self.block_time = self.nodes[0].getblock(
            self.nodes[0].getbestblockhash())['time'] + 1

        self.blocks = []

        # Get a pubkey for the coinbase TXO
        coinbase_key = CECKey()
        coinbase_key.set_secretbytes(b"horsebattery")
        coinbase_pubkey = coinbase_key.get_pubkey()

        # Create the first block with a coinbase output to our key
        height = 1
        block = create_block(self.tip, create_coinbase(height,
                                                       coinbase_pubkey),
                             self.block_time)
        self.blocks.append(block)
        self.block_time += 1
        block.solve()
        # Save the coinbase for later
        self.block1 = block
        self.tip = block.sha256
        height += 1

        # Bury the block 100 deep so the coinbase output is spendable
        for i in range(100):
            block = create_block(self.tip, create_coinbase(height),
                                 self.block_time)
            block.solve()
            self.blocks.append(block)
            self.tip = block.sha256
            self.block_time += 1
            height += 1

        # Create a transaction spending the coinbase output with an invalid (null) signature
        tx = CTransaction()
        tx.vin.append(
            CTxIn(COutPoint(self.block1.vtx[0].sha256, 0), scriptSig=b""))
        tx.vout.append(CTxOut(49 * 100000000, CScript([OP_TRUE])))
        tx.calc_sha256()

        block102 = create_block(self.tip, create_coinbase(height),
                                self.block_time)
        self.block_time += 1
        block102.vtx.extend([tx])
        block102.hashMerkleRoot = block102.calc_merkle_root()
        block102.rehash()
        block102.solve()
        self.blocks.append(block102)
        self.tip = block102.sha256
        self.block_time += 1
        height += 1

        # Bury the assumed valid block 2100 deep
        for i in range(2100):
            block = create_block(self.tip, create_coinbase(height),
                                 self.block_time)
            block.nVersion = 4
            block.solve()
            self.blocks.append(block)
            self.tip = block.sha256
            self.block_time += 1
            height += 1

        self.nodes[0].disconnect_p2ps()

        # Start node1 and node2 with assumevalid so they accept a block with a bad signature.
        self.start_node(1, extra_args=["-assumevalid=" + hex(block102.sha256)])
        self.start_node(2, extra_args=["-assumevalid=" + hex(block102.sha256)])

        p2p0 = self.nodes[0].add_p2p_connection(BaseNode())
        p2p1 = self.nodes[1].add_p2p_connection(BaseNode())
        p2p2 = self.nodes[2].add_p2p_connection(BaseNode())

        # send header lists to all three nodes
        p2p0.send_header_for_blocks(self.blocks[0:2000])
        p2p0.send_header_for_blocks(self.blocks[2000:])
        p2p1.send_header_for_blocks(self.blocks[0:2000])
        p2p1.send_header_for_blocks(self.blocks[2000:])
        p2p2.send_header_for_blocks(self.blocks[0:200])

        # Send blocks to node0. Block 102 will be rejected.
        self.send_blocks_until_disconnected(p2p0)
        self.assert_blockchain_height(self.nodes[0], 101)

        # Send all blocks to node1. All blocks will be accepted.
        for i in range(2202):
            p2p1.send_message(msg_block(self.blocks[i]))
        # Syncing 2200 blocks can take a while on slow systems. Give it plenty of time to sync.
        p2p1.sync_with_ping(120)
        assert_equal(
            self.nodes[1].getblock(self.nodes[1].getbestblockhash())['height'],
            2202)

        # Send blocks to node2. Block 102 will be rejected.
        self.send_blocks_until_disconnected(p2p2)
        self.assert_blockchain_height(self.nodes[2], 101)
Example #7
    def test_namescript_p2sh(self):
        """
        Tests how name prefixes interact with P2SH outputs and redeem scripts.
        """

        self.log.info("Testing name prefix and P2SH interactions...")

        # This test only needs a single node and no syncing.
        node = self.nodes[0]

        name = "d/p2sh"
        value = "value"
        new = node.name_new(name)
        node.generate(12)
        self.firstupdateName(0, name, new, value)
        node.generate(1)
        baseHeight = node.getblockcount()
        self.checkNameWithHeight(0, name, value, baseHeight)

        # Prepare some scripts and P2SH addresses we use later.  We build the
        # name script prefix for an update to our testname, so that we can build
        # P2SH redeem scripts with (or without) it.

        nameBytes = codecs.encode(name, 'ascii')
        valueBytes = codecs.encode(value, 'ascii')
        updOps = [OP_NAME_UPDATE, nameBytes, valueBytes, OP_2DROP, OP_DROP]
        anyoneOps = [OP_TRUE]

        updScript = CScript(updOps)
        anyoneScript = CScript(anyoneOps)
        updAndAnyoneScript = CScript(updOps + anyoneOps)

        anyoneAddr = self.getP2SH(0, anyoneScript)
        updAndAnyoneAddr = self.getP2SH(0, updAndAnyoneScript)
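        # updAndAnyoneScript is thus: OP_NAME_UPDATE <name> <value> OP_2DROP OP_DROP OP_TRUE,
        # i.e. the name-update prefix followed by an anyone-can-spend suffix.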

        # Send the name to the anyone-can-spend name-update script directly.
        # This is expected to update the name (verifies the update script is good).

        tx = CTransaction()
        tx.nVersion = NAMECOIN_TX_VERSION
        data = node.name_show(name)
        tx.vin.append(CTxIn(COutPoint(int(data['txid'], 16), data['vout'])))
        tx.vout.append(CTxOut(COIN // 100, updAndAnyoneScript))
        txHex = tx.serialize().hex()

        txHex = node.fundrawtransaction(txHex)['hex']
        signed = node.signrawtransactionwithwallet(txHex)
        assert signed['complete']
        node.sendrawtransaction(signed['hex'])

        node.generate(1)
        self.checkNameWithHeight(0, name, value, baseHeight + 1)

        # Send the name to the anyone-can-spend P2SH address.  This should just
        # work fine and update the name.
        self.updateAnyoneCanSpendName(0, name, "value2", anyoneAddr, [])
        node.generate(1)
        self.checkNameWithHeight(0, name, "value2", baseHeight + 2)

        # Send a coin to the P2SH address with name prefix.  This should just
        # work fine but not update the name.  We should be able to spend the coin
        # again from that address.

        txid = node.sendtoaddress(updAndAnyoneAddr, 2)
        tx = node.getrawtransaction(txid)
        ind = self.rawtxOutputIndex(0, tx, updAndAnyoneAddr)
        node.generate(1)

        ins = [{"txid": txid, "vout": ind}]
        addr = node.getnewaddress()
        out = {addr: 1}
        tx = node.createrawtransaction(ins, out)
        tx = self.setScriptSigOps(tx, 0, [updAndAnyoneScript])

        node.sendrawtransaction(tx, 0)
        node.generate(1)
        self.checkNameWithHeight(0, name, "value2", baseHeight + 2)

        found = False
        for u in node.listunspent():
            if u['address'] == addr and u['amount'] == 1:
                found = True
                break
        if not found:
            raise AssertionError("Coin not sent to expected address")

        # Send the name to the P2SH address with name prefix and then spend it
        # again.  Spending should work fine, and the name should just be updated
        # ordinarily; the name prefix of the redeem script should have no effect.
        self.updateAnyoneCanSpendName(0, name, "value3", updAndAnyoneAddr,
                                      [anyoneScript])
        node.generate(1)
        self.checkNameWithHeight(0, name, "value3", baseHeight + 5)
        self.updateAnyoneCanSpendName(0, name, "value4", anyoneAddr,
                                      [updAndAnyoneScript])
        node.generate(1)
        self.checkNameWithHeight(0, name, "value4", baseHeight + 6)
#!/usr/bin/env python3
# Copyright (c) 2019 Bitcoin Association
# Copyright (c) 2020* Jimmy N. Lose
# * Gregorian calendar years
# Distributed under the Open BSV software license, see the accompanying file LICENSE.

from test_framework.cdefs import MAX_TX_SIZE_CONSENSUS_BEFORE_GENESIS
from genesis_upgrade_tests.test_base import GenesisHeightTestsCaseBase, GenesisHeightBasedSimpleTestsCase
from genesis_upgrade_tests.tx_size_policy_limit import new_transaction, make_key
from test_framework.key import CECKey
from test_framework.mininode import CTransaction, COutPoint, CTxIn, CTxOut
from test_framework.script import CScript, OP_FALSE, OP_RETURN, SignatureHashForkId, SignatureHash, SIGHASH_ALL, \
    SIGHASH_FORKID, OP_CHECKSIG

# Output script used by spend transactions. Could be anything that is standard,
# but OP_FALSE OP_RETURN is the easiest to create.
SIMPLE_OUTPUT_SCRIPT = CScript([OP_FALSE, OP_RETURN])

class TxSizeConsensusCaseTest(GenesisHeightTestsCaseBase):

    NAME = "Max consensus tx size"
    _UTXO_KEY = make_key()
    ARGS = GenesisHeightTestsCaseBase.ARGS + ['-banscore=1000000', '-whitelist=127.0.0.1']

    def get_transactions_for_test(self, tx_collection, coinbases):
        if tx_collection.label == "PRE-GENESIS":
            utxos, data = self.utxos["PRE-GENESIS"]
            tx = new_transaction(self._UTXO_KEY, utxos.pop(0), MAX_TX_SIZE_CONSENSUS_BEFORE_GENESIS)
            tx_collection.add_tx(tx)
            tx = new_transaction(self._UTXO_KEY, utxos.pop(0), MAX_TX_SIZE_CONSENSUS_BEFORE_GENESIS + 1)
            tx_collection.add_tx(tx,
                                 p2p_reject_reason=b'flexible-bad-txns-oversize',
                                 block_reject_reason=b'bad-txns-oversize')
    def run_test(self):
        self.log.info("Mining blocks...")
        self.nodes[0].generate(1)
        self.nodes[1].generate(1)
        timestamp = self.nodes[1].getblock(
            self.nodes[1].getbestblockhash())['mediantime']

        node0_address1 = self.nodes[0].getaddressinfo(
            self.nodes[0].getnewaddress())

        # Check only one address
        assert_equal(node0_address1['ismine'], True)

        # Node 1 sync test
        assert_equal(self.nodes[1].getblockcount(), 1)

        # Address Test - before import
        address_info = self.nodes[1].getaddressinfo(node0_address1['address'])
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)

        # RPC importmulti -----------------------------------------------

        # Gthpcoin Address (implicit non-internal)
        self.log.info("Should import an address")
        key = get_key(self.nodes[0])
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": key.p2pkh_addr
                },
                "timestamp": "now"
            },
            success=True)
        test_address(self.nodes[1],
                     key.p2pkh_addr,
                     iswatchonly=True,
                     ismine=False,
                     timestamp=timestamp,
                     ischange=False)
        watchonly_address = key.p2pkh_addr
        watchonly_timestamp = timestamp

        self.log.info("Should not import an invalid address")
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": "not valid address"
                },
                "timestamp": "now"
            },
            success=False,
            error_code=-5,
            error_message='Invalid address \"not valid address\"')

        # ScriptPubKey + internal
        self.log.info("Should import a scriptPubKey with internal flag")
        key = get_key(self.nodes[0])
        self.test_importmulti(
            {
                "scriptPubKey": key.p2pkh_script,
                "timestamp": "now",
                "internal": True
            },
            success=True)
        test_address(self.nodes[1],
                     key.p2pkh_addr,
                     iswatchonly=True,
                     ismine=False,
                     timestamp=timestamp,
                     ischange=True)

        # ScriptPubKey + internal + label
        self.log.info(
            "Should not allow a label to be specified when internal is true")
        key = get_key(self.nodes[0])
        self.test_importmulti(
            {
                "scriptPubKey": key.p2pkh_script,
                "timestamp": "now",
                "internal": True,
                "label": "Example label"
            },
            success=False,
            error_code=-8,
            error_message='Internal addresses should not have a label')

        # Nonstandard scriptPubKey + !internal
        self.log.info(
            "Should not import a nonstandard scriptPubKey without internal flag"
        )
        nonstandardScriptPubKey = key.p2pkh_script + CScript([OP_NOP]).hex()
        key = get_key(self.nodes[0])
        self.test_importmulti(
            {
                "scriptPubKey": nonstandardScriptPubKey,
                "timestamp": "now"
            },
            success=False,
            error_code=-8,
            error_message=
            'Internal must be set to true for nonstandard scriptPubKey imports.'
        )
        test_address(self.nodes[1],
                     key.p2pkh_addr,
                     iswatchonly=False,
                     ismine=False,
                     timestamp=None)

        # Address + Public key + !Internal(explicit)
        self.log.info("Should import an address with public key")
        key = get_key(self.nodes[0])
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": key.p2pkh_addr
                },
                "timestamp": "now",
                "pubkeys": [key.pubkey],
                "internal": False
            },
            success=True,
            warnings=[
                "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."
            ])
        test_address(self.nodes[1],
                     key.p2pkh_addr,
                     iswatchonly=True,
                     ismine=False,
                     timestamp=timestamp)

        # ScriptPubKey + Public key + internal
        self.log.info(
            "Should import a scriptPubKey with internal and with public key")
        key = get_key(self.nodes[0])
        self.test_importmulti(
            {
                "scriptPubKey": key.p2pkh_script,
                "timestamp": "now",
                "pubkeys": [key.pubkey],
                "internal": True
            },
            success=True,
            warnings=[
                "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."
            ])
        test_address(self.nodes[1],
                     key.p2pkh_addr,
                     iswatchonly=True,
                     ismine=False,
                     timestamp=timestamp)

        # Nonstandard scriptPubKey + Public key + !internal
        self.log.info(
            "Should not import a nonstandard scriptPubKey without internal and with public key"
        )
        key = get_key(self.nodes[0])
        self.test_importmulti(
            {
                "scriptPubKey": nonstandardScriptPubKey,
                "timestamp": "now",
                "pubkeys": [key.pubkey]
            },
            success=False,
            error_code=-8,
            error_message=
            'Internal must be set to true for nonstandard scriptPubKey imports.'
        )
        test_address(self.nodes[1],
                     key.p2pkh_addr,
                     iswatchonly=False,
                     ismine=False,
                     timestamp=None)

        # Address + Private key + !watchonly
        self.log.info("Should import an address with private key")
        key = get_key(self.nodes[0])
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": key.p2pkh_addr
                },
                "timestamp": "now",
                "keys": [key.privkey]
            },
            success=True)
        test_address(self.nodes[1],
                     key.p2pkh_addr,
                     iswatchonly=False,
                     ismine=True,
                     timestamp=timestamp)

        self.log.info(
            "Should not import an address with private key if is already imported"
        )
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": key.p2pkh_addr
                },
                "timestamp": "now",
                "keys": [key.privkey]
            },
            success=False,
            error_code=-4,
            error_message=
            'The wallet already contains the private key for this address or script ("'
            + key.p2pkh_script + '")')

        # Address + Private key + watchonly
        self.log.info(
            "Should import an address with private key and with watchonly")
        key = get_key(self.nodes[0])
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": key.p2pkh_addr
                },
                "timestamp": "now",
                "keys": [key.privkey],
                "watchonly": True
            },
            success=True,
            warnings=[
                "All private keys are provided, outputs will be considered spendable. If this is intentional, do not specify the watchonly flag."
            ])
        test_address(self.nodes[1],
                     key.p2pkh_addr,
                     iswatchonly=False,
                     ismine=True,
                     timestamp=timestamp)

        # ScriptPubKey + Private key + internal
        self.log.info(
            "Should import a scriptPubKey with internal and with private key")
        key = get_key(self.nodes[0])
        self.test_importmulti(
            {
                "scriptPubKey": key.p2pkh_script,
                "timestamp": "now",
                "keys": [key.privkey],
                "internal": True
            },
            success=True)
        test_address(self.nodes[1],
                     key.p2pkh_addr,
                     iswatchonly=False,
                     ismine=True,
                     timestamp=timestamp)

        # Nonstandard scriptPubKey + Private key + !internal
        self.log.info(
            "Should not import a nonstandard scriptPubKey without internal and with private key"
        )
        key = get_key(self.nodes[0])
        self.test_importmulti(
            {
                "scriptPubKey": nonstandardScriptPubKey,
                "timestamp": "now",
                "keys": [key.privkey]
            },
            success=False,
            error_code=-8,
            error_message=
            'Internal must be set to true for nonstandard scriptPubKey imports.'
        )
        test_address(self.nodes[1],
                     key.p2pkh_addr,
                     iswatchonly=False,
                     ismine=False,
                     timestamp=None)

        # P2SH address
        multisig = get_multisig(self.nodes[0])
        self.nodes[1].generate(100)
        self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
        self.nodes[1].generate(1)
        timestamp = self.nodes[1].getblock(
            self.nodes[1].getbestblockhash())['mediantime']

        self.log.info("Should import a p2sh")
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": multisig.p2sh_addr
                },
                "timestamp": "now"
            },
            success=True)
        test_address(self.nodes[1],
                     multisig.p2sh_addr,
                     isscript=True,
                     iswatchonly=True,
                     timestamp=timestamp)
        p2shunspent = self.nodes[1].listunspent(0, 999999,
                                                [multisig.p2sh_addr])[0]
        assert_equal(p2shunspent['spendable'], False)
        assert_equal(p2shunspent['solvable'], False)

        # P2SH + Redeem script
        multisig = get_multisig(self.nodes[0])
        self.nodes[1].generate(100)
        self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
        self.nodes[1].generate(1)
        timestamp = self.nodes[1].getblock(
            self.nodes[1].getbestblockhash())['mediantime']

        self.log.info("Should import a p2sh with respective redeem script")
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": multisig.p2sh_addr
                },
                "timestamp": "now",
                "redeemscript": multisig.redeem_script
            },
            success=True,
            warnings=[
                "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."
            ])
        test_address(self.nodes[1],
                     multisig.p2sh_addr,
                     timestamp=timestamp,
                     iswatchonly=True,
                     ismine=False,
                     solvable=True)

        p2shunspent = self.nodes[1].listunspent(0, 999999,
                                                [multisig.p2sh_addr])[0]
        assert_equal(p2shunspent['spendable'], False)
        assert_equal(p2shunspent['solvable'], True)

        # P2SH + Redeem script + Private Keys + !Watchonly
        multisig = get_multisig(self.nodes[0])
        self.nodes[1].generate(100)
        self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
        self.nodes[1].generate(1)
        timestamp = self.nodes[1].getblock(
            self.nodes[1].getbestblockhash())['mediantime']

        self.log.info(
            "Should import a p2sh with respective redeem script and private keys"
        )
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": multisig.p2sh_addr
                },
                "timestamp": "now",
                "redeemscript": multisig.redeem_script,
                "keys": multisig.privkeys[0:2]
            },
            success=True,
            warnings=[
                "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."
            ])
        test_address(self.nodes[1],
                     multisig.p2sh_addr,
                     timestamp=timestamp,
                     ismine=False,
                     iswatchonly=True,
                     solvable=True)

        p2shunspent = self.nodes[1].listunspent(0, 999999,
                                                [multisig.p2sh_addr])[0]
        assert_equal(p2shunspent['spendable'], False)
        assert_equal(p2shunspent['solvable'], True)

        # P2SH + Redeem script + Private Keys + Watchonly
        multisig = get_multisig(self.nodes[0])
        self.nodes[1].generate(100)
        self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
        self.nodes[1].generate(1)
        timestamp = self.nodes[1].getblock(
            self.nodes[1].getbestblockhash())['mediantime']

        self.log.info(
            "Should import a p2sh with respective redeem script and private keys"
        )
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": multisig.p2sh_addr
                },
                "timestamp": "now",
                "redeemscript": multisig.redeem_script,
                "keys": multisig.privkeys[0:2],
                "watchonly": True
            },
            success=True)
        test_address(self.nodes[1],
                     multisig.p2sh_addr,
                     iswatchonly=True,
                     ismine=False,
                     solvable=True,
                     timestamp=timestamp)

        # Address + Public key + !Internal + Wrong pubkey
        self.log.info(
            "Should not import an address with the wrong public key as non-solvable"
        )
        key = get_key(self.nodes[0])
        wrong_key = get_key(self.nodes[0]).pubkey
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": key.p2pkh_addr
                },
                "timestamp": "now",
                "pubkeys": [wrong_key]
            },
            success=True,
            warnings=[
                "Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys, witnessscript, or redeemscript.",
                "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."
            ])
        test_address(self.nodes[1],
                     key.p2pkh_addr,
                     iswatchonly=True,
                     ismine=False,
                     solvable=False,
                     timestamp=timestamp)

        # ScriptPubKey + Public key + internal + Wrong pubkey
        self.log.info(
            "Should import a scriptPubKey with internal and with a wrong public key as non-solvable"
        )
        key = get_key(self.nodes[0])
        wrong_key = get_key(self.nodes[0]).pubkey
        self.test_importmulti(
            {
                "scriptPubKey": key.p2pkh_script,
                "timestamp": "now",
                "pubkeys": [wrong_key],
                "internal": True
            },
            success=True,
            warnings=[
                "Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys, witnessscript, or redeemscript.",
                "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."
            ])
        test_address(self.nodes[1],
                     key.p2pkh_addr,
                     iswatchonly=True,
                     ismine=False,
                     solvable=False,
                     timestamp=timestamp)

        # Address + Private key + !watchonly + Wrong private key
        self.log.info(
            "Should import an address with a wrong private key as non-solvable"
        )
        key = get_key(self.nodes[0])
        wrong_privkey = get_key(self.nodes[0]).privkey
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": key.p2pkh_addr
                },
                "timestamp": "now",
                "keys": [wrong_privkey]
            },
            success=True,
            warnings=[
                "Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys, witnessscript, or redeemscript.",
                "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."
            ])
        test_address(self.nodes[1],
                     key.p2pkh_addr,
                     iswatchonly=True,
                     ismine=False,
                     solvable=False,
                     timestamp=timestamp)

        # ScriptPubKey + Private key + internal + Wrong private key
        self.log.info(
            "Should import a scriptPubKey with internal and with a wrong private key as non-solvable"
        )
        key = get_key(self.nodes[0])
        wrong_privkey = get_key(self.nodes[0]).privkey
        self.test_importmulti(
            {
                "scriptPubKey": key.p2pkh_script,
                "timestamp": "now",
                "keys": [wrong_privkey],
                "internal": True
            },
            success=True,
            warnings=[
                "Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys, witnessscript, or redeemscript.",
                "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."
            ])
        test_address(self.nodes[1],
                     key.p2pkh_addr,
                     iswatchonly=True,
                     ismine=False,
                     solvable=False,
                     timestamp=timestamp)

        # Importing existing watch only address with new timestamp should replace saved timestamp.
        assert_greater_than(timestamp, watchonly_timestamp)
        self.log.info("Should replace previously saved watch only timestamp.")
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": watchonly_address
                },
                "timestamp": "now"
            },
            success=True)
        test_address(self.nodes[1],
                     watchonly_address,
                     iswatchonly=True,
                     ismine=False,
                     timestamp=timestamp)
        watchonly_timestamp = timestamp

        # restart nodes to check for proper serialization/deserialization of watch only address
        self.stop_nodes()
        self.start_nodes()
        test_address(self.nodes[1],
                     watchonly_address,
                     iswatchonly=True,
                     ismine=False,
                     timestamp=watchonly_timestamp)

        # Bad or missing timestamps
        self.log.info("Should throw on invalid or missing timestamp values")
        assert_raises_rpc_error(-3, 'Missing required timestamp field for key',
                                self.nodes[1].importmulti,
                                [{
                                    "scriptPubKey": key.p2pkh_script
                                }])
        assert_raises_rpc_error(
            -3,
            'Expected number or "now" timestamp value for key. got type string',
            self.nodes[1].importmulti, [{
                "scriptPubKey": key.p2pkh_script,
                "timestamp": ""
            }])

        # Import P2WPKH address as watch only
        self.log.info("Should import a P2WPKH address as watch only")
        key = get_key(self.nodes[0])
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": key.p2wpkh_addr
                },
                "timestamp": "now"
            },
            success=True)
        test_address(self.nodes[1],
                     key.p2wpkh_addr,
                     iswatchonly=True,
                     solvable=False)

        # Import P2WPKH address with public key but no private key
        self.log.info(
            "Should import a P2WPKH address and public key as solvable but not spendable"
        )
        key = get_key(self.nodes[0])
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": key.p2wpkh_addr
                },
                "timestamp": "now",
                "pubkeys": [key.pubkey]
            },
            success=True,
            warnings=[
                "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."
            ])
        test_address(self.nodes[1],
                     key.p2wpkh_addr,
                     ismine=False,
                     solvable=True)

        # Import P2WPKH address with key and check it is spendable
        self.log.info("Should import a P2WPKH address with key")
        key = get_key(self.nodes[0])
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": key.p2wpkh_addr
                },
                "timestamp": "now",
                "keys": [key.privkey]
            },
            success=True)
        test_address(self.nodes[1],
                     key.p2wpkh_addr,
                     iswatchonly=False,
                     ismine=True)

        # P2WSH multisig address without scripts or keys
        multisig = get_multisig(self.nodes[0])
        self.log.info(
            "Should import a p2wsh multisig as watch only without respective redeem script and private keys"
        )
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": multisig.p2wsh_addr
                },
                "timestamp": "now"
            },
            success=True)
        test_address(self.nodes[1], multisig.p2sh_addr, solvable=False)

        # Same P2WSH multisig address as above, but now with witnessscript + private keys
        self.log.info(
            "Should import a p2wsh with respective witness script and private keys"
        )
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": multisig.p2wsh_addr
                },
                "timestamp": "now",
                "witnessscript": multisig.redeem_script,
                "keys": multisig.privkeys
            },
            success=True)
        test_address(self.nodes[1],
                     multisig.p2sh_addr,
                     solvable=True,
                     ismine=True,
                     sigsrequired=2)

        # P2SH-P2WPKH address with no redeemscript or public or private key
        key = get_key(self.nodes[0])
        self.log.info(
            "Should import a p2sh-p2wpkh without redeem script or keys")
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": key.p2sh_p2wpkh_addr
                },
                "timestamp": "now"
            },
            success=True)
        test_address(self.nodes[1],
                     key.p2sh_p2wpkh_addr,
                     solvable=False,
                     ismine=False)

        # P2SH-P2WPKH address + redeemscript + public key with no private key
        self.log.info(
            "Should import a p2sh-p2wpkh with respective redeem script and pubkey as solvable"
        )
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": key.p2sh_p2wpkh_addr
                },
                "timestamp": "now",
                "redeemscript": key.p2sh_p2wpkh_redeem_script,
                "pubkeys": [key.pubkey]
            },
            success=True,
            warnings=[
                "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."
            ])
        test_address(self.nodes[1],
                     key.p2sh_p2wpkh_addr,
                     solvable=True,
                     ismine=False)

        # P2SH-P2WPKH address + redeemscript + private key
        key = get_key(self.nodes[0])
        self.log.info(
            "Should import a p2sh-p2wpkh with respective redeem script and private keys"
        )
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": key.p2sh_p2wpkh_addr
                },
                "timestamp": "now",
                "redeemscript": key.p2sh_p2wpkh_redeem_script,
                "keys": [key.privkey]
            },
            success=True)
        test_address(self.nodes[1],
                     key.p2sh_p2wpkh_addr,
                     solvable=True,
                     ismine=True)

        # P2SH-P2WSH multisig + redeemscript with no private key
        multisig = get_multisig(self.nodes[0])
        self.log.info(
            "Should import a p2sh-p2wsh with respective redeem script but no private key"
        )
        self.test_importmulti(
            {
                "scriptPubKey": {
                    "address": multisig.p2sh_p2wsh_addr
                },
                "timestamp": "now",
                "redeemscript": multisig.p2wsh_script,
                "witnessscript": multisig.redeem_script
            },
            success=True,
            warnings=[
                "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."
            ])
        test_address(self.nodes[1],
                     multisig.p2sh_p2wsh_addr,
                     solvable=True,
                     ismine=False)

        # Test importing of a P2SH-P2WPKH address via descriptor + private key
        key = get_key(self.nodes[0])
        self.log.info(
            "Should not import a p2sh-p2wpkh address from descriptor without checksum and private key"
        )
        self.test_importmulti(
            {
                "desc": "sh(wpkh(" + key.pubkey + "))",
                "timestamp": "now",
                "label": "Descriptor import test",
                "keys": [key.privkey]
            },
            success=False,
            error_code=-5,
            error_message="Descriptor is invalid")

        # Test importing of a P2SH-P2WPKH address via descriptor + private key
        key = get_key(self.nodes[0])
        self.log.info(
            "Should import a p2sh-p2wpkh address from descriptor and private key"
        )
        self.test_importmulti(
            {
                "desc": descsum_create("sh(wpkh(" + key.pubkey + "))"),
                "timestamp": "now",
                "label": "Descriptor import test",
                "keys": [key.privkey]
            },
            success=True)
        test_address(self.nodes[1],
                     key.p2sh_p2wpkh_addr,
                     solvable=True,
                     ismine=True,
                     label="Descriptor import test")

        # Test ranged descriptor fails if range is not specified
        xpriv = "tprv8ZgxMBicQKsPeuVhWwi6wuMQGfPKi9Li5GtX35jVNknACgqe3CY4g5xgkfDDJcmtF7o1QnxWDRYw4H5P26PXq7sbcUkEqeR4fg3Kxp2tigg"
        addresses = [
            "2N7yv4p8G8yEaPddJxY41kPihnWvs39qCMf",
            "2MsHxyb2JS3pAySeNUsJ7mNnurtpeenDzLA"
        ]  # hdkeypath=m/0'/0'/0' and 1'
        desc = "sh(wpkh(" + xpriv + "/0'/0'/*'" + "))"
        self.log.info(
            "Ranged descriptor import should fail without a specified range")
        self.test_importmulti(
            {
                "desc": descsum_create(desc),
                "timestamp": "now"
            },
            success=False,
            error_code=-8,
            error_message='Descriptor is ranged, please specify the range')

        # Test importing of a ranged descriptor without keys
        self.log.info(
            "Should import the ranged descriptor with specified range as solvable"
        )
        self.test_importmulti(
            {
                "desc": descsum_create(desc),
                "timestamp": "now",
                "range": 1
            },
            success=True,
            warnings=[
                "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."
            ])
        for address in addresses:
            test_address(self.nodes[1], address, solvable=True)

        self.test_importmulti(
            {
                "desc": descsum_create(desc),
                "timestamp": "now",
                "range": -1
            },
            success=False,
            error_code=-8,
            error_message='End of range is too high')

        self.test_importmulti(
            {
                "desc": descsum_create(desc),
                "timestamp": "now",
                "range": [-1, 10]
            },
            success=False,
            error_code=-8,
            error_message='Range should be greater or equal than 0')

        self.test_importmulti(
            {
                "desc": descsum_create(desc),
                "timestamp": "now",
                "range": [(2 << 31 + 1) - 1000000, (2 << 31 + 1)]
            },
            success=False,
            error_code=-8,
            error_message='End of range is too high')

        self.test_importmulti(
            {
                "desc": descsum_create(desc),
                "timestamp": "now",
                "range": [2, 1]
            },
            success=False,
            error_code=-8,
            error_message=
            'Range specified as [begin,end] must not have begin after end')

        self.test_importmulti(
            {
                "desc": descsum_create(desc),
                "timestamp": "now",
                "range": [0, 1000001]
            },
            success=False,
            error_code=-8,
            error_message='Range is too large')

        # Test importing of a P2PKH address via descriptor
        key = get_key(self.nodes[0])
        self.log.info("Should import a p2pkh address from descriptor")
        self.test_importmulti(
            {
                "desc": descsum_create("pkh(" + key.pubkey + ")"),
                "timestamp": "now",
                "label": "Descriptor import test"
            },
            True,
            warnings=[
                "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."
            ])
        test_address(self.nodes[1],
                     key.p2pkh_addr,
                     solvable=True,
                     ismine=False,
                     label="Descriptor import test")

        # Test import fails if both desc and scriptPubKey are provided
        key = get_key(self.nodes[0])
        self.log.info(
            "Import should fail if both scriptPubKey and desc are provided")
        self.test_importmulti(
            {
                "desc": descsum_create("pkh(" + key.pubkey + ")"),
                "scriptPubKey": {
                    "address": key.p2pkh_addr
                },
                "timestamp": "now"
            },
            success=False,
            error_code=-8,
            error_message=
            'Both a descriptor and a scriptPubKey should not be provided.')

        # Test import fails if neither desc nor scriptPubKey are present
        key = get_key(self.nodes[0])
        self.log.info(
            "Import should fail if neither a descriptor nor a scriptPubKey are provided"
        )
        self.test_importmulti(
            {"timestamp": "now"},
            success=False,
            error_code=-8,
            error_message=
            'Either a descriptor or scriptPubKey must be provided.')

        # Test importing of a multisig via descriptor
        key1 = get_key(self.nodes[0])
        key2 = get_key(self.nodes[0])
        self.log.info("Should import a 1-of-2 bare multisig from descriptor")
        self.test_importmulti(
            {
                "desc":
                descsum_create("multi(1," + key1.pubkey + "," + key2.pubkey +
                               ")"),
                "timestamp":
                "now"
            },
            success=True,
            warnings=[
                "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."
            ])
        self.log.info(
            "Should not treat individual keys from the imported bare multisig as watchonly"
        )
        test_address(self.nodes[1],
                     key1.p2pkh_addr,
                     ismine=False,
                     iswatchonly=False)

        # Import pubkeys with key origin info
        self.log.info(
            "Addresses should have hd keypath and master key id after import with key origin"
        )
        pub_addr = self.nodes[1].getnewaddress()
        info = self.nodes[1].getaddressinfo(pub_addr)
        pub = info['pubkey']
        pub_keypath = info['hdkeypath']
        pub_fpr = info['hdmasterfingerprint']
        result = self.nodes[0].importmulti([{
            'desc':
            descsum_create("wpkh([" + pub_fpr + pub_keypath[1:] + "]" + pub +
                           ")"),
            "timestamp":
            "now",
        }])
        assert result[0]['success']
        pub_import_info = self.nodes[0].getaddressinfo(pub_addr)
        assert_equal(pub_import_info['hdmasterfingerprint'], pub_fpr)
        assert_equal(pub_import_info['pubkey'], pub)
        assert_equal(pub_import_info['hdkeypath'], pub_keypath)
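        # The descriptor built above embeds key-origin information as
        # "wpkh([<master fingerprint><derivation path>]<pubkey>)", e.g.
        # "wpkh([d34db33f/0'/0'/0']02...)" (fingerprint and path here are
        # illustrative only). That origin block is what lets getaddressinfo
        # report hdkeypath and hdmasterfingerprint after the import.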

        # Import privkeys with key origin info
        priv_addr = self.nodes[1].getnewaddress()
        info = self.nodes[1].getaddressinfo(priv_addr)
        priv = self.nodes[1].dumpprivkey(priv_addr)
        priv_keypath = info['hdkeypath']
        priv_fpr = info['hdmasterfingerprint']
        result = self.nodes[0].importmulti([{
            'desc':
            descsum_create("wpkh([" + priv_fpr + priv_keypath[1:] + "]" +
                           priv + ")"),
            "timestamp":
            "now",
        }])
        assert result[0]['success']
        priv_import_info = self.nodes[0].getaddressinfo(priv_addr)
        assert_equal(priv_import_info['hdmasterfingerprint'], priv_fpr)
        assert_equal(priv_import_info['hdkeypath'], priv_keypath)

        # Make sure the key origin info are still there after a restart
        self.stop_nodes()
        self.start_nodes()
        import_info = self.nodes[0].getaddressinfo(pub_addr)
        assert_equal(import_info['hdmasterfingerprint'], pub_fpr)
        assert_equal(import_info['hdkeypath'], pub_keypath)
        import_info = self.nodes[0].getaddressinfo(priv_addr)
        assert_equal(import_info['hdmasterfingerprint'], priv_fpr)
        assert_equal(import_info['hdkeypath'], priv_keypath)

        # Check legacy import does not import key origin info
        self.log.info("Legacy imports don't have key origin info")
        pub_addr = self.nodes[1].getnewaddress()
        info = self.nodes[1].getaddressinfo(pub_addr)
        pub = info['pubkey']
        result = self.nodes[0].importmulti([{
            'scriptPubKey': {
                'address': pub_addr
            },
            'pubkeys': [pub],
            "timestamp": "now",
        }])
        assert result[0]['success']
        pub_import_info = self.nodes[0].getaddressinfo(pub_addr)
        assert_equal(pub_import_info['pubkey'], pub)
        assert 'hdmasterfingerprint' not in pub_import_info
        assert 'hdkeypath' not in pub_import_info

        # Import some public keys to the keypool of a no privkey wallet
        self.log.info("Adding pubkey to keypool of disableprivkey wallet")
        self.nodes[1].createwallet(wallet_name="noprivkeys",
                                   disable_private_keys=True)
        wrpc = self.nodes[1].get_wallet_rpc("noprivkeys")

        addr1 = self.nodes[0].getnewaddress()
        addr2 = self.nodes[0].getnewaddress()
        pub1 = self.nodes[0].getaddressinfo(addr1)['pubkey']
        pub2 = self.nodes[0].getaddressinfo(addr2)['pubkey']
        result = wrpc.importmulti([{
            'desc': descsum_create('wpkh(' + pub1 + ')'),
            'keypool': True,
            "timestamp": "now",
        }, {
            'desc': descsum_create('wpkh(' + pub2 + ')'),
            'keypool': True,
            "timestamp": "now",
        }])
        assert result[0]['success']
        assert result[1]['success']
        assert_equal(wrpc.getwalletinfo()["keypoolsize"], 2)
        newaddr1 = wrpc.getnewaddress()
        assert_equal(addr1, newaddr1)
        newaddr2 = wrpc.getnewaddress()
        assert_equal(addr2, newaddr2)

        # Import some public keys to the internal keypool of a no privkey wallet
        self.log.info(
            "Adding pubkey to internal keypool of disableprivkey wallet")
        addr1 = self.nodes[0].getnewaddress()
        addr2 = self.nodes[0].getnewaddress()
        pub1 = self.nodes[0].getaddressinfo(addr1)['pubkey']
        pub2 = self.nodes[0].getaddressinfo(addr2)['pubkey']
        result = wrpc.importmulti([{
            'desc': descsum_create('wpkh(' + pub1 + ')'),
            'keypool': True,
            'internal': True,
            "timestamp": "now",
        }, {
            'desc': descsum_create('wpkh(' + pub2 + ')'),
            'keypool': True,
            'internal': True,
            "timestamp": "now",
        }])
        assert result[0]['success']
        assert result[1]['success']
        assert_equal(wrpc.getwalletinfo()["keypoolsize_hd_internal"], 2)
        newaddr1 = wrpc.getrawchangeaddress()
        assert_equal(addr1, newaddr1)
        newaddr2 = wrpc.getrawchangeaddress()
        assert_equal(addr2, newaddr2)

        # Import a multisig and make sure the keys don't go into the keypool
        self.log.info(
            'Imported scripts with pubkeys should not have their pubkeys go into the keypool'
        )
        addr1 = self.nodes[0].getnewaddress()
        addr2 = self.nodes[0].getnewaddress()
        pub1 = self.nodes[0].getaddressinfo(addr1)['pubkey']
        pub2 = self.nodes[0].getaddressinfo(addr2)['pubkey']
        result = wrpc.importmulti([{
            'desc':
            descsum_create('wsh(multi(2,' + pub1 + ',' + pub2 + '))'),
            'keypool':
            True,
            "timestamp":
            "now",
        }])
        assert result[0]['success']
        assert_equal(wrpc.getwalletinfo()["keypoolsize"], 0)

        # Cannot import those pubkeys to keypool of wallet with privkeys
        self.log.info(
            "Pubkeys cannot be added to the keypool of a wallet with private keys"
        )
        wrpc = self.nodes[1].get_wallet_rpc("")
        assert wrpc.getwalletinfo()['private_keys_enabled']
        result = wrpc.importmulti([{
            'desc': descsum_create('wpkh(' + pub1 + ')'),
            'keypool': True,
            "timestamp": "now",
        }])
        assert_equal(result[0]['error']['code'], -8)
        assert_equal(
            result[0]['error']['message'],
            "Keys can only be imported to the keypool when private keys are disabled"
        )

        # Make sure ranged imports import keys in order
        self.log.info('Key ranges should be imported in order')
        wrpc = self.nodes[1].get_wallet_rpc("noprivkeys")
        assert_equal(wrpc.getwalletinfo()["keypoolsize"], 0)
        assert_equal(wrpc.getwalletinfo()["private_keys_enabled"], False)
        xpub = "tpubDAXcJ7s7ZwicqjprRaEWdPoHKrCS215qxGYxpusRLLmJuT69ZSicuGdSfyvyKpvUNYBW1s2U3NSrT6vrCYB9e6nZUEvrqnwXPF8ArTCRXMY"
        addresses = [
            'bcrt1qtmp74ayg7p24uslctssvjm06q5phz4yrxucgnv',  # m/0'/0'/0
            'bcrt1q8vprchan07gzagd5e6v9wd7azyucksq2xc76k8',  # m/0'/0'/1
            'bcrt1qtuqdtha7zmqgcrr26n2rqxztv5y8rafjp9lulu',  # m/0'/0'/2
            'bcrt1qau64272ymawq26t90md6an0ps99qkrse58m640',  # m/0'/0'/3
            'bcrt1qsg97266hrh6cpmutqen8s4s962aryy77jp0fg0',  # m/0'/0'/4
        ]
        result = wrpc.importmulti([{
            'desc':
            descsum_create('wpkh([80002067/0h/0h]' + xpub + '/*)'),
            'keypool':
            True,
            'timestamp':
            'now',
            'range': [0, 4],
        }])
        for i in range(0, 5):
            addr = wrpc.getnewaddress('', 'bech32')
            assert_equal(addr, addresses[i])
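
The walkthrough above leans on several helpers that are not shown in this snippet (get_key, get_multisig, descsum_create, test_address). As an illustration only, here is a minimal checker with the same shape as the test_address calls above; the name check_address and the exact assertion messages are assumptions, not the real helper.

def check_address(node, address, **expected):
    """Assert selected getaddressinfo fields for address (sketch of what test_address is assumed to do)."""
    info = node.getaddressinfo(address)
    for field, value in expected.items():
        if value is None:
            # A None expectation means the field must be absent entirely.
            assert field not in info, "unexpected field %s in getaddressinfo" % field
        else:
            assert info[field] == value, "%s: got %r, expected %r" % (field, info[field], value)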
Example #10
def pk(hex_key):
    """Construct a script expression for taproot_construct for pk(hex_key)."""
    return (None, CScript([bytes.fromhex(hex_key), OP_CHECKSIG]))
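
A quick usage note, inferred from the docstring rather than verified against a particular taproot_construct implementation: each (name, script) pair returned by pk() is assumed to be passed as one leaf of the script tree, with the None name meaning the leaf is unnamed.

# Illustration only (placeholder key, assumed call shape):
# leaf = pk("aa" * 32)                      # -> (None, CScript([<32-byte key>, OP_CHECKSIG]))
# tree_info = taproot_construct(internal_key, [leaf])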
Example #11
    def test_opt_in(self):
        """Replacing should only work if orig tx opted in"""
        tx0_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN))

        # Create a non-opting in transaction
        tx1a = CTransaction()
        tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0xffffffff)]
        tx1a.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))]
        tx1a_hex = txToHex(tx1a)
        tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True)

        # Shouldn't be able to double-spend
        tx1b = CTransaction()
        tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1b.vout = [CTxOut(int(0.9 * COIN), CScript([b'b' * 35]))]
        tx1b_hex = txToHex(tx1b)

        # This will raise an exception
        assert_raises_rpc_error(-26, "txn-mempool-conflict",
                                self.nodes[0].sendrawtransaction, tx1b_hex,
                                True)

        tx1_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN))

        # Create a different non-opting in transaction
        tx2a = CTransaction()
        tx2a.vin = [CTxIn(tx1_outpoint, nSequence=0xfffffffe)]
        tx2a.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))]
        tx2a_hex = txToHex(tx2a)
        tx2a_txid = self.nodes[0].sendrawtransaction(tx2a_hex, True)

        # Still shouldn't be able to double-spend
        tx2b = CTransaction()
        tx2b.vin = [CTxIn(tx1_outpoint, nSequence=0)]
        tx2b.vout = [CTxOut(int(0.9 * COIN), CScript([b'b' * 35]))]
        tx2b_hex = txToHex(tx2b)

        # This will raise an exception
        assert_raises_rpc_error(-26, "txn-mempool-conflict",
                                self.nodes[0].sendrawtransaction, tx2b_hex,
                                True)

        # Now create a new transaction that spends from tx1a and tx2a
        # opt-in on one of the inputs
        # Transaction should be replaceable on either input

        tx1a_txid = int(tx1a_txid, 16)
        tx2a_txid = int(tx2a_txid, 16)

        tx3a = CTransaction()
        tx3a.vin = [
            CTxIn(COutPoint(tx1a_txid, 0), nSequence=0xffffffff),
            CTxIn(COutPoint(tx2a_txid, 0), nSequence=0xfffffffd)
        ]
        tx3a.vout = [
            CTxOut(int(0.9 * COIN), CScript([b'c'])),
            CTxOut(int(0.9 * COIN), CScript([b'd']))
        ]
        tx3a_hex = txToHex(tx3a)

        self.nodes[0].sendrawtransaction(tx3a_hex, True)

        tx3b = CTransaction()
        tx3b.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0)]
        tx3b.vout = [CTxOut(int(0.5 * COIN), CScript([b'e' * 35]))]
        tx3b_hex = txToHex(tx3b)

        tx3c = CTransaction()
        tx3c.vin = [CTxIn(COutPoint(tx2a_txid, 0), nSequence=0)]
        tx3c.vout = [CTxOut(int(0.5 * COIN), CScript([b'f' * 35]))]
        tx3c_hex = txToHex(tx3c)

        self.nodes[0].sendrawtransaction(tx3b_hex, True)
        # If tx3b was accepted, tx3c won't look like a replacement,
        # but make sure it is accepted anyway
        self.nodes[0].sendrawtransaction(tx3c_hex, True)
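
test_opt_in relies on two helpers that are not shown in this snippet: txToHex (hex-serialize a transaction for sendrawtransaction) and make_utxo (create a confirmed outpoint holding a given value). The sketch below captures those assumptions only; the names, the BTC-denominated amount, and the lookup by output value are illustrative, not the real helpers.

from decimal import Decimal
from test_framework.mininode import ToHex, COutPoint

def tx_to_hex(tx):
    # Hex-serialize a CTransaction (what txToHex is assumed to do).
    return ToHex(tx)

def make_confirmed_utxo(node, amount_btc):
    # Simplified stand-in for make_utxo: fund a fresh address, locate the matching
    # output while the transaction is still in the mempool, then confirm it.
    addr = node.getnewaddress()
    txid = node.sendtoaddress(addr, amount_btc)
    raw = node.getrawtransaction(txid, True)
    n = next(out['n'] for out in raw['vout'] if out['value'] == Decimal(str(amount_btc)))
    node.generate(1)
    return COutPoint(int(txid, 16), n)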
Example #12
    def next_block(self,
                   number,
                   spend=None,
                   additional_coinbase_value=0,
                   script=None,
                   block_size=0):
        if self.tip is None:
            base_block_hash = self.genesis_hash
        else:
            base_block_hash = self.tip.sha256
        # First create the coinbase
        height = self.block_heights[base_block_hash] + 1
        coinbase = create_coinbase(height, self.coinbase_pubkey)
        coinbase.vout[0].nValue += additional_coinbase_value
        if spend is not None:
            coinbase.vout[0].nValue += spend.tx.vout[
                spend.n].nValue - 1  # all but one satoshi to fees
        coinbase.rehash()
        block = create_block(base_block_hash, coinbase, self.block_time)
        spendable_output = None
        if spend is not None:
            tx = CTransaction()
            tx.vin.append(
                CTxIn(COutPoint(spend.tx.sha256, spend.n), b"",
                      0xffffffff))  # no signature yet
            # This copies the java comparison tool testing behavior: the first
            # txout has a garbage scriptPubKey, "to make sure we're not
            # pre-verifying too much" (?)
            tx.vout.append(
                CTxOut(0, CScript([random.randint(0, 255), height & 255])))
            if script is None:
                tx.vout.append(CTxOut(1, CScript([OP_TRUE])))
            else:
                tx.vout.append(CTxOut(1, script))
            spendable_output = PreviousSpendableOutput(tx, 0)

            # Now sign it if necessary
            scriptSig = b""
            scriptPubKey = bytearray(spend.tx.vout[spend.n].scriptPubKey)
            if (scriptPubKey[0] == OP_TRUE):  # looks like an anyone-can-spend
                scriptSig = CScript([OP_TRUE])
            else:
                # We have to actually sign it
                (sighash,
                 err) = SignatureHash(spend.tx.vout[spend.n].scriptPubKey, tx,
                                      0, SIGHASH_ALL)
                scriptSig = CScript([
                    self.coinbase_key.sign(sighash) +
                    bytes(bytearray([SIGHASH_ALL]))
                ])
            tx.vin[0].scriptSig = scriptSig
            # Now add the transaction to the block
            block = self.add_transactions_to_block(block, [tx])
        if spendable_output is not None and block_size > 0:
            while len(block.serialize()) < block_size:
                tx = CTransaction()
                script_length = block_size - len(block.serialize()) - 79
                if script_length > 510000:
                    script_length = 500000
                script_output = CScript([b'\x00' * script_length])
                tx.vout.append(CTxOut(0, CScript([OP_TRUE])))
                tx.vout.append(CTxOut(0, script_output))
                tx.vin.append(
                    CTxIn(
                        COutPoint(spendable_output.tx.sha256,
                                  spendable_output.n)))
                spendable_output = PreviousSpendableOutput(tx, 0)
                block = self.add_transactions_to_block(block, [tx])
            # Make sure the math above worked out to produce the correct block size
            # (the math will fail if there are too many transactions in the block)
            assert_equal(len(block.serialize()), block_size)
        block.solve()
        self.tip = block
        self.block_heights[block.sha256] = height
        self.block_time += 1
        assert number not in self.blocks
        self.blocks[number] = block
        return block
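
next_block above assumes a small container type for "an output we can spend in a later block". Judging purely from how it is used (spend.tx, spend.n), it presumably looks something like the sketch below; the default value of n is an assumption.

class PreviousSpendableOutput(object):
    """Reference to output n of transaction tx, kept so a later block can spend it (sketch)."""
    def __init__(self, tx, n=-1):
        self.tx = tx  # CTransaction that created the output
        self.n = n    # index of the output within tx.vout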
Example #13
    def get_tests(self):
        self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
        self.block_heights[self.genesis_hash] = 0
        spendable_outputs = []

        # save the current tip so it can be spent by a later block
        def save_spendable_output():
            spendable_outputs.append(self.tip)

        # get an output that we previously marked as spendable
        def get_spendable_output():
            return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)

        # returns a test case that asserts that the current tip was accepted
        def accepted():
            return TestInstance([[self.tip, True]])

        # returns a test case that asserts that the current tip was rejected
        def rejected(reject=None):
            if reject is None:
                return TestInstance([[self.tip, False]])
            else:
                return TestInstance([[self.tip, reject]])

        # move the tip back to a previous block
        def tip(number):
            self.tip = self.blocks[number]

        # add transactions to a block produced by next_block
        def update_block(block_number, new_transactions):
            block = self.blocks[block_number]
            old_hash = block.sha256
            self.add_transactions_to_block(block, new_transactions)
            block.solve()
            # Update the internal state just like in next_block
            self.tip = block
            self.block_heights[block.sha256] = self.block_heights[old_hash]
            del self.block_heights[old_hash]
            self.blocks[block_number] = block
            return block

        # creates a new block and advances the tip to that block
        block = self.next_block

        # Create a new block
        block(0)
        save_spendable_output()
        yield accepted()

        # Now we need that block to mature so we can spend the coinbase.
        test = TestInstance(sync_every_block=False)
        for i in range(99):
            block(1000 + i)
            test.blocks_and_transactions.append([self.tip, True])
            save_spendable_output()
        yield test

        # Start by building a couple of blocks on top (which output is spent is
        # in parentheses):
        #     genesis -> b1 (0) -> b2 (1)
        out0 = get_spendable_output()
        block(1, spend=out0)
        save_spendable_output()
        yield accepted()

        out1 = get_spendable_output()
        b2 = block(2, spend=out1)
        yield accepted()

        # so fork like this:
        #
        #     genesis -> b1 (0) -> b2 (1)
        #                      \-> b3 (1)
        #
        # Nothing should happen at this point. We saw b2 first so it takes priority.
        tip(1)
        b3 = block(3, spend=out1)
        txout_b3 = PreviousSpendableOutput(b3.vtx[1], 1)
        yield rejected()

        # Now we add another block to make the alternative chain longer.
        #
        #     genesis -> b1 (0) -> b2 (1)
        #                      \-> b3 (1) -> b4 (2)
        out2 = get_spendable_output()
        block(4, spend=out2)
        yield accepted()

        # ... and back to the first chain.
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                      \-> b3 (1) -> b4 (2)
        tip(2)
        block(5, spend=out2)
        save_spendable_output()
        yield rejected()

        out3 = get_spendable_output()
        block(6, spend=out3)
        yield accepted()

        # Try to create a fork that double-spends
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                                          \-> b7 (2) -> b8 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        block(7, spend=out2)
        yield rejected()

        out4 = get_spendable_output()
        block(8, spend=out4)
        yield rejected()

        # Try to create a block that has too much fee
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                                                    \-> b9 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(6)
        block(9, spend=out4, additional_coinbase_value=1)
        yield rejected(RejectResult(16, b'bad-cb-amount'))

        # Create a fork that ends in a block with too much fee (the one that causes the reorg)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b10 (3) -> b11 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        block(10, spend=out3)
        yield rejected()

        block(11, spend=out4, additional_coinbase_value=1)
        yield rejected(RejectResult(16, b'bad-cb-amount'))

        # Try again, but with a valid fork first
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b14 (5)
        #                                              (b12 added last)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        b12 = block(12, spend=out3)
        save_spendable_output()
        #yield TestInstance([[b12, False]])
        b13 = block(13, spend=out4)
        # Deliver the block header for b12, and the block b13.
        # b13 should be accepted but the tip won't advance until b12 is delivered.
        yield TestInstance([[CBlockHeader(b12), None], [b13, False]])

        save_spendable_output()
        out5 = get_spendable_output()
        # b14 is invalid, but the node won't know that until it tries to connect
        # Tip still can't advance because b12 is missing
        block(14, spend=out5, additional_coinbase_value=1)
        yield rejected()

        yield TestInstance([[b12, True, b13.sha256]])  # New tip should be b13.

        # Add a block with MAX_BLOCK_SIGOPS_PER_MB and one with one more sigop
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b16 (6)
        #                      \-> b3 (1) -> b4 (2)

        # Test that a block with a lot of checksigs is okay
        lots_of_checksigs = CScript([OP_CHECKSIG] *
                                    (MAX_BLOCK_SIGOPS_PER_MB - 1))
        tip(13)
        block(15, spend=out5, script=lots_of_checksigs)
        yield accepted()

        # Test that a block with too many checksigs is rejected
        out6 = get_spendable_output()
        too_many_checksigs = CScript([OP_CHECKSIG] * MAX_BLOCK_SIGOPS_PER_MB)
        block(16, spend=out6, script=too_many_checksigs)
        yield rejected(RejectResult(16, b'bad-blk-sigops'))

        # Attempt to spend a transaction created on a different fork
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b17 (b3.vtx[1])
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        block(17, spend=txout_b3)
        yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))

        # Attempt to spend a transaction created on a different fork (on a fork this time)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5)
        #                                                                \-> b18 (b3.vtx[1]) -> b19 (6)
        #                      \-> b3 (1) -> b4 (2)
        tip(13)
        block(18, spend=txout_b3)
        yield rejected()

        block(19, spend=out6)
        yield rejected()

        # Attempt to spend a coinbase at depth too low
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b20 (7)
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        out7 = get_spendable_output()
        block(20, spend=out7)
        yield rejected(
            RejectResult(16, b'bad-txns-premature-spend-of-coinbase'))

        # Attempt to spend a coinbase at depth too low (on a fork this time)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5)
        #                                                                \-> b21 (6) -> b22 (5)
        #                      \-> b3 (1) -> b4 (2)
        tip(13)
        block(21, spend=out6)
        yield rejected()

        block(22, spend=out5)
        yield rejected()

        # Create a block on either side of MAX_BLOCK_SIZE and make sure it's accepted/rejected
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6)
        #                                                                           \-> b24 (6) -> b25 (7)
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        block(23, spend=out6, block_size=MAX_BLOCK_SIZE)
        yield accepted()

        # Make the next block one byte bigger and check that it fails
        tip(15)
        block(24, spend=out6, block_size=MAX_BLOCK_SIZE + 1)
        yield rejected(RejectResult(19, b'bad-blk-length'))

        b25 = block(25, spend=out7)
        yield rejected()

        # Create blocks with a coinbase input script size out of range
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7)
        #                                                                           \-> ... (6) -> ... (7)
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        b26 = block(26, spend=out6)
        b26.vtx[0].vin[0].scriptSig = b'\x00'
        b26.vtx[0].rehash()
        # update_block causes the merkle root to get updated, even with no new
        # transactions, and updates the required state.
        b26 = update_block(26, [])
        yield rejected(RejectResult(16, b'bad-cb-length'))

        # Extend the b26 chain to make sure bitcoind isn't accepting b26
        b27 = block(27, spend=out7)
        yield rejected()

        # Now try a too-large-coinbase script
        tip(15)
        b28 = block(28, spend=out6)
        b28.vtx[0].vin[0].scriptSig = b'\x00' * 101
        b28.vtx[0].rehash()
        b28 = update_block(28, [])
        yield rejected(RejectResult(16, b'bad-cb-length'))

        # Extend the b28 chain to make sure bitcoind isn't accepting b28
        b29 = block(29, spend=out7)
        # TODO: Should get a reject message back with "bad-prevblk", except
        # there's a bug that prevents this from being detected.  Just note
        # failure for now, and add the reject result later.
        yield rejected()

        # b30 has a max-sized coinbase scriptSig.
        tip(23)
        b30 = block(30)
        b30.vtx[0].vin[0].scriptSig = b'\x00' * 100
        b30.vtx[0].rehash()
        b30 = update_block(30, [])
        yield accepted()
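
        # Aside (not part of the original test, a minimal sketch): the coinbase
        # scriptSig is assumed to be limited to 2..100 bytes, which is why the
        # 1-byte scriptSig in b26 and the 101-byte one in b28 fail with
        # bad-cb-length while the 100-byte scriptSig in b30 is accepted.
        def assumed_cb_length_ok(script_sig):
            return 2 <= len(script_sig) <= 100

        assert not assumed_cb_length_ok(b'\x00')        # b26
        assert not assumed_cb_length_ok(b'\x00' * 101)  # b28
        assert assumed_cb_length_ok(b'\x00' * 100)      # b30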

        # Add a block with double MAX_BLOCK_SIGOPS_PER_MB sigops to a 1.5 MB block
        lots_of_checksigs = CScript([OP_CHECKSIG] *
                                    (MAX_BLOCK_SIGOPS_PER_MB * 2 - 1))
        block(31,
              spend=get_spendable_output(),
              script=lots_of_checksigs,
              block_size=1500000)
        yield accepted()

        # Add a block with double MAX_BLOCK_SIGOPS_PER_MB sigops to a 2 MB block
        block(32,
              spend=get_spendable_output(),
              script=lots_of_checksigs,
              block_size=MAX_BLOCK_SIZE)
        yield accepted()

        # Make the next block have one additional sigop, and ensure it is rejected
        too_many_checksigs = CScript([OP_CHECKSIG] *
                                     (MAX_BLOCK_SIGOPS_PER_MB * 2))
        block(33,
              spend=get_spendable_output(),
              script=too_many_checksigs,
              block_size=MAX_BLOCK_SIZE)
        yield rejected(RejectResult(16, b'bad-blk-sigops'))
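
        # Aside (not part of the original test, a minimal sketch): the sigop
        # limit is assumed to scale with block size in whole-megabyte steps,
        # roughly ceil(block_size / 1MB) * MAX_BLOCK_SIGOPS_PER_MB. Under that
        # assumption the 1.5MB and 2MB blocks above may carry up to twice the
        # per-MB budget, so b31/b32 pass and b33's one extra sigop pushes it
        # over the budget.
        def assumed_sigops_budget(block_size, one_megabyte=1000000):
            return ((block_size + one_megabyte - 1) //
                    one_megabyte) * MAX_BLOCK_SIGOPS_PER_MB

        assert assumed_sigops_budget(1500000) == 2 * MAX_BLOCK_SIGOPS_PER_MB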
Example #14
    def run_test(self):
        node = self.nodes[0]

        self.log.info('Start with empty mempool, and 200 blocks')
        self.mempool_size = 0
        assert_equal(node.getblockcount(), 200)
        assert_equal(node.getmempoolinfo()['size'], self.mempool_size)
        coins = node.listunspent()

        self.log.info('Should not accept garbage to testmempoolaccept')
        assert_raises_rpc_error(
            -3, 'Expected type array, got string',
            lambda: node.testmempoolaccept(rawtxs='ff00baar'))
        assert_raises_rpc_error(
            -8, 'Array must contain between 1 and 25 transactions.',
            lambda: node.testmempoolaccept(rawtxs=['ff22'] * 26))
        assert_raises_rpc_error(
            -8, 'Array must contain between 1 and 25 transactions.',
            lambda: node.testmempoolaccept(rawtxs=[]))
        assert_raises_rpc_error(
            -22, 'TX decode failed',
            lambda: node.testmempoolaccept(rawtxs=['ff00baar']))

        self.log.info('A transaction already in the blockchain')
        coin = coins.pop()  # Pick a random coin(base) to spend
        raw_tx_in_block = node.signrawtransactionwithwallet(
            node.createrawtransaction(
                inputs=[{
                    'txid': coin['txid'],
                    'vout': coin['vout']
                }],
                outputs=[{
                    node.getnewaddress(): 0.3
                }, {
                    node.getnewaddress(): 49
                }],
            ))['hex']
        txid_in_block = node.sendrawtransaction(hexstring=raw_tx_in_block,
                                                maxfeerate=0)
        node.generate(1)
        self.mempool_size = 0
        self.check_mempool_result(
            result_expected=[{
                'txid': txid_in_block,
                'allowed': False,
                'reject-reason': 'txn-already-known'
            }],
            rawtxs=[raw_tx_in_block],
        )

        self.log.info('A transaction not in the mempool')
        fee = Decimal('0.000007')
        raw_tx_0 = node.signrawtransactionwithwallet(
            node.createrawtransaction(
                inputs=[{
                    "txid": txid_in_block,
                    "vout": 0,
                    "sequence": BIP125_SEQUENCE_NUMBER
                }],  # RBF is used later
                outputs=[{
                    node.getnewaddress(): Decimal('0.3') - fee
                }],
            ))['hex']
        tx = tx_from_hex(raw_tx_0)
        txid_0 = tx.rehash()
        self.check_mempool_result(
            result_expected=[{
                'txid': txid_0,
                'allowed': True,
                'vsize': tx.get_vsize(),
                'fees': {
                    'base': fee
                }
            }],
            rawtxs=[raw_tx_0],
        )

        self.log.info('A final transaction not in the mempool')
        coin = coins.pop()  # Pick a random coin(base) to spend
        output_amount = Decimal('0.025')
        raw_tx_final = node.signrawtransactionwithwallet(
            node.createrawtransaction(
                inputs=[{
                    'txid': coin['txid'],
                    'vout': coin['vout'],
                    "sequence": 0xffffffff
                }],  # SEQUENCE_FINAL
                outputs=[{
                    node.getnewaddress(): output_amount
                }],
                locktime=node.getblockcount() + 2000,  # Can be anything
            ))['hex']
        tx = tx_from_hex(raw_tx_final)
        fee_expected = coin['amount'] - output_amount
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': True,
                'vsize': tx.get_vsize(),
                'fees': {
                    'base': fee_expected
                }
            }],
            rawtxs=[tx.serialize().hex()],
            maxfeerate=0,
        )
        node.sendrawtransaction(hexstring=raw_tx_final, maxfeerate=0)
        self.mempool_size += 1

        self.log.info('A transaction in the mempool')
        node.sendrawtransaction(hexstring=raw_tx_0)
        self.mempool_size += 1
        self.check_mempool_result(
            result_expected=[{
                'txid': txid_0,
                'allowed': False,
                'reject-reason': 'txn-already-in-mempool'
            }],
            rawtxs=[raw_tx_0],
        )

        self.log.info('A transaction that replaces a mempool transaction')
        tx = tx_from_hex(raw_tx_0)
        tx.vout[0].nValue -= int(fee * COIN)  # Double the fee
        tx.vin[0].nSequence = BIP125_SEQUENCE_NUMBER + 1  # Now, opt out of RBF
        raw_tx_0 = node.signrawtransactionwithwallet(
            tx.serialize().hex())['hex']
        tx = tx_from_hex(raw_tx_0)
        txid_0 = tx.rehash()
        self.check_mempool_result(
            result_expected=[{
                'txid': txid_0,
                'allowed': True,
                'vsize': tx.get_vsize(),
                'fees': {
                    'base': (2 * fee)
                }
            }],
            rawtxs=[raw_tx_0],
        )

        self.log.info('A transaction that conflicts with an unconfirmed tx')
        # Send the transaction that replaces the mempool transaction and opts out of replaceability
        node.sendrawtransaction(hexstring=tx.serialize().hex(), maxfeerate=0)
        # take original raw_tx_0
        tx = tx_from_hex(raw_tx_0)
        tx.vout[0].nValue -= int(4 * fee * COIN)  # Set more fee
        # skip re-signing the tx
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'txn-mempool-conflict'
            }],
            rawtxs=[tx.serialize().hex()],
            maxfeerate=0,
        )

        self.log.info('A transaction with missing inputs, that never existed')
        tx = tx_from_hex(raw_tx_0)
        tx.vin[0].prevout = COutPoint(hash=int('ff' * 32, 16), n=14)
        # skip re-signing the tx
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'missing-inputs'
            }],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info(
            'A transaction with missing inputs, that existed once in the past')
        tx = tx_from_hex(raw_tx_0)
        # Set vout to 1, to spend the other outpoint (49 coins) of the
        # in-chain tx we want to double spend
        tx.vin[0].prevout.n = 1
        raw_tx_1 = node.signrawtransactionwithwallet(
            tx.serialize().hex())['hex']
        txid_1 = node.sendrawtransaction(hexstring=raw_tx_1, maxfeerate=0)
        # Now spend both to "clearly hide" the outputs, i.e. remove the coins
        # from the UTXO set by spending them
        raw_tx_spend_both = node.signrawtransactionwithwallet(
            node.createrawtransaction(inputs=[
                {
                    'txid': txid_0,
                    'vout': 0
                },
                {
                    'txid': txid_1,
                    'vout': 0
                },
            ],
                                      outputs=[{
                                          node.getnewaddress(): 0.1
                                      }]))['hex']
        txid_spend_both = node.sendrawtransaction(hexstring=raw_tx_spend_both,
                                                  maxfeerate=0)
        node.generate(1)
        self.mempool_size = 0
        # Now see if we can add the coins back to the utxo set by sending the exact txs again
        self.check_mempool_result(
            result_expected=[{
                'txid': txid_0,
                'allowed': False,
                'reject-reason': 'missing-inputs'
            }],
            rawtxs=[raw_tx_0],
        )
        self.check_mempool_result(
            result_expected=[{
                'txid': txid_1,
                'allowed': False,
                'reject-reason': 'missing-inputs'
            }],
            rawtxs=[raw_tx_1],
        )

        self.log.info('Create a signed "reference" tx for later use')
        raw_tx_reference = node.signrawtransactionwithwallet(
            node.createrawtransaction(
                inputs=[{
                    'txid': txid_spend_both,
                    'vout': 0
                }],
                outputs=[{
                    node.getnewaddress(): 0.05
                }],
            ))['hex']
        tx = tx_from_hex(raw_tx_reference)
        # Reference tx should be valid on itself
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': True,
                'vsize': tx.get_vsize(),
                'fees': {
                    'base': Decimal('0.1') - Decimal('0.05')
                }
            }],
            rawtxs=[tx.serialize().hex()],
            maxfeerate=0,
        )

        self.log.info('A transaction with no outputs')
        tx = tx_from_hex(raw_tx_reference)
        tx.vout = []
        # Skip re-signing the transaction for context independent checks from now on
        # tx = tx_from_hex(node.signrawtransactionwithwallet(tx.serialize().hex())['hex'])
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'bad-txns-vout-empty'
            }],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A really large transaction')
        tx = tx_from_hex(raw_tx_reference)
        tx.vin = [tx.vin[0]] * math.ceil(
            MAX_BLOCK_BASE_SIZE / len(tx.vin[0].serialize()))
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'bad-txns-oversize'
            }],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A transaction with negative output value')
        tx = tx_from_hex(raw_tx_reference)
        tx.vout[0].nValue *= -1
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'bad-txns-vout-negative'
            }],
            rawtxs=[tx.serialize().hex()],
        )

        # The following two validations prevent overflow of the output amounts (see CVE-2010-5139).
        self.log.info('A transaction with too large output value')
        tx = tx_from_hex(raw_tx_reference)
        tx.vout[0].nValue = MAX_MONEY + 1
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'bad-txns-vout-toolarge'
            }],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A transaction with too large sum of output values')
        tx = tx_from_hex(raw_tx_reference)
        tx.vout = [tx.vout[0]] * 2
        tx.vout[0].nValue = MAX_MONEY
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'bad-txns-txouttotal-toolarge'
            }],
            rawtxs=[tx.serialize().hex()],
        )
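
        # Aside (not part of the original test, a minimal sketch): the two
        # checks above are assumed to enforce the classic money-range rule
        # behind CVE-2010-5139: every output value must lie in [0, MAX_MONEY]
        # and the running sum of outputs must stay in range too, so no
        # combination of outputs can overflow into a small total.
        def assumed_outputs_in_range(values, max_money=MAX_MONEY):
            total = 0
            for value in values:
                if not (0 <= value <= max_money):
                    return False
                total += value
                if total > max_money:
                    return False
            return True

        assert not assumed_outputs_in_range([MAX_MONEY + 1])
        assert not assumed_outputs_in_range([MAX_MONEY, MAX_MONEY])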

        self.log.info('A transaction with duplicate inputs')
        tx = tx_from_hex(raw_tx_reference)
        tx.vin = [tx.vin[0]] * 2
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'bad-txns-inputs-duplicate'
            }],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A coinbase transaction')
        # Pick the input of the first tx we signed, so it has to be a coinbase tx
        raw_tx_coinbase_spent = node.getrawtransaction(
            txid=node.decoderawtransaction(
                hexstring=raw_tx_in_block)['vin'][0]['txid'])
        tx = tx_from_hex(raw_tx_coinbase_spent)
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'coinbase'
            }],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('Some nonstandard transactions')
        tx = tx_from_hex(raw_tx_reference)
        tx.nVersion = 3  # A version currently non-standard
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'version'
            }],
            rawtxs=[tx.serialize().hex()],
        )
        tx = tx_from_hex(raw_tx_reference)
        tx.vout[0].scriptPubKey = CScript([OP_0])  # Some non-standard script
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'scriptpubkey'
            }],
            rawtxs=[tx.serialize().hex()],
        )
        tx = tx_from_hex(raw_tx_reference)
        key = ECKey()
        key.generate()
        pubkey = key.get_pubkey().get_bytes()
        tx.vout[0].scriptPubKey = CScript(
            [OP_2, pubkey, pubkey, pubkey, OP_3,
             OP_CHECKMULTISIG])  # Some bare multisig script (2-of-3)
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'bare-multisig'
            }],
            rawtxs=[tx.serialize().hex()],
        )
        tx = tx_from_hex(raw_tx_reference)
        # Some not-pushonly scriptSig
        tx.vin[0].scriptSig = CScript([OP_HASH160])
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'scriptsig-not-pushonly'
            }],
            rawtxs=[tx.serialize().hex()],
        )
        tx = tx_from_hex(raw_tx_reference)
        # A too-large scriptSig: the 1648-byte push plus its 3-byte
        # OP_PUSHDATA2 encoding gives 1651 bytes, over the 1650-byte limit
        tx.vin[0].scriptSig = CScript([b'a' * 1648])
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'scriptsig-size'
            }],
            rawtxs=[tx.serialize().hex()],
        )
        tx = tx_from_hex(raw_tx_reference)
        output_p2sh_burn = CTxOut(
            nValue=540,
            scriptPubKey=CScript([OP_HASH160, hash160(b'burn'), OP_EQUAL]))
        # Use enough outputs to make the tx too large for our policy
        num_scripts = 100000 // len(output_p2sh_burn.serialize())
        tx.vout = [output_p2sh_burn] * num_scripts
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'tx-size'
            }],
            rawtxs=[tx.serialize().hex()],
        )
        tx = tx_from_hex(raw_tx_reference)
        tx.vout[0] = output_p2sh_burn
        # Make output smaller, such that it is dust for our policy
        tx.vout[0].nValue -= 1
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'dust'
            }],
            rawtxs=[tx.serialize().hex()],
        )
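
        # Aside (not part of the original test, a minimal sketch): the dust
        # threshold is assumed to be 3 * dust_relay_fee(serialized txout size
        # + 148 bytes for a typical spending input), with a 1000 sat/kB dust
        # relay fee. A P2SH txout serializes to 32 bytes, so the threshold is
        # 3 * (32 + 148) = 540 satoshis, which is why nValue=540 is the
        # smallest accepted value and nValue=539 above is rejected as dust.
        assumed_dust_threshold = 3 * (len(output_p2sh_burn.serialize()) + 148)
        assert assumed_dust_threshold == 540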
        tx = tx_from_hex(raw_tx_reference)
        tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'\xff'])
        tx.vout = [tx.vout[0]] * 2
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'multi-op-return'
            }],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A timelocked transaction')
        tx = tx_from_hex(raw_tx_reference)
        # Should be non-max, so locktime is not ignored
        tx.vin[0].nSequence -= 1
        tx.nLockTime = node.getblockcount() + 1
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'non-final'
            }],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A transaction that is locked by BIP68 sequence logic')
        tx = tx_from_hex(raw_tx_reference)
        # We could include it in the second block mined from now,
        # but not the very next one
        tx.vin[0].nSequence = 2
        # Can skip re-signing the tx because of early rejection
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'non-BIP68-final'
            }],
            rawtxs=[tx.serialize().hex()],
            maxfeerate=0,
        )
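
        # Aside (not part of the original test, a minimal sketch): under BIP68
        # the relative lock is assumed to live in the low 16 bits of
        # nSequence, with bit 31 disabling the check and bit 22 switching to
        # 512-second units. nSequence=2 therefore means "spendable two blocks
        # after the input confirms", so the very next block is too early.
        SEQUENCE_LOCKTIME_DISABLE_FLAG = 1 << 31
        SEQUENCE_LOCKTIME_TYPE_FLAG = 1 << 22

        def assumed_bip68_lock_in_blocks(n_sequence):
            if n_sequence & SEQUENCE_LOCKTIME_DISABLE_FLAG:
                return None  # relative lock not enforced
            if n_sequence & SEQUENCE_LOCKTIME_TYPE_FLAG:
                return None  # time-based lock, not block-based
            return n_sequence & 0x0000ffff

        assert assumed_bip68_lock_in_blocks(2) == 2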
Example #15
    def run_test(self):
        node = self.nodes[0]
        default_p2p = node.add_p2p_connection(P2PDataStore())
        test_p2p = node.add_p2p_connection(TestP2PConn())

        # Set the blocksize to 2MB as initial condition
        node.setexcessiveblock(self.excessive_block_size)

        self.genesis_hash = int(node.getbestblockhash(), 16)
        self.block_heights[self.genesis_hash] = 0
        spendable_outputs = []

        # save the current tip so it can be spent by a later block
        def save_spendable_output():
            spendable_outputs.append(self.tip)

        # get an output that we previously marked as spendable
        def get_spendable_output():
            return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)

        # move the tip back to a previous block
        def tip(number):
            self.tip = self.blocks[number]

        # shorthand for functions
        block = self.next_block

        # Create a new block
        block(0)
        save_spendable_output()
        default_p2p.send_blocks_and_test([self.tip], node)

        # Now we need that block to mature so we can spend the coinbase.
        maturity_blocks = []
        for i in range(99):
            block(5000 + i)
            maturity_blocks.append(self.tip)
            save_spendable_output()

        # Get to within one block of the May 15, 2018 HF activation
        for i in range(6):
            block(5100 + i)
            maturity_blocks.append(self.tip)

        # Send it all to the node at once.
        default_p2p.send_blocks_and_test(maturity_blocks, node)

        # collect spendable outputs now to avoid cluttering the code later on
        out = []
        for i in range(100):
            out.append(get_spendable_output())

        # Check that compact blocks also work for big blocks
        # Wait for SENDCMPCT
        def received_sendcmpct():
            return test_p2p.last_sendcmpct is not None

        wait_until(received_sendcmpct, timeout=30)

        sendcmpct = msg_sendcmpct()
        sendcmpct.version = 1
        sendcmpct.announce = True
        test_p2p.send_and_ping(sendcmpct)
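
        # Aside (not part of the original test, a minimal sketch): announcing
        # sendcmpct(announce=True) with a version the node supports is assumed
        # to switch this peer to BIP152 high-bandwidth relay, so new blocks
        # arrive as cmpctblock messages, which is exactly what the
        # received_block() predicate below waits for.
        def assumed_high_bandwidth(announce, version, supported_versions=(1,)):
            return bool(announce) and version in supported_versions

        assert assumed_high_bandwidth(sendcmpct.announce, sendcmpct.version)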

        # Exchange headers
        def received_getheaders():
            return test_p2p.last_getheaders is not None

        wait_until(received_getheaders, timeout=30)

        # Return the favor
        test_p2p.send_message(test_p2p.last_getheaders)

        # Wait for the header list
        def received_headers():
            return test_p2p.last_headers is not None

        wait_until(received_headers, timeout=30)

        # Now both sides know about the same headers
        test_p2p.send_message(test_p2p.last_headers)

        # Send a block
        b1 = block(1, spend=out[0], block_size=ONE_MEGABYTE + 1)
        default_p2p.send_blocks_and_test([self.tip], node)

        # Check that the node forwards it via compact block
        def received_block():
            return test_p2p.last_cmpctblock is not None

        wait_until(received_block, timeout=30)

        # Was it our block?
        cmpctblk_header = test_p2p.last_cmpctblock.header_and_shortids.header
        cmpctblk_header.calc_sha256()
        assert cmpctblk_header.sha256 == b1.sha256

        # Send a large block with numerous transactions.
        test_p2p.clear_block_data()
        b2 = block(2,
                   spend=out[1],
                   extra_txns=70000,
                   block_size=self.excessive_block_size - 1000)
        default_p2p.send_blocks_and_test([self.tip], node)

        # Check that the node forwards it via compact block
        wait_until(received_block, timeout=30)

        # Was it our block?
        cmpctblk_header = test_p2p.last_cmpctblock.header_and_shortids.header
        cmpctblk_header.calc_sha256()
        assert cmpctblk_header.sha256 == b2.sha256

        # To avoid having to resend a ton of transactions, we invalidate b2,
        # which puts all of its transactions back into the mempool. Note that
        # this assumes reorgs re-insert low-fee transactions into the mempool.
        node.invalidateblock(node.getbestblockhash())

        # Now send a compact block and see if the node accepts it.
        # Modify b2 and reuse it, so its transactions are already in the mempool.
        tx = b2.vtx[0]
        tx.vout.append(CTxOut(0, CScript([random.randint(0, 256), OP_RETURN])))
        tx.rehash()
        b2.vtx[0] = tx
        b2.hashMerkleRoot = b2.calc_merkle_root()
        b2.solve()

        # Now we create the compact block and send it
        comp_block = HeaderAndShortIDs()
        comp_block.initialize_from_block(b2)
        test_p2p.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))

        # Check that compact block is received properly
        assert int(node.getbestblockhash(), 16) == b2.sha256
Example #16
    def run_test(self):
        node = self.nodes[0]

        self.log.info('Start with empty mempool, and 200 blocks')
        self.mempool_size = 0
        wait_until(lambda: node.getblockcount() == 200)
        assert_equal(node.getmempoolinfo()['size'], self.mempool_size)

        self.log.info('Should not accept garbage to testmempoolaccept')
        assert_raises_rpc_error(
            -3, 'Expected type array, got string',
            lambda: node.testmempoolaccept(rawtxs='ff00baar'))
        assert_raises_rpc_error(
            -8, 'Array must contain exactly one raw transaction for now',
            lambda: node.testmempoolaccept(rawtxs=['ff00baar', 'ff22']))
        assert_raises_rpc_error(
            -22, 'TX decode failed',
            lambda: node.testmempoolaccept(rawtxs=['ff00baar']))

        self.log.info('A transaction already in the blockchain')
        coin = node.listunspent()[0]  # Pick a random coin(base) to spend
        raw_tx_in_block = node.signrawtransactionwithwallet(
            node.createrawtransaction(
                inputs=[{
                    'txid': coin['txid'],
                    'vout': coin['vout']
                }],
                outputs=[{
                    node.getnewaddress(): 0.3
                }, {
                    node.getnewaddress(): 49
                }],
            ))['hex']
        txid_in_block = node.sendrawtransaction(hexstring=raw_tx_in_block,
                                                allowhighfees=True)
        node.generate(1)
        self.check_mempool_result(
            result_expected=[{
                'txid': txid_in_block,
                'allowed': False,
                'reject-reason': '18: txn-already-known'
            }],
            rawtxs=[raw_tx_in_block],
        )
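
        # Aside (not part of the original test): in this older API the
        # reject-reason string is prefixed with the numeric reject code. The
        # prefixes used in this test are assumed to be the BIP61-style codes:
        # 16 (0x10) invalid, 18 (0x12) duplicate, 64 (0x40) non-standard.
        assumed_reject_codes = {
            16: 'invalid', 18: 'duplicate', 64: 'nonstandard'}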

        self.log.info('A transaction not in the mempool')
        fee = 0.00000700
        raw_tx_0 = node.signrawtransactionwithwallet(
            node.createrawtransaction(
                inputs=[{
                    "txid": txid_in_block,
                    "vout": 0,
                    "sequence": BIP125_SEQUENCE_NUMBER
                }],  # RBF is used later
                outputs=[{
                    node.getnewaddress(): 0.3 - fee
                }],
            ))['hex']
        tx = CTransaction()
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
        txid_0 = tx.rehash()
        self.check_mempool_result(
            result_expected=[{
                'txid': txid_0,
                'allowed': True
            }],
            rawtxs=[raw_tx_0],
        )

        self.log.info('A transaction in the mempool')
        node.sendrawtransaction(hexstring=raw_tx_0)
        self.mempool_size = 1
        self.check_mempool_result(
            result_expected=[{
                'txid': txid_0,
                'allowed': False,
                'reject-reason': '18: txn-already-in-mempool'
            }],
            rawtxs=[raw_tx_0],
        )

        self.log.info('A transaction that replaces a mempool transaction')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
        tx.vout[0].nValue -= int(fee * COIN)  # Double the fee
        tx.vin[0].nSequence = BIP125_SEQUENCE_NUMBER + 1  # Now, opt out of RBF
        raw_tx_0 = node.signrawtransactionwithwallet(
            bytes_to_hex_str(tx.serialize()))['hex']
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
        txid_0 = tx.rehash()
        self.check_mempool_result(
            result_expected=[{
                'txid': txid_0,
                'allowed': True
            }],
            rawtxs=[raw_tx_0],
        )

        self.log.info('A transaction that conflicts with an unconfirmed tx')
        # Send the transaction that replaces the mempool transaction and opts out of replaceability
        node.sendrawtransaction(hexstring=bytes_to_hex_str(tx.serialize()),
                                allowhighfees=True)
        # take original raw_tx_0
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
        tx.vout[0].nValue -= int(4 * fee * COIN)  # Set more fee
        # skip re-signing the tx
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': '18: txn-mempool-conflict'
            }],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
            allowhighfees=True,
        )

        self.log.info('A transaction with missing inputs, that never existed')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
        tx.vin[0].prevout = COutPoint(hash=int('ff' * 32, 16), n=14)
        # skip re-signing the tx
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'missing-inputs'
            }],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info(
            'A transaction with missing inputs, that existed once in the past')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
        # Set vout to 1, to spend the other outpoint (49 coins) of the
        # in-chain tx we want to double spend
        tx.vin[0].prevout.n = 1
        raw_tx_1 = node.signrawtransactionwithwallet(
            bytes_to_hex_str(tx.serialize()))['hex']
        txid_1 = node.sendrawtransaction(hexstring=raw_tx_1,
                                         allowhighfees=True)
        # Now spend both to "clearly hide" the outputs, i.e. remove the coins
        # from the UTXO set by spending them
        raw_tx_spend_both = node.signrawtransactionwithwallet(
            node.createrawtransaction(inputs=[
                {
                    'txid': txid_0,
                    'vout': 0
                },
                {
                    'txid': txid_1,
                    'vout': 0
                },
            ],
                                      outputs=[{
                                          node.getnewaddress(): 0.1
                                      }]))['hex']
        txid_spend_both = node.sendrawtransaction(hexstring=raw_tx_spend_both,
                                                  allowhighfees=True)
        node.generate(1)
        self.mempool_size = 0
        # Now see if we can add the coins back to the utxo set by sending the exact txs again
        self.check_mempool_result(
            result_expected=[{
                'txid': txid_0,
                'allowed': False,
                'reject-reason': 'missing-inputs'
            }],
            rawtxs=[raw_tx_0],
        )
        self.check_mempool_result(
            result_expected=[{
                'txid': txid_1,
                'allowed': False,
                'reject-reason': 'missing-inputs'
            }],
            rawtxs=[raw_tx_1],
        )

        self.log.info('Create a signed "reference" tx for later use')
        raw_tx_reference = node.signrawtransactionwithwallet(
            node.createrawtransaction(
                inputs=[{
                    'txid': txid_spend_both,
                    'vout': 0
                }],
                outputs=[{
                    node.getnewaddress(): 0.05
                }],
            ))['hex']
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        # Reference tx should be valid on itself
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': True
            }],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info('A transaction with no outputs')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vout = []
        # Skip re-signing the transaction for context independent checks from now on
        # tx.deserialize(BytesIO(hex_str_to_bytes(node.signrawtransactionwithwallet(bytes_to_hex_str(tx.serialize()))['hex'])))
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': '16: bad-txns-vout-empty'
            }],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info('A really large transaction')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vin = [tx.vin[0]] * (MAX_BLOCK_BASE_SIZE //
                                len(tx.vin[0].serialize()))
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': '16: bad-txns-oversize'
            }],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info('A transaction with negative output value')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vout[0].nValue *= -1
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': '16: bad-txns-vout-negative'
            }],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info('A transaction with too large output value')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vout[0].nValue = 16320000 * COIN + 1
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': '16: bad-txns-vout-toolarge'
            }],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info('A transaction with too large sum of output values')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vout = [tx.vout[0]] * 2
        tx.vout[0].nValue = 16320000 * COIN
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': '16: bad-txns-txouttotal-toolarge'
            }],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info('A transaction with duplicate inputs')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vin = [tx.vin[0]] * 2
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': '16: bad-txns-inputs-duplicate'
            }],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info('A coinbase transaction')
        # Pick the input of the first tx we signed, so it has to be a coinbase tx
        raw_tx_coinbase_spent = node.getrawtransaction(
            txid=node.decoderawtransaction(
                hexstring=raw_tx_in_block)['vin'][0]['txid'])
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_coinbase_spent)))
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': '16: coinbase'
            }],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info('Some nonstandard transactions')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.nVersion = 3  # A version currently non-standard
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': '64: version'
            }],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vout[0].scriptPubKey = CScript([OP_0])  # Some non-standard script
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': '64: scriptpubkey'
            }],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        # Some not-pushonly scriptSig
        tx.vin[0].scriptSig = CScript([OP_HASH160])
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': '64: scriptsig-not-pushonly'
            }],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        output_p2sh_burn = CTxOut(
            nValue=540,
            scriptPubKey=CScript([OP_HASH160, hash160(b'burn'), OP_EQUAL]))
        # Use enough outputs to make the tx too large for our policy
        num_scripts = 100000 // len(output_p2sh_burn.serialize())
        tx.vout = [output_p2sh_burn] * num_scripts
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': '64: tx-size'
            }],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vout[0] = output_p2sh_burn
        # Make output smaller, such that it is dust for our policy
        tx.vout[0].nValue -= 1
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': '64: dust'
            }],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'\xff'])
        tx.vout = [tx.vout[0]] * 2
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': '64: multi-op-return'
            }],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info('A timelocked transaction')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        # Should be non-max, so locktime is not ignored
        tx.vin[0].nSequence -= 1
        tx.nLockTime = node.getblockcount() + 1
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': '64: non-final'
            }],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info('A transaction that is locked by BIP68 sequence logic')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        # We could include it in the second block mined from now,
        # but not the very next one
        tx.vin[0].nSequence = 2
        # Can skip re-signing the tx because of early rejection
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': '64: non-BIP68-final'
            }],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
            allowhighfees=True,
        )
    def run_test(self):
        """Main test logic"""

        self.log.info(
            "Test importaddress with label and importprivkey without label.")
        self.log.info("Import a watch-only address with a label.")
        address = self.nodes[0].getnewaddress()
        label = "Test Label"
        self.nodes[1].importaddress(address, label)
        test_address(self.nodes[1],
                     address,
                     iswatchonly=True,
                     ismine=False,
                     label=label)

        self.log.info("Import the watch-only address's private key without a "
                      "label and the address should keep its label.")
        priv_key = self.nodes[0].dumpprivkey(address)
        self.nodes[1].importprivkey(priv_key)

        test_address(self.nodes[1], address, label=label)

        self.log.info(
            "Test importaddress without label and importprivkey with label.")
        self.log.info("Import a watch-only address without a label.")
        address2 = self.nodes[0].getnewaddress()
        self.nodes[1].importaddress(address2)
        test_address(self.nodes[1],
                     address2,
                     iswatchonly=True,
                     ismine=False,
                     label="")

        self.log.info("Import the watch-only address's private key with a "
                      "label and the address should have its label updated.")
        priv_key2 = self.nodes[0].dumpprivkey(address2)
        label2 = "Test Label 2"
        self.nodes[1].importprivkey(priv_key2, label2)

        test_address(self.nodes[1], address2, label=label2)

        self.log.info(
            "Test importaddress with label and importprivkey with label.")
        self.log.info("Import a watch-only address with a label.")
        address3 = self.nodes[0].getnewaddress()
        label3_addr = "Test Label 3 for importaddress"
        self.nodes[1].importaddress(address3, label3_addr)
        test_address(self.nodes[1],
                     address3,
                     iswatchonly=True,
                     ismine=False,
                     label=label3_addr)

        self.log.info("Import the watch-only address's private key with a "
                      "label and the address should have its label updated.")
        priv_key3 = self.nodes[0].dumpprivkey(address3)
        label3_priv = "Test Label 3 for importprivkey"
        self.nodes[1].importprivkey(priv_key3, label3_priv)

        test_address(self.nodes[1], address3, label=label3_priv)

        self.log.info("Test importprivkey won't label new dests with the same "
                      "label as others labeled dests for the same key.")
        self.log.info("Import a watch-only legacy address with a label.")
        address4 = self.nodes[0].getnewaddress()
        label4_addr = "Test Label 4 for importaddress"
        self.nodes[1].importaddress(address4, label4_addr)
        test_address(self.nodes[1],
                     address4,
                     iswatchonly=True,
                     ismine=False,
                     label=label4_addr,
                     embedded=None)

        self.log.info("Import the watch-only address's private key without a "
                      "label and new destinations for the key should have an "
                      "empty label while the 'old' destination should keep "
                      "its label.")

        # Build a P2SH manually for this test.
        priv_key4 = self.nodes[0].dumpprivkey(address4)
        pubkey4 = self.nodes[0].getaddressinfo(address4)['pubkey']
        pkh4 = hash160(hex_str_to_bytes(pubkey4))
        script4 = CScript(
            [OP_DUP, OP_HASH160, pkh4, OP_EQUALVERIFY, OP_CHECKSIG])
        p2shaddr4 = script_to_p2sh(script4)

        self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": p2shaddr4
            },
            "timestamp": "now",
            "redeemscript": script4.hex(),
            "keys": [priv_key4],
        }])

        test_address(self.nodes[1], p2shaddr4, label="")

        embedded_addr = self.nodes[1].getaddressinfo(
            p2shaddr4)['embedded']['address']
        test_address(self.nodes[1], embedded_addr, label=label4_addr)
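
        # Aside (not part of the original test, a minimal sketch): the address
        # built by script_to_p2sh is assumed to be
        # base58check(P2SH version byte || hash160(redeemscript)), so the
        # wallet ends up watching OP_HASH160 <hash160(script4)> OP_EQUAL while
        # the embedded P2PKH destination keeps its own label.
        assumed_p2sh_payload = hash160(bytes(script4))
        assert len(assumed_p2sh_payload) == 20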

        self.stop_nodes()
Example #18
    def test_doublespend_tree(self):
        """Doublespend of a big tree of transactions"""

        initial_nValue = 50 * COIN
        tx0_outpoint = make_utxo(self.nodes[0], initial_nValue)

        def branch(prevout,
                   initial_value,
                   max_txs,
                   tree_width=5,
                   fee=0.0001 * COIN,
                   _total_txs=None):
            if _total_txs is None:
                _total_txs = [0]
            if _total_txs[0] >= max_txs:
                return

            txout_value = (initial_value - fee) // tree_width
            if txout_value < fee:
                return

            vout = [
                CTxOut(txout_value, CScript([i + 1]))
                for i in range(tree_width)
            ]
            tx = CTransaction()
            tx.vin = [CTxIn(prevout, nSequence=0)]
            tx.vout = vout
            tx_hex = txToHex(tx)

            assert len(tx.serialize()) < 100000
            txid = self.nodes[0].sendrawtransaction(tx_hex, True)
            yield tx
            _total_txs[0] += 1

            txid = int(txid, 16)

            for i, txout in enumerate(tx.vout):
                for x in branch(COutPoint(txid, i),
                                txout_value,
                                max_txs,
                                tree_width=tree_width,
                                fee=fee,
                                _total_txs=_total_txs):
                    yield x

        fee = int(0.0001 * COIN)
        n = MAX_REPLACEMENT_LIMIT
        tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee))
        assert_equal(len(tree_txs), n)

        # Attempt double-spend, will fail because too little fee paid
        dbl_tx = CTransaction()
        dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        dbl_tx.vout = [CTxOut(initial_nValue - fee * n, CScript([1] * 35))]
        dbl_tx_hex = txToHex(dbl_tx)
        # This will raise an exception due to insufficient fee
        assert_raises_rpc_error(-26, "insufficient fee",
                                self.nodes[0].sendrawtransaction, dbl_tx_hex,
                                True)

        # 1 BTC fee is enough
        dbl_tx = CTransaction()
        dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        dbl_tx.vout = [
            CTxOut(initial_nValue - fee * n - 1 * COIN, CScript([1] * 35))
        ]
        dbl_tx_hex = txToHex(dbl_tx)
        self.nodes[0].sendrawtransaction(dbl_tx_hex, True)

        mempool = self.nodes[0].getrawmempool()

        for tx in tree_txs:
            tx.rehash()
            assert tx.hash not in mempool
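
        # Aside (not part of the original test, a minimal sketch): the
        # replacement is assumed to follow the BIP125 absolute-fee rule: it
        # must pay more than the combined fees of everything it evicts, plus
        # the relay fee for its own size. Paying only fee * n merely matches
        # the tree's total fee, while the extra 1 * COIN clears the bar.
        assumed_evicted_fees = fee * n
        assumed_replacement_fee = initial_nValue - (initial_nValue -
                                                    fee * n - 1 * COIN)
        assert assumed_replacement_fee > assumed_evicted_fees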

        # Try again, but with more total transactions than the "max txs
        # double-spent at once" anti-DoS limit.
        for n in (MAX_REPLACEMENT_LIMIT + 1, MAX_REPLACEMENT_LIMIT * 2):
            fee = int(0.0001 * COIN)
            tx0_outpoint = make_utxo(self.nodes[0], initial_nValue)
            tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee))
            assert_equal(len(tree_txs), n)

            dbl_tx = CTransaction()
            dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
            dbl_tx.vout = [
                CTxOut(initial_nValue - 2 * fee * n, CScript([1] * 35))
            ]
            dbl_tx_hex = txToHex(dbl_tx)
            # This will raise an exception
            assert_raises_rpc_error(-26, "too many potential replacements",
                                    self.nodes[0].sendrawtransaction,
                                    dbl_tx_hex, True)

            for tx in tree_txs:
                tx.rehash()
                self.nodes[0].getrawtransaction(tx.hash)
Example #19
    def test_opt_in(self):
        """Replacing should only work if orig tx opted in"""
        tx0_outpoint = self.make_utxo(self.nodes[0], int(1.1 * COIN))

        # Create a non-opting in transaction
        tx1a = CTransaction()
        tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0xffffffff)]
        tx1a.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
        tx1a_hex = tx1a.serialize().hex()
        tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, 0)

        # This transaction isn't shown as replaceable
        assert_equal(
            self.nodes[0].getmempoolentry(tx1a_txid)['bip125-replaceable'],
            False)

        # Shouldn't be able to double-spend
        tx1b = CTransaction()
        tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1b.vout = [CTxOut(int(0.9 * COIN), DUMMY_P2WPKH_SCRIPT)]
        tx1b_hex = tx1b.serialize().hex()

        # This will raise an exception
        assert_raises_rpc_error(-26, "txn-mempool-conflict",
                                self.nodes[0].sendrawtransaction, tx1b_hex, 0)

        tx1_outpoint = self.make_utxo(self.nodes[0], int(1.1 * COIN))

        # Create a different non-opting in transaction
        tx2a = CTransaction()
        tx2a.vin = [CTxIn(tx1_outpoint, nSequence=0xfffffffe)]
        tx2a.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
        tx2a_hex = tx2a.serialize().hex()
        tx2a_txid = self.nodes[0].sendrawtransaction(tx2a_hex, 0)

        # Still shouldn't be able to double-spend
        tx2b = CTransaction()
        tx2b.vin = [CTxIn(tx1_outpoint, nSequence=0)]
        tx2b.vout = [CTxOut(int(0.9 * COIN), DUMMY_P2WPKH_SCRIPT)]
        tx2b_hex = tx2b.serialize().hex()

        # This will raise an exception
        assert_raises_rpc_error(-26, "txn-mempool-conflict",
                                self.nodes[0].sendrawtransaction, tx2b_hex, 0)

        # Now create a new transaction that spends from tx1a and tx2a,
        # opting in to RBF on one of the inputs.
        # The transaction should be replaceable on either input.

        tx1a_txid = int(tx1a_txid, 16)
        tx2a_txid = int(tx2a_txid, 16)

        tx3a = CTransaction()
        tx3a.vin = [
            CTxIn(COutPoint(tx1a_txid, 0), nSequence=0xffffffff),
            CTxIn(COutPoint(tx2a_txid, 0), nSequence=0xfffffffd)
        ]
        tx3a.vout = [
            CTxOut(int(0.9 * COIN), CScript([b'c'])),
            CTxOut(int(0.9 * COIN), CScript([b'd']))
        ]
        tx3a_hex = tx3a.serialize().hex()

        tx3a_txid = self.nodes[0].sendrawtransaction(tx3a_hex, 0)

        # This transaction is shown as replaceable
        assert_equal(
            self.nodes[0].getmempoolentry(tx3a_txid)['bip125-replaceable'],
            True)
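
        # Aside (not part of the original test, a minimal sketch): a
        # transaction is assumed to signal BIP125 replaceability when any of
        # its inputs has nSequence < 0xfffffffe; tx3a signals through its
        # second input (0xfffffffd) even though the first (0xffffffff) does
        # not.
        def assumed_signals_rbf(sequences):
            return any(seq < 0xfffffffe for seq in sequences)

        assert assumed_signals_rbf([0xffffffff, 0xfffffffd])
        assert not assumed_signals_rbf([0xffffffff, 0xfffffffe])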

        tx3b = CTransaction()
        tx3b.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0)]
        tx3b.vout = [CTxOut(int(0.5 * COIN), DUMMY_P2WPKH_SCRIPT)]
        tx3b_hex = tx3b.serialize().hex()

        tx3c = CTransaction()
        tx3c.vin = [CTxIn(COutPoint(tx2a_txid, 0), nSequence=0)]
        tx3c.vout = [CTxOut(int(0.5 * COIN), DUMMY_P2WPKH_SCRIPT)]
        tx3c_hex = tx3c.serialize().hex()

        self.nodes[0].sendrawtransaction(tx3b_hex, 0)
        # If tx3b was accepted, tx3c won't look like a replacement,
        # but make sure it is accepted anyway
        self.nodes[0].sendrawtransaction(tx3c_hex, 0)
Example #20
    def test_prioritised_transactions(self):
        # Ensure that fee deltas used via prioritisetransaction are
        # correctly used by replacement logic

        # 1. Check that feeperkb uses modified fees
        tx0_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN))

        tx1a = CTransaction()
        tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1a.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))]
        tx1a_hex = txToHex(tx1a)
        tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True)

        # Higher fee, but the actual fee per KB is much lower.
        tx1b = CTransaction()
        tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1b.vout = [CTxOut(int(0.001 * COIN), CScript([b'a' * 740000]))]
        tx1b_hex = txToHex(tx1b)

        # Verify tx1b cannot replace tx1a.
        assert_raises_rpc_error(-26, "insufficient fee",
                                self.nodes[0].sendrawtransaction, tx1b_hex,
                                True)

        # Use prioritisetransaction to set tx1a's fee to 0.
        self.nodes[0].prioritisetransaction(txid=tx1a_txid,
                                            fee_delta=int(-0.1 * COIN))

        # Now tx1b should be able to replace tx1a
        tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)

        assert tx1b_txid in self.nodes[0].getrawmempool()
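
        # Aside (not part of the original test, a minimal sketch):
        # prioritisetransaction is assumed to adjust the *modified* fee the
        # mempool uses for policy decisions, i.e. modified = actual + delta,
        # and replacements are compared on modified fees. Zeroing tx1a's
        # modified fee is what lets the low-feerate tx1b replace it.
        def assumed_modified_fee(actual_fee, fee_delta):
            return actual_fee + fee_delta

        # tx1a spends 1.1 COIN and pays out 1 COIN, so its actual fee is
        # 0.1 COIN; the -0.1 COIN delta takes its modified fee to zero.
        assert assumed_modified_fee(int(0.1 * COIN), int(-0.1 * COIN)) == 0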

        # 2. Check that absolute fee checks use modified fee.
        tx1_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN))

        tx2a = CTransaction()
        tx2a.vin = [CTxIn(tx1_outpoint, nSequence=0)]
        tx2a.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))]
        tx2a_hex = txToHex(tx2a)
        self.nodes[0].sendrawtransaction(tx2a_hex, True)

        # Lower fee, but we'll prioritise it
        tx2b = CTransaction()
        tx2b.vin = [CTxIn(tx1_outpoint, nSequence=0)]
        tx2b.vout = [CTxOut(int(1.01 * COIN), CScript([b'a' * 35]))]
        tx2b.rehash()
        tx2b_hex = txToHex(tx2b)

        # Verify tx2b cannot replace tx2a.
        assert_raises_rpc_error(-26, "insufficient fee",
                                self.nodes[0].sendrawtransaction, tx2b_hex,
                                True)

        # Now prioritise tx2b to have a higher modified fee
        self.nodes[0].prioritisetransaction(txid=tx2b.hash,
                                            fee_delta=int(0.1 * COIN))

        # tx2b should now be accepted
        tx2b_txid = self.nodes[0].sendrawtransaction(tx2b_hex, True)

        assert tx2b_txid in self.nodes[0].getrawmempool()
Example #21
    def run_test(self):
        self.log.info("Mining blocks...")
        self.nodes[0].generate(105)
        self.sync_all()

        chain_height = self.nodes[1].getblockcount()
        assert_equal(chain_height, 105)
        assert_equal(self.nodes[1].getbalance(), 0)
        assert_equal(self.nodes[2].getbalance(), 0)

        # Check that balances are correct
        balance0 = self.nodes[1].getaddressbalance(
            "2N2JD6wb56AfK4tfmM6PwdVmoYk2dCKf4Br")
        assert_equal(balance0["balance"], 0)

        # Check p2pkh and p2sh address indexes
        self.log.info("Testing p2pkh and p2sh address index...")

        txid0 = self.nodes[0].sendtoaddress(
            "mo9ncXisMeAoXwqcV5EWuyncbmCcQN4rVs", 10)
        self.nodes[0].generate(1)

        txidb0 = self.nodes[0].sendtoaddress(
            "2N2JD6wb56AfK4tfmM6PwdVmoYk2dCKf4Br", 10)
        self.nodes[0].generate(1)

        txid1 = self.nodes[0].sendtoaddress(
            "mo9ncXisMeAoXwqcV5EWuyncbmCcQN4rVs", 15)
        self.nodes[0].generate(1)

        txidb1 = self.nodes[0].sendtoaddress(
            "2N2JD6wb56AfK4tfmM6PwdVmoYk2dCKf4Br", 15)
        self.nodes[0].generate(1)

        txid2 = self.nodes[0].sendtoaddress(
            "mo9ncXisMeAoXwqcV5EWuyncbmCcQN4rVs", 20)
        self.nodes[0].generate(1)

        txidb2 = self.nodes[0].sendtoaddress(
            "2N2JD6wb56AfK4tfmM6PwdVmoYk2dCKf4Br", 20)
        self.nodes[0].generate(1)

        self.sync_all()

        txids = self.nodes[1].getaddresstxids(
            "mo9ncXisMeAoXwqcV5EWuyncbmCcQN4rVs")
        assert_equal(len(txids), 3)
        assert_equal(txids[0], txid0)
        assert_equal(txids[1], txid1)
        assert_equal(txids[2], txid2)

        txidsb = self.nodes[1].getaddresstxids(
            "2N2JD6wb56AfK4tfmM6PwdVmoYk2dCKf4Br")
        assert_equal(len(txidsb), 3)
        assert_equal(txidsb[0], txidb0)
        assert_equal(txidsb[1], txidb1)
        assert_equal(txidsb[2], txidb2)

        # Check that limiting by height works
        self.log.info("Testing querying txids by range of block heights..")
        height_txids = self.nodes[1].getaddresstxids({
            "addresses": ["2N2JD6wb56AfK4tfmM6PwdVmoYk2dCKf4Br"],
            "start":
            105,
            "end":
            110
        })
        assert_equal(len(height_txids), 2)
        assert_equal(height_txids[0], txidb0)
        assert_equal(height_txids[1], txidb1)

        # Check that multiple addresses works
        multitxids = self.nodes[1].getaddresstxids({
            "addresses": [
                "2N2JD6wb56AfK4tfmM6PwdVmoYk2dCKf4Br",
                "mo9ncXisMeAoXwqcV5EWuyncbmCcQN4rVs"
            ]
        })
        assert_equal(len(multitxids), 6)
        assert_equal(multitxids[0], txid0)
        assert_equal(multitxids[1], txidb0)
        assert_equal(multitxids[2], txid1)
        assert_equal(multitxids[3], txidb1)
        assert_equal(multitxids[4], txid2)
        assert_equal(multitxids[5], txidb2)

        # Check that balances are correct
        balance0 = self.nodes[1].getaddressbalance(
            "2N2JD6wb56AfK4tfmM6PwdVmoYk2dCKf4Br")
        assert_equal(balance0["balance"], 45 * 100000000)

        # Check that outputs with the same address will only return one txid
        self.log.info("Testing for txid uniqueness...")
        addressHash = bytes([
            99, 73, 164, 24, 252, 69, 120, 209, 10, 55, 43, 84, 180, 92, 40,
            12, 200, 196, 56, 47
        ])
        scriptPubKey = CScript([OP_HASH160, addressHash, OP_EQUAL])
        unspent = self.nodes[0].listunspent()
        tx = CTransaction()
        tx.vin = [
            CTxIn(COutPoint(int(unspent[0]["txid"], 16), unspent[0]["vout"]))
        ]
        tx.vout = [CTxOut(10, scriptPubKey), CTxOut(11, scriptPubKey)]
        tx.rehash()

        signed_tx = self.nodes[0].signrawtransaction(
            binascii.hexlify(tx.serialize()).decode("utf-8"))
        sent_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], True)

        self.nodes[0].generate(1)
        self.sync_all()

        txidsmany = self.nodes[1].getaddresstxids(
            "2N2JD6wb56AfK4tfmM6PwdVmoYk2dCKf4Br")
        assert_equal(len(txidsmany), 4)
        assert_equal(txidsmany[3], sent_txid)

        # Check that balances are correct
        self.log.info("Testing balances...")
        balance0 = self.nodes[1].getaddressbalance(
            "2N2JD6wb56AfK4tfmM6PwdVmoYk2dCKf4Br")
        assert_equal(balance0["balance"], 45 * 100000000 + 21)

        # Check that balances are correct after spending
        self.log.info("Testing balances after spending...")
        privkey2 = "cSdkPxkAjA4HDr5VHgsebAPDEh9Gyub4HK8UJr2DFGGqKKy4K5sG"
        address2 = "mgY65WSfEmsyYaYPQaXhmXMeBhwp4EcsQW"
        addressHash2 = bytes([
            11, 47, 10, 12, 49, 191, 224, 64, 107, 12, 204, 19, 129, 253, 190,
            49, 25, 70, 218, 220
        ])
        scriptPubKey2 = CScript(
            [OP_DUP, OP_HASH160, addressHash2, OP_EQUALVERIFY, OP_CHECKSIG])
        self.nodes[0].importprivkey(privkey2)

        unspent = self.nodes[0].listunspent()
        tx = CTransaction()
        tx.vin = [
            CTxIn(COutPoint(int(unspent[0]["txid"], 16), unspent[0]["vout"]))
        ]
        amount = int(unspent[0]["amount"] * 100000000 - 230000)
        tx.vout = [CTxOut(amount, scriptPubKey2)]
        signed_tx = self.nodes[0].signrawtransaction(
            binascii.hexlify(tx.serialize()).decode("utf-8"))
        spending_txid = self.nodes[0].sendrawtransaction(
            signed_tx["hex"], True)
        self.nodes[0].generate(1)
        self.sync_all()
        balance1 = self.nodes[1].getaddressbalance(address2)
        assert_equal(balance1["balance"], amount)

        tx = CTransaction()
        tx.vin = [CTxIn(COutPoint(int(spending_txid, 16), 0))]
        send_amount = 1 * 100000000 + 12840
        change_amount = amount - send_amount - 230000
        tx.vout = [
            CTxOut(change_amount, scriptPubKey2),
            CTxOut(send_amount, scriptPubKey)
        ]
        tx.rehash()

        signed_tx = self.nodes[0].signrawtransaction(
            binascii.hexlify(tx.serialize()).decode("utf-8"))
        sent_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], True)
        self.nodes[0].generate(1)
        self.sync_all()

        balance2 = self.nodes[1].getaddressbalance(address2)
        assert_equal(balance2["balance"], change_amount)

        # Check that deltas are returned correctly
        deltas = self.nodes[1].getaddressdeltas({
            "addresses": [address2],
            "start": 1,
            "end": 200
        })
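        # Each delta is one output-level change for the address: "satoshis" is
        # positive for funds received and negative for spends, so summing the
        # deltas reproduces the current balance.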
        balance3 = 0
        for delta in deltas:
            balance3 += delta["satoshis"]
        assert_equal(balance3, change_amount)
        assert_equal(deltas[0]["address"], address2)
        assert_equal(deltas[0]["blockindex"], 1)

        # Check that the entire range will be queried
        deltasAll = self.nodes[1].getaddressdeltas({"addresses": [address2]})
        assert_equal(len(deltasAll), len(deltas))

        # Check that deltas can be returned from range of block heights
        deltas = self.nodes[1].getaddressdeltas({
            "addresses": [address2],
            "start": 113,
            "end": 113
        })
        assert_equal(len(deltas), 1)

        # Check that unspent outputs can be queried
        self.log.info("Testing utxos...")
        utxos = self.nodes[1].getaddressutxos({"addresses": [address2]})
        assert_equal(len(utxos), 1)
        assert_equal(utxos[0]["satoshis"], change_amount)

        # Check that indexes will be updated with a reorg
        self.log.info("Testing reorg...")

        best_hash = self.nodes[0].getbestblockhash()
        self.nodes[0].invalidateblock(best_hash)
        self.nodes[1].invalidateblock(best_hash)
        self.nodes[2].invalidateblock(best_hash)
        self.nodes[3].invalidateblock(best_hash)
        self.sync_all()

        balance4 = self.nodes[1].getaddressbalance(address2)
        assert_equal(balance4, balance1)

        utxos2 = self.nodes[1].getaddressutxos({"addresses": [address2]})
        assert_equal(len(utxos2), 1)
        assert_equal(utxos2[0]["satoshis"], amount)

        # Check sorting of utxos
        self.nodes[2].generate(150)

        self.nodes[2].sendtoaddress(address2, 50)
        self.nodes[2].generate(1)
        self.nodes[2].sendtoaddress(address2, 50)
        self.nodes[2].generate(1)
        self.sync_all()

        utxos3 = self.nodes[1].getaddressutxos({"addresses": [address2]})
        assert_equal(len(utxos3), 3)
        assert_equal(utxos3[0]["height"], 114)
        assert_equal(utxos3[1]["height"], 264)
        assert_equal(utxos3[2]["height"], 265)

        # Check mempool indexing
        self.log.info("Testing mempool indexing...")

        privKey3 = "cVfUn53hAbRrDEuMexyfgDpZPhF7KqXpS8UZevsyTDaugB7HZ3CD"
        address3 = "mw4ynwhS7MmrQ27hr82kgqu7zryNDK26JB"
        addressHash3 = bytes([
            170, 152, 114, 181, 187, 205, 181, 17, 216, 158, 14, 17, 170, 39,
            218, 115, 253, 44, 63, 80
        ])
        scriptPubKey3 = CScript(
            [OP_DUP, OP_HASH160, addressHash3, OP_EQUALVERIFY, OP_CHECKSIG])
        #address4 = "2N8oFVB2vThAKury4vnLquW2zVjsYjjAkYQ"
        scriptPubKey4 = CScript([OP_HASH160, addressHash3, OP_EQUAL])
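        # scriptPubKey4 wraps the same 20-byte hash in a P2SH output (the
        # commented-out address4 above), so the index has to keep P2PKH and
        # P2SH entries separate even when the hash is identical.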
        unspent = self.nodes[2].listunspent()

        tx = CTransaction()
        tx.vin = [
            CTxIn(COutPoint(int(unspent[0]["txid"], 16), unspent[0]["vout"]))
        ]
        amount = int(unspent[0]["amount"] * 100000000 - 230000)
        tx.vout = [CTxOut(amount, scriptPubKey3)]
        tx.rehash()
        signed_tx = self.nodes[2].signrawtransaction(
            binascii.hexlify(tx.serialize()).decode("utf-8"))
        memtxid1 = self.nodes[2].sendrawtransaction(signed_tx["hex"], True)
        time.sleep(2)

        tx2 = CTransaction()
        tx2.vin = [
            CTxIn(COutPoint(int(unspent[1]["txid"], 16), unspent[1]["vout"]))
        ]
        amount = int(unspent[1]["amount"] * 100000000 - 300000)
        tx2.vout = [
            CTxOut(int(amount / 4), scriptPubKey3),
            CTxOut(int(amount / 4), scriptPubKey3),
            CTxOut(int(amount / 4), scriptPubKey4),
            CTxOut(int(amount / 4), scriptPubKey4)
        ]
        tx2.rehash()
        signed_tx2 = self.nodes[2].signrawtransaction(
            binascii.hexlify(tx2.serialize()).decode("utf-8"))
        memtxid2 = self.nodes[2].sendrawtransaction(signed_tx2["hex"], True)
        time.sleep(2)

        mempool = self.nodes[2].getaddressmempool({"addresses": [address3]})
        assert_equal(len(mempool), 3)
        assert_equal(mempool[0]["txid"], memtxid1)
        assert_equal(mempool[0]["address"], address3)
        assert_equal(mempool[0]["index"], 0)
        assert_equal(mempool[1]["txid"], memtxid2)
        assert_equal(mempool[1]["index"], 0)
        assert_equal(mempool[2]["txid"], memtxid2)
        assert_equal(mempool[2]["index"], 1)

        self.nodes[2].generate(1)
        self.sync_all()
        mempool2 = self.nodes[2].getaddressmempool({"addresses": [address3]})
        assert_equal(len(mempool2), 0)

        tx = CTransaction()
        tx.vin = [
            CTxIn(COutPoint(int(memtxid2, 16), 0)),
            CTxIn(COutPoint(int(memtxid2, 16), 1))
        ]
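        # The two inputs are the amount/4 outputs of memtxid2, so the single
        # output carries roughly amount/2 minus a 340000-satoshi fee.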
        tx.vout = [CTxOut(int(amount / 2 - 340000), scriptPubKey2)]
        tx.rehash()
        self.nodes[2].importprivkey(privKey3)
        signed_tx3 = self.nodes[2].signrawtransaction(
            binascii.hexlify(tx.serialize()).decode("utf-8"))
        self.nodes[2].sendrawtransaction(signed_tx3["hex"], True)
        time.sleep(2)

        mempool3 = self.nodes[2].getaddressmempool({"addresses": [address3]})
        assert_equal(len(mempool3), 2)
        assert_equal(mempool3[0]["prevtxid"], memtxid2)
        assert_equal(mempool3[0]["prevout"], 0)
        assert_equal(mempool3[1]["prevtxid"], memtxid2)
        assert_equal(mempool3[1]["prevout"], 1)

        # Sending from and receiving at the same address
        privkey1 = "cQY2s58LhzUCmEXN8jtAp1Etnijx78YRZ466w4ikX1V4UpTpbsf8"
        address1 = "myAUWSHnwsQrhuMWv4Br6QsCnpB41vFwHn"
        address1hash = bytes([
            193, 146, 191, 247, 81, 175, 142, 254, 193, 81, 53, 212, 43, 254,
            237, 249, 26, 111, 62, 52
        ])
        address1script = CScript(
            [OP_DUP, OP_HASH160, address1hash, OP_EQUALVERIFY, OP_CHECKSIG])

        self.nodes[0].sendtoaddress(address1, 10)
        self.nodes[0].generate(1)
        self.sync_all()

        utxos = self.nodes[1].getaddressutxos({"addresses": [address1]})
        assert_equal(len(utxos), 1)

        tx = CTransaction()
        tx.vin = [
            CTxIn(COutPoint(int(utxos[0]["txid"], 16),
                            utxos[0]["outputIndex"]))
        ]
        amount = int(utxos[0]["satoshis"] - 200000)
        tx.vout = [CTxOut(amount, address1script)]
        tx.rehash()
        self.nodes[0].importprivkey(privkey1)
        signed_tx = self.nodes[0].signrawtransaction(
            binascii.hexlify(tx.serialize()).decode("utf-8"))
        self.nodes[0].sendrawtransaction(signed_tx["hex"], True)

        self.sync_all()
        mempool_deltas = self.nodes[2].getaddressmempool(
            {"addresses": [address1]})
        assert_equal(len(mempool_deltas), 2)

        # Include chaininfo in results
        self.log.info("Testing results with chain info...")

        deltas_with_info = self.nodes[1].getaddressdeltas({
            "addresses": [address2],
            "start": 1,
            "end": 200,
            "chainInfo": True
        })
        start_block_hash = self.nodes[1].getblockhash(1)
        end_block_hash = self.nodes[1].getblockhash(200)
        assert_equal(deltas_with_info["start"]["height"], 1)
        assert_equal(deltas_with_info["start"]["hash"], start_block_hash)
        assert_equal(deltas_with_info["end"]["height"], 200)
        assert_equal(deltas_with_info["end"]["hash"], end_block_hash)

        utxos_with_info = self.nodes[1].getaddressutxos({
            "addresses": [address2],
            "chainInfo": True
        })
        expected_tip_block_hash = self.nodes[1].getblockhash(267)
        assert_equal(utxos_with_info["height"], 267)
        assert_equal(utxos_with_info["hash"], expected_tip_block_hash)

        self.log.info("All Tests Passed")
Example #22
 def csv_invalidate(self, tx):
     '''Modify the scriptSig of vin 0 so that the tx fails CSV.
     Prepends "-1 OP_NOP3 (CSV) OP_DROP" to the scriptSig itself.
     '''
     tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_NOP3, OP_DROP] +
                                   list(CScript(tx.vin[0].scriptSig)))
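# A minimal, self-contained sketch (separate from the helper above, and
# assuming the same test_framework.script module) of what csv_invalidate does
# at the script level: the original scriptSig pushes are kept, but
# -1 OP_NOP3 (i.e. OP_CHECKSEQUENCEVERIFY) OP_DROP is prepended so that CSV
# evaluation fails immediately.
from test_framework.script import CScript, OP_1NEGATE, OP_NOP3, OP_DROP

original_script_sig = CScript([b'\x01' * 71])  # placeholder signature push
invalidated = CScript([OP_1NEGATE, OP_NOP3, OP_DROP] +
                      list(CScript(original_script_sig)))
# The first three bytes are the raw opcodes; the original push follows intact.
assert invalidated[:3] == bytes([OP_1NEGATE, OP_NOP3, OP_DROP])
assert invalidated[3:] == bytes(original_script_sig)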
Example #23
#!/usr/bin/env python3
# Copyright (c) 2019 The Cicoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Useful Script constants and utils."""
from test_framework.script import CScript

# To prevent a "tx-size-small" policy rule error, a transaction has to have a
# non-witness size of at least 82 bytes (MIN_STANDARD_TX_NONWITNESS_SIZE in
# src/policy/policy.h). Considering a Tx with the smallest possible single
# input (blank, empty scriptSig), and with an output omitting the scriptPubKey,
# we get to a minimum size of 60 bytes:
#
# Tx Skeleton: 4 [Version] + 1 [InCount] + 1 [OutCount] + 4 [LockTime] = 10 bytes
# Blank Input: 32 [PrevTxHash] + 4 [Index] + 1 [scriptSigLen] + 4 [SeqNo] = 41 bytes
# Output:      8 [Amount] + 1 [scriptPubKeyLen] = 9 bytes
#
# Hence, the scriptPubKey of the single output has to have a size of at
# least 22 bytes, which corresponds to the size of a P2WPKH scriptPubKey.
# The following script constant consists of a single push of 21 bytes of 'a':
#   <PUSH_21> <21-bytes of 'a'>
# resulting in a 22-byte size. It should be used whenever (small) fake
# scriptPubKeys are needed, to guarantee that the minimum transaction size is
# met.
DUMMY_P2WPKH_SCRIPT = CScript([b'a' * 21])
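# A quick self-check of the arithmetic above (a sketch, assuming the standard
# test_framework.messages module is available): a transaction with one blank
# input and a single output paying to DUMMY_P2WPKH_SCRIPT serializes to
# exactly 60 + 22 = 82 non-witness bytes, the MIN_STANDARD_TX_NONWITNESS_SIZE
# floor mentioned above.
from test_framework.messages import COutPoint, CTransaction, CTxIn, CTxOut

tx = CTransaction()
tx.vin = [CTxIn(COutPoint(0, 0))]           # blank input, empty scriptSig
tx.vout = [CTxOut(0, DUMMY_P2WPKH_SCRIPT)]  # 22-byte scriptPubKey
assert len(tx.serialize_without_witness()) == 82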
    def witness_script_test(self):
        # Now test signing a transaction to a P2SH-P2WSH address without the wallet
        # Create a new P2SH-P2WSH 1-of-1 multisig address:
        embedded_address = self.nodes[1].getaddressinfo(
            self.nodes[1].getnewaddress())
        embedded_privkey = self.nodes[1].dumpprivkey(
            embedded_address["address"])
        p2sh_p2wsh_address = self.nodes[1].addmultisigaddress(
            1, [embedded_address["pubkey"]], "", "p2sh-segwit")
        # send transaction to P2SH-P2WSH 1-of-1 multisig address
        self.nodes[0].generate(101)
        self.nodes[0].sendtoaddress(p2sh_p2wsh_address["address"], 49.999)
        self.nodes[0].generate(1)
        self.sync_all()
        # Find the UTXO for the transaction node[1] should have received and check that its witnessScript matches
        unspent_output = self.nodes[1].listunspent(
            0, 999999, [p2sh_p2wsh_address["address"]])[0]
        assert_equal(unspent_output["witnessScript"],
                     p2sh_p2wsh_address["redeemScript"])
        p2sh_redeemScript = CScript([
            OP_0,
            sha256(hex_str_to_bytes(p2sh_p2wsh_address["redeemScript"]))
        ])
        assert_equal(unspent_output["redeemScript"], p2sh_redeemScript.hex())
        # Now create and sign a transaction spending that output on node[0], which doesn't know the scripts or keys
        spending_tx = self.nodes[0].createrawtransaction(
            [unspent_output],
            {self.nodes[1].getnewaddress(): Decimal("49.998")})
        spending_tx_signed = self.nodes[0].signrawtransactionwithkey(
            spending_tx, [embedded_privkey], [unspent_output])
        # Check the signing completed successfully
        assert 'complete' in spending_tx_signed
        assert_equal(spending_tx_signed['complete'], True)

        self.log.info('Try with a P2PKH script as the witnessScript')
        embedded_addr_info = self.nodes[1].getaddressinfo(
            self.nodes[1].getnewaddress('', 'legacy'))
        embedded_privkey = self.nodes[1].dumpprivkey(
            embedded_addr_info['address'])
        witness_script = embedded_addr_info['scriptPubKey']
        redeem_script = CScript([OP_0,
                                 sha256(check_script(witness_script))]).hex()
        addr = script_to_p2sh(redeem_script)
        script_pub_key = self.nodes[1].validateaddress(addr)['scriptPubKey']
        # Fund that address
        txid = self.nodes[0].sendtoaddress(addr, 10)
        vout = find_vout_for_address(self.nodes[0], txid, addr)
        self.nodes[0].generate(1)
        # Now create and sign a transaction spending that output on node[0], which doesn't know the scripts or keys
        spending_tx = self.nodes[0].createrawtransaction(
            [{
                'txid': txid,
                'vout': vout
            }], {self.nodes[1].getnewaddress(): Decimal("9.999")})
        spending_tx_signed = self.nodes[0].signrawtransactionwithkey(
            spending_tx, [embedded_privkey], [{
                'txid': txid,
                'vout': vout,
                'scriptPubKey': script_pub_key,
                'redeemScript': redeem_script,
                'witnessScript': witness_script,
                'amount': 10
            }])
        # Check the signing completed successfully
        assert 'complete' in spending_tx_signed
        assert_equal(spending_tx_signed['complete'], True)
        self.nodes[0].sendrawtransaction(spending_tx_signed['hex'])

        self.log.info('Try with a P2PK script as the witnessScript')
        embedded_addr_info = self.nodes[1].getaddressinfo(
            self.nodes[1].getnewaddress('', 'legacy'))
        embedded_privkey = self.nodes[1].dumpprivkey(
            embedded_addr_info['address'])
        witness_script = CScript(
            [hex_str_to_bytes(embedded_addr_info['pubkey']),
             OP_CHECKSIG]).hex()
        redeem_script = CScript([OP_0,
                                 sha256(check_script(witness_script))]).hex()
        addr = script_to_p2sh(redeem_script)
        script_pub_key = self.nodes[1].validateaddress(addr)['scriptPubKey']
        # Fund that address
        txid = self.nodes[0].sendtoaddress(addr, 10)
        vout = find_vout_for_address(self.nodes[0], txid, addr)
        self.nodes[0].generate(1)
        # Now create and sign a transaction spending that output on node[0], which doesn't know the scripts or keys
        spending_tx = self.nodes[0].createrawtransaction(
            [{
                'txid': txid,
                'vout': vout
            }], {self.nodes[1].getnewaddress(): Decimal("9.999")})
        spending_tx_signed = self.nodes[0].signrawtransactionwithkey(
            spending_tx, [embedded_privkey], [{
                'txid': txid,
                'vout': vout,
                'scriptPubKey': script_pub_key,
                'redeemScript': redeem_script,
                'witnessScript': witness_script,
                'amount': 10
            }])
        # Check the signing completed successfully
        assert 'complete' in spending_tx_signed
        assert_equal(spending_tx_signed['complete'], True)
        self.nodes[0].sendrawtransaction(spending_tx_signed['hex'])
    def test_sequence_lock_unconfirmed_inputs(self):
        # Store height so we can easily reset the chain at the end of the test
        cur_height = self.nodes[0].getblockcount()

        # Create a mempool tx.
        txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 2)
        tx1 = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txid))
        tx1.rehash()

        # Anyone-can-spend mempool tx.
        # Sequence lock of 0 should pass.
        tx2 = CTransaction()
        tx2.nVersion = 2
        tx2.vin = [CTxIn(COutPoint(tx1.sha256, 0), nSequence=0)]
        tx2.vout = [
            CTxOut(int(tx1.vout[0].nValue - self.relayfee * COIN),
                   CScript([b'a']))
        ]
        tx2_raw = self.nodes[0].signrawtransactionwithwallet(ToHex(tx2))["hex"]
        tx2 = FromHex(tx2, tx2_raw)
        tx2.rehash()

        self.nodes[0].sendrawtransaction(tx2_raw)

        # Create a spend of the 0th output of orig_tx with a sequence lock
        # of 1, and test what happens when submitting.
        # orig_tx.vout[0] must be an anyone-can-spend output
        def test_nonzero_locks(orig_tx, node, relayfee, use_height_lock):
            sequence_value = 1
            if not use_height_lock:
                sequence_value |= SEQUENCE_LOCKTIME_TYPE_FLAG
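                # BIP68: with SEQUENCE_LOCKTIME_TYPE_FLAG (bit 22) set, the
                # low 16 bits of nSequence count units of 512 seconds instead
                # of blocks, turning this into a time-based relative lock.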

            tx = CTransaction()
            tx.nVersion = 2
            tx.vin = [
                CTxIn(COutPoint(orig_tx.sha256, 0), nSequence=sequence_value)
            ]
            tx.vout = [
                CTxOut(int(orig_tx.vout[0].nValue - relayfee * COIN),
                       CScript([b'a' * 35]))
            ]
            tx.rehash()

            if (orig_tx.hash in node.getrawmempool()):
                # sendrawtransaction should fail if the tx is in the mempool
                assert_raises_rpc_error(-26, NOT_FINAL_ERROR,
                                        node.sendrawtransaction, ToHex(tx))
            else:
                # sendrawtransaction should succeed if the tx is not in the mempool
                node.sendrawtransaction(ToHex(tx))

            return tx

        test_nonzero_locks(tx2,
                           self.nodes[0],
                           self.relayfee,
                           use_height_lock=True)
        test_nonzero_locks(tx2,
                           self.nodes[0],
                           self.relayfee,
                           use_height_lock=False)

        # Now mine some blocks, but make sure tx2 doesn't get mined.
        # Use prioritisetransaction to lower the effective feerate to 0
        self.nodes[0].prioritisetransaction(txid=tx2.hash,
                                            fee_delta=int(-self.relayfee *
                                                          COIN))
        cur_time = int(time.time())
        for i in range(10):
            self.nodes[0].setmocktime(cur_time + 600)
            self.nodes[0].generate(1)
            cur_time += 600

        assert tx2.hash in self.nodes[0].getrawmempool()

        test_nonzero_locks(tx2,
                           self.nodes[0],
                           self.relayfee,
                           use_height_lock=True)
        test_nonzero_locks(tx2,
                           self.nodes[0],
                           self.relayfee,
                           use_height_lock=False)

        # Mine tx2, and then try again
        self.nodes[0].prioritisetransaction(txid=tx2.hash,
                                            fee_delta=int(self.relayfee *
                                                          COIN))

        # Advance the time on the node so that we can test timelocks
        self.nodes[0].setmocktime(cur_time + 600)
        self.nodes[0].generate(1)
        assert tx2.hash not in self.nodes[0].getrawmempool()

        # Now that tx2 is not in the mempool, a sequence locked spend should
        # succeed
        tx3 = test_nonzero_locks(tx2,
                                 self.nodes[0],
                                 self.relayfee,
                                 use_height_lock=False)
        assert tx3.hash in self.nodes[0].getrawmempool()

        self.nodes[0].generate(1)
        assert tx3.hash not in self.nodes[0].getrawmempool()

        # One more test, this time using height locks
        tx4 = test_nonzero_locks(tx3,
                                 self.nodes[0],
                                 self.relayfee,
                                 use_height_lock=True)
        assert tx4.hash in self.nodes[0].getrawmempool()

        # Now try combining confirmed and unconfirmed inputs
        tx5 = test_nonzero_locks(tx4,
                                 self.nodes[0],
                                 self.relayfee,
                                 use_height_lock=True)
        assert tx5.hash not in self.nodes[0].getrawmempool()

        utxos = self.nodes[0].listunspent()
        tx5.vin.append(
            CTxIn(COutPoint(int(utxos[0]["txid"], 16), utxos[0]["vout"]),
                  nSequence=1))
        tx5.vout[0].nValue += int(utxos[0]["amount"] * COIN)
        raw_tx5 = self.nodes[0].signrawtransactionwithwallet(ToHex(tx5))["hex"]

        assert_raises_rpc_error(-26, NOT_FINAL_ERROR,
                                self.nodes[0].sendrawtransaction, raw_tx5)

        # Test mempool-BIP68 consistency after reorg
        #
        # State of the transactions in the last blocks:
        # ... -> [ tx2 ] ->  [ tx3 ]
        #         tip-1        tip
        # And currently tx4 is in the mempool.
        #
        # If we invalidate the tip, tx3 should get added to the mempool, causing
        # tx4 to be removed (fails sequence-lock).
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        assert tx4.hash not in self.nodes[0].getrawmempool()
        assert tx3.hash in self.nodes[0].getrawmempool()

        # Now mine 2 empty blocks to reorg out the current tip (labeled tip-1
        # in the diagram above).
        # This would cause tx2 to be added back to the mempool, which in turn causes
        # tx3 to be removed.
        tip = int(
            self.nodes[0].getblockhash(self.nodes[0].getblockcount() - 1), 16)
        height = self.nodes[0].getblockcount()
        for i in range(2):
            block = create_block(tip, create_coinbase(height), cur_time)
            block.set_base_version(3)
            block.rehash()
            block.solve()
            tip = block.sha256
            height += 1
            self.nodes[0].submitblock(ToHex(block))
            cur_time += 1

        mempool = self.nodes[0].getrawmempool()
        assert tx3.hash not in mempool
        assert tx2.hash in mempool

        # Reset the chain and get rid of the mocktimed blocks
        self.nodes[0].setmocktime(0)
        self.nodes[0].invalidateblock(self.nodes[0].getblockhash(cur_height +
                                                                 1))
        self.nodes[0].generate(10)
Example #26
 def create_tx(self, spend_tx, n, value, script=CScript([OP_TRUE])):
     tx = create_tx_with_script(spend_tx, n, b"", value, script)
     return tx
from test_framework.messages import CTransaction, CTxIn, CTxOut, COutPoint, ToHex, COIN
from test_framework.script import CScript, OP_1, OP_DROP, OP_2, OP_HASH160, OP_EQUAL, hash160, OP_TRUE
from test_framework.test_framework import shitecoinTestFramework
from test_framework.util import (
    assert_equal,
    assert_greater_than,
    assert_greater_than_or_equal,
    assert_raises_rpc_error,
    satoshi_round,
)

# Construct 2 trivial P2SH's and the ScriptSigs that spend them
# So we can create many transactions without needing to spend
# time signing.
REDEEM_SCRIPT_1 = CScript([OP_1, OP_DROP])
REDEEM_SCRIPT_2 = CScript([OP_2, OP_DROP])
P2SH_1 = CScript([OP_HASH160, hash160(REDEEM_SCRIPT_1), OP_EQUAL])
P2SH_2 = CScript([OP_HASH160, hash160(REDEEM_SCRIPT_2), OP_EQUAL])

# Associated ScriptSig's to spend satisfy P2SH_1 and P2SH_2
SCRIPT_SIG = [
    CScript([OP_TRUE, REDEEM_SCRIPT_1]),
    CScript([OP_TRUE, REDEEM_SCRIPT_2])
]


def small_txpuzzle_randfee(from_node, conflist, unconflist, amount, min_fee,
                           fee_increment):
    """Create and send a transaction with a random fee.
Example #28
    def next_block(self,
                   number,
                   spend=None,
                   script=CScript([OP_TRUE]),
                   block_size=0,
                   extra_txns=0):
        if self.tip is None:
            base_block_hash = self.genesis_hash
            block_time = int(time.time()) + 1
        else:
            base_block_hash = self.tip.sha256
            block_time = self.tip.nTime + 1
        # First create the coinbase
        height = self.block_heights[base_block_hash] + 1
        coinbase = create_coinbase(height)
        coinbase.rehash()
        if spend is None:
            # We need to have something to spend to fill the block.
            assert_equal(block_size, 0)
            block = create_block(base_block_hash, coinbase, block_time)
        else:
            # all but one satoshi to fees
            coinbase.vout[0].nValue += spend.tx.vout[spend.n].nValue - 1
            coinbase.rehash()
            block = create_block(base_block_hash, coinbase, block_time)

            # Make sure we have plenty to spend going forward.
            spendable_outputs = deque([spend])

            def get_base_transaction():
                # Create the new transaction
                tx = CTransaction()
                # Spend from one of the spendable outputs
                spend = spendable_outputs.popleft()
                tx.vin.append(CTxIn(COutPoint(spend.tx.sha256, spend.n)))
                # Add spendable outputs
                for i in range(4):
                    tx.vout.append(CTxOut(0, CScript([OP_TRUE])))
                    spendable_outputs.append(PreviousSpendableOutput(tx, i))
                pad_tx(tx)
                return tx

            tx = get_base_transaction()

            # Make it the same format as the transactions added for padding and save the size.
            # It's missing the padding output, so we add a constant to account for it.
            tx.rehash()

            # If a specific script is required, add it.
            if script is not None:
                tx.vout.append(CTxOut(1, script))

            # Put some random data into the first transaction of the chain to randomize ids.
            tx.vout.append(
                CTxOut(0, CScript([random.randint(0, 256), OP_RETURN])))

            # Add the transaction to the block
            self.add_transactions_to_block(block, [tx])

            # Add transactions until we reach the expected transaction count
            for _ in range(extra_txns):
                self.add_transactions_to_block(block, [get_base_transaction()])

            # If we have a block size requirement, just fill
            # the block until we get there
            current_block_size = len(block.serialize())
            overage_bytes = 0
            while current_block_size < block_size:
                # We will add a new transaction. That means the size of
                # the field enumerating how many transactions go in the block
                # may change.
                current_block_size -= len(ser_compact_size(len(block.vtx)))
                current_block_size += len(ser_compact_size(len(block.vtx) + 1))

                # Add padding to fill the block.
                left_to_fill = block_size - current_block_size

                # Don't go over the 1 MB limit for a txn
                if left_to_fill > 500000:
                    # Make sure we quickly use up amounts that aren't divisible by 100
                    # Also keep the transaction less than 1 MB
                    left_to_fill = 500000 + left_to_fill % 100

                # Create the new transaction
                tx = get_base_transaction()
                pad_tx(tx, left_to_fill - overage_bytes)
                if len(tx.serialize()) + current_block_size > block_size:
                    # Our padding was too big, try again
                    overage_bytes += 1
                    continue

                # Add the tx to the list of transactions to be included
                # in the block.
                self.add_transactions_to_block(block, [tx])
                current_block_size += len(tx.serialize())

            # Now that we added a bunch of transactions, we need to recompute
            # the merkle root.
            make_conform_to_ctor(block)
            block.hashMerkleRoot = block.calc_merkle_root()

        # Check that the block size is what's expected
        if block_size > 0:
            assert_equal(len(block.serialize()), block_size)

        # Do PoW, which is cheap on regnet
        block.solve()
        self.tip = block
        self.block_heights[block.sha256] = height
        assert number not in self.blocks
        self.blocks[number] = block
        return block
Example #29
 def p2sh_address_to_script(self, v):
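     # 'v' is expected to be an address-info style result for a P2SH address
     # (e.g. from validateaddress), where 'hex' is the embedded script and
     # 'scriptPubKey' is the P2SH script; return the bare script plus its
     # P2SH, P2WSH and P2SH-P2WSH encumbrances.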
     bare = CScript(hex_str_to_bytes(v['hex']))
     p2sh = CScript(hex_str_to_bytes(v['scriptPubKey']))
     p2wsh = CScript([OP_0, sha256(bare)])
     p2sh_p2wsh = CScript([OP_HASH160, hash160(p2wsh), OP_EQUAL])
     return [bare, p2sh, p2wsh, p2sh_p2wsh]
    def run_test(self):
        node = self.nodes[0]
        node.add_p2p_connection(P2PDataStore())

        self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
        self.block_heights[self.genesis_hash] = 0
        spendable_outputs = []

        # save the current tip so it can be spent by a later block
        def save_spendable_output():
            spendable_outputs.append(self.tip)

        # get an output that we previously marked as spendable
        def get_spendable_output():
            return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)

        # move the tip back to a previous block
        def tip(number):
            self.tip = self.blocks[number]

        # adds transactions to the block and updates state
        def update_block(block_number, new_transactions):
            block = self.blocks[block_number]
            self.add_transactions_to_block(block, new_transactions)
            old_sha256 = block.sha256
            block.hashMerkleRoot = block.calc_merkle_root()
            block.solve()
            # Update the internal state just like in next_block
            self.tip = block
            if block.sha256 != old_sha256:
                self.block_heights[
                    block.sha256] = self.block_heights[old_sha256]
                del self.block_heights[old_sha256]
            self.blocks[block_number] = block
            return block

        # shorthand for functions
        block = self.next_block

        # Create a new block
        block(0)
        save_spendable_output()
        node.p2p.send_blocks_and_test([self.tip], node)

        # Now we need that block to mature so we can spend the coinbase.
        maturity_blocks = []
        for i in range(99):
            block(5000 + i)
            maturity_blocks.append(self.tip)
            save_spendable_output()
        node.p2p.send_blocks_and_test(maturity_blocks, node)

        # Collect spendable outputs now to avoid cluttering the code later on
        out = []
        for i in range(33):
            out.append(get_spendable_output())

        # P2SH
        # Build the redeem script, hash it, use hash to create the p2sh script
        redeem_script = CScript([self.coinbase_pubkey] +
                                [OP_2DUP, OP_CHECKSIGVERIFY] * 5 +
                                [OP_CHECKSIG])
        redeem_script_hash = hash160(redeem_script)
        p2sh_script = CScript([OP_HASH160, redeem_script_hash, OP_EQUAL])
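        # Spending this redeem script performs six signature checks (five
        # OP_CHECKSIGVERIFY plus the final OP_CHECKSIG), which is the accurate
        # sigop count subtracted from the limits below.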

        # Creates a new transaction using a p2sh transaction as input
        def spend_p2sh_tx(p2sh_tx_to_spend, output_script=CScript([OP_TRUE])):
            # Create the transaction
            spent_p2sh_tx = CTransaction()
            spent_p2sh_tx.vin.append(
                CTxIn(COutPoint(p2sh_tx_to_spend.sha256, 0), b''))
            spent_p2sh_tx.vout.append(CTxOut(1, output_script))
            # Sign the transaction using the redeem script
            sighash = SignatureHashForkId(redeem_script, spent_p2sh_tx, 0,
                                          SIGHASH_ALL | SIGHASH_FORKID,
                                          p2sh_tx_to_spend.vout[0].nValue)
            sig = self.coinbase_key.sign(sighash) + bytes(
                bytearray([SIGHASH_ALL | SIGHASH_FORKID]))
            spent_p2sh_tx.vin[0].scriptSig = CScript([sig, redeem_script])
            spent_p2sh_tx.rehash()
            return spent_p2sh_tx

        # P2SH tests
        # Create a p2sh transaction
        p2sh_tx = self.create_and_sign_transaction(out[0].tx, out[0].n, 1,
                                                   p2sh_script)

        # Add the transaction to the block
        block(1)
        update_block(1, [p2sh_tx])
        node.p2p.send_blocks_and_test([self.tip], node)

        # Sigops p2sh limit for the mempool test
        p2sh_sigops_limit_mempool = MAX_STANDARD_TX_SIGOPS - \
            redeem_script.GetSigOpCount(True)
        # Too many sigops in one p2sh script
        too_many_p2sh_sigops_mempool = CScript([OP_CHECKSIG] *
                                               (p2sh_sigops_limit_mempool + 1))

        # A transaction with this output script can't get into the mempool
        assert_raises_rpc_error(
            -26, RPC_TXNS_TOO_MANY_SIGOPS_ERROR, node.sendrawtransaction,
            ToHex(spend_p2sh_tx(p2sh_tx, too_many_p2sh_sigops_mempool)))

        # The transaction is rejected, so the mempool should still be empty
        assert_equal(set(node.getrawmempool()), set())

        # Max sigops in one p2sh txn
        max_p2sh_sigops_mempool = CScript([OP_CHECKSIG] *
                                          (p2sh_sigops_limit_mempool))

        # A transaction with this output script can get into the mempool
        max_p2sh_sigops_txn = spend_p2sh_tx(p2sh_tx, max_p2sh_sigops_mempool)
        max_p2sh_sigops_txn_id = node.sendrawtransaction(
            ToHex(max_p2sh_sigops_txn))
        assert_equal(set(node.getrawmempool()), {max_p2sh_sigops_txn_id})

        # Mine the transaction
        block(2, spend=out[1])
        update_block(2, [max_p2sh_sigops_txn])
        node.p2p.send_blocks_and_test([self.tip], node)

        # The transaction has been mined; it's not in the mempool anymore
        assert_equal(set(node.getrawmempool()), set())