Example #1
    def run_test(self):
        wallet = MiniWallet(self.nodes[0])

        # Invalidate two blocks, so that miniwallet has access to a coin that will mature in the next block
        chain_height = 198
        self.nodes[0].invalidateblock(self.nodes[0].getblockhash(chain_height + 1))
        assert_equal(chain_height, self.nodes[0].getblockcount())
        wallet.rescan_utxos()

        # Coinbase at height chain_height-100+1 ok in mempool, should
        # get mined. Coinbase at height chain_height-100+2 is
        # too immature to spend.
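        # (With a coinbase maturity of 100 blocks, a spend mined at height 199
        # gives the height-99 coinbase the required depth of 199 - 99 = 100,
        # while the height-100 coinbase is still one block short.)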
        coinbase_txid = lambda h: self.nodes[0].getblock(self.nodes[0].getblockhash(h))['tx'][0]
        utxo_mature = wallet.get_utxo(txid=coinbase_txid(chain_height - 100 + 1))
        utxo_immature = wallet.get_utxo(txid=coinbase_txid(chain_height - 100 + 2))

        spend_mature_id = wallet.send_self_transfer(from_node=self.nodes[0], utxo_to_spend=utxo_mature)["txid"]

        # other coinbase should be too immature to spend
        immature_tx = wallet.create_self_transfer(from_node=self.nodes[0], utxo_to_spend=utxo_immature, mempool_valid=False)
        assert_raises_rpc_error(-26,
                                "bad-txns-premature-spend-of-coinbase",
                                lambda: self.nodes[0].sendrawtransaction(immature_tx['hex']))

        # mempool should have just the mature one
        assert_equal(self.nodes[0].getrawmempool(), [spend_mature_id])

        # mine a block, mature one should get confirmed
        self.generate(self.nodes[0], 1, sync_fun=self.no_op)
        assert_equal(set(self.nodes[0].getrawmempool()), set())

        # ... and now previously immature can be spent:
        spend_new_id = self.nodes[0].sendrawtransaction(immature_tx['hex'])
        assert_equal(self.nodes[0].getrawmempool(), [spend_new_id])
Example #2
    def run_test(self):
        mini_wallet = MiniWallet(self.nodes[1])
        mini_wallet.rescan_utxos()
        spend_utxo = mini_wallet.get_utxo()
        mini_wallet.send_self_transfer(from_node=self.nodes[1],
                                       utxo_to_spend=spend_utxo)
        self.generate(self.nodes[1], 1)

        self.log.info("Check legacy txindex")
        self.nodes[0].getrawtransaction(
            txid=spend_utxo["txid"])  # Requires -txindex

        self.stop_nodes()
        legacy_chain_dir = os.path.join(self.nodes[0].datadir, self.chain)

        self.log.info("Migrate legacy txindex")
        migrate_chain_dir = os.path.join(self.nodes[2].datadir, self.chain)
        shutil.rmtree(migrate_chain_dir)
        shutil.copytree(legacy_chain_dir, migrate_chain_dir)
        with self.nodes[2].assert_debug_log([
                "Upgrading txindex database...",
                "txindex is enabled at height 200",
        ]):
            self.start_node(2, extra_args=["-txindex"])
        self.nodes[2].getrawtransaction(
            txid=spend_utxo["txid"])  # Requires -txindex

        self.log.info("Drop legacy txindex")
        drop_index_chain_dir = os.path.join(self.nodes[1].datadir, self.chain)
        shutil.rmtree(drop_index_chain_dir)
        shutil.copytree(legacy_chain_dir, drop_index_chain_dir)
        self.nodes[1].assert_start_raises_init_error(
            extra_args=["-txindex"],
            expected_msg=
            "Error: The block index db contains a legacy 'txindex'. To clear the occupied disk space, run a full -reindex, otherwise ignore this error. This error message will not be displayed again.",
        )
        # Build txindex from scratch and check there is no error this time
        self.start_node(1, extra_args=["-txindex"])
        self.nodes[1].getrawtransaction(
            txid=spend_utxo["txid"])  # Requires -txindex

        self.stop_nodes()

        self.log.info("Check migrated txindex can not be read by legacy node")
        err_msg = f": You need to rebuild the database using -reindex to change -txindex.{os.linesep}Please restart with -reindex or -reindex-chainstate to recover."
        shutil.rmtree(legacy_chain_dir)
        shutil.copytree(migrate_chain_dir, legacy_chain_dir)
        self.nodes[0].assert_start_raises_init_error(extra_args=["-txindex"],
                                                     expected_msg=err_msg)
        shutil.rmtree(legacy_chain_dir)
        shutil.copytree(drop_index_chain_dir, legacy_chain_dir)
        self.nodes[0].assert_start_raises_init_error(extra_args=["-txindex"],
                                                     expected_msg=err_msg)
Example #3
class MempoolFeeFieldsDeprecationTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2
        self.extra_args = [[], ["-deprecatedrpc=fees"]]

    def run_test(self):
        # we get spendable outputs from the premined chain starting
        # at block 76. see BitcoinTestFramework._initialize_chain() for details
        self.wallet = MiniWallet(self.nodes[0])
        self.wallet.rescan_utxos()

        # we create the tx on the first node and wait until it syncs to node_deprecated
        # thus, any differences must be coming from getmempoolentry or getrawmempool
        tx = self.wallet.send_self_transfer(from_node=self.nodes[0])
        self.nodes[1].sendrawtransaction(tx["hex"])

        deprecated_fields = [
            "ancestorfees", "descendantfees", "modifiedfee", "fee"
        ]
        self.test_getmempoolentry(tx["txid"], deprecated_fields)
        self.test_getrawmempool(tx["txid"], deprecated_fields)
        self.test_deprecated_fields_match(tx["txid"])

    def test_getmempoolentry(self, txid, deprecated_fields):

        self.log.info("Test getmempoolentry rpc")
        entry = self.nodes[0].getmempoolentry(txid)
        deprecated_entry = self.nodes[1].getmempoolentry(txid)
        assertions_helper(entry, deprecated_entry, deprecated_fields)

    def test_getrawmempool(self, txid, deprecated_fields):

        self.log.info("Test getrawmempool rpc")
        entry = self.nodes[0].getrawmempool(verbose=True)[txid]
        deprecated_entry = self.nodes[1].getrawmempool(verbose=True)[txid]
        assertions_helper(entry, deprecated_entry, deprecated_fields)

    def test_deprecated_fields_match(self, txid):

        self.log.info("Test deprecated fee fields match new fees object")
        entry = self.nodes[0].getmempoolentry(txid)
        deprecated_entry = self.nodes[1].getmempoolentry(txid)

        assert_equal(deprecated_entry["fee"], entry["fees"]["base"])
        assert_equal(deprecated_entry["modifiedfee"],
                     entry["fees"]["modified"])
        assert_equal(deprecated_entry["descendantfees"],
                     entry["fees"]["descendant"] * COIN)
        assert_equal(deprecated_entry["ancestorfees"],
                     entry["fees"]["ancestor"] * COIN)
Example #4
    def _test_getblock(self):
        node = self.nodes[0]

        miniwallet = MiniWallet(node)
        miniwallet.rescan_utxos()

        fee_per_byte = Decimal('0.00000010')
        fee_per_kb = 1000 * fee_per_byte
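        # MiniWallet's fee_rate appears to be denominated per 1000 vbytes, so a
        # rate of 1000 * fee_per_byte should yield a fee of vsize * fee_per_byte
        # (see the assertion on the mined transaction below).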

        miniwallet.send_self_transfer(fee_rate=fee_per_kb, from_node=node)
        blockhash = self.generate(node, 1)[0]

        self.log.info("Test getblock with verbosity 1 doesn't include fee")
        block = node.getblock(blockhash, 1)
        assert 'fee' not in block['tx'][1]

        self.log.info('Test getblock with verbosity 2 includes expected fee')
        block = node.getblock(blockhash, 2)
        tx = block['tx'][1]
        assert 'fee' in tx
        assert_equal(tx['fee'], tx['vsize'] * fee_per_byte)

        self.log.info(
            "Test getblock with verbosity 2 still works with pruned Undo data")
        datadir = get_datadir_path(self.options.tmpdir, 0)

        self.log.info(
            "Test getblock with invalid verbosity type returns proper error message"
        )
        assert_raises_rpc_error(-1, "JSON value is not an integer as expected",
                                node.getblock, blockhash, "2")

        def move_block_file(old, new):
            old_path = os.path.join(datadir, self.chain, 'blocks', old)
            new_path = os.path.join(datadir, self.chain, 'blocks', new)
            os.rename(old_path, new_path)

        # Move instead of deleting so we can restore chain state afterwards
        move_block_file('rev00000.dat', 'rev_wrong')

        block = node.getblock(blockhash, 2)
        assert 'fee' not in block['tx'][1]

        # Restore chain state
        move_block_file('rev_wrong', 'rev00000.dat')

        assert 'previousblockhash' not in node.getblock(node.getblockhash(0))
        assert 'nextblockhash' not in node.getblock(node.getbestblockhash())
Example #5
    def run_test(self):
        wallet = MiniWallet(self.nodes[0])

        # Invalidate two blocks, so that miniwallet has access to a coin that will mature in the next block
        chain_height = 198
        self.nodes[0].invalidateblock(self.nodes[0].getblockhash(chain_height +
                                                                 1))
        assert_equal(chain_height, self.nodes[0].getblockcount())
        wallet.rescan_utxos()

        # Coinbase at height chain_height-100+1 ok in mempool, should
        # get mined. Coinbase at height chain_height-100+2 is
        # too immature to spend.
        coinbase_txid = lambda h: self.nodes[0].getblock(self.nodes[
            0].getblockhash(h))['tx'][0]
        utxo_mature = wallet.get_utxo(txid=coinbase_txid(chain_height - 100 +
                                                         1))
        utxo_immature = wallet.get_utxo(txid=coinbase_txid(chain_height - 100 +
                                                           2))

        spend_mature_id = wallet.send_self_transfer(
            from_node=self.nodes[0], utxo_to_spend=utxo_mature)["txid"]

        # the other coinbase should be too immature to spend
        assert_raises_rpc_error(
            -26, "bad-txns-premature-spend-of-coinbase",
            lambda: wallet.send_self_transfer(from_node=self.nodes[0],
                                              utxo_to_spend=utxo_immature))

        # mempool should have just the mature spend:
        assert_equal(self.nodes[0].getrawmempool(), [spend_mature_id])

        # mine a block, mature one should get confirmed
        self.generate(self.nodes[0], 1)
        assert_equal(set(self.nodes[0].getrawmempool()), set())

        # ... and now the previously immature coinbase can be spent:
        spend_immature_id = wallet.send_self_transfer(
            from_node=self.nodes[0], utxo_to_spend=utxo_immature)["txid"]
        assert_equal(self.nodes[0].getrawmempool(), [spend_immature_id])
Example #6
    def test_too_many_replacements_with_default_mempool_params(self):
        """
        Test rule 5 of BIP125 (do not allow replacements that cause more than 100
        evictions) without having to rely on non-default mempool parameters.

        In order to do this, create a number of "root" UTXOs, and then hang
        enough transactions off of each root UTXO to exceed the MAX_REPLACEMENT_LIMIT.
        Then create a conflicting RBF replacement transaction.
        """
        normal_node = self.nodes[1]
        wallet = MiniWallet(normal_node)
        wallet.rescan_utxos()
        # Clear mempools to avoid cross-node sync failure.
        for node in self.nodes:
            self.generate(node, 1)

        # This has to be chosen so that the total number of transactions can exceed
        # MAX_REPLACEMENT_LIMIT without having any one tx graph run into the descendant
        # limit; 10 works.
        num_tx_graphs = 10
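        # With MAX_REPLACEMENT_LIMIT at 100 (the rule 5 limit mentioned above),
        # 10 graphs of up to 10 spends each plus their 10 parents add up to 110
        # conflicting transactions, while each individual graph stays well under
        # the default descendant limit of 25.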

        # (Number of transactions per graph, BIP125 rule 5 failure expected)
        cases = [
            # Test the base case of evicting fewer than MAX_REPLACEMENT_LIMIT
            # transactions.
            ((MAX_REPLACEMENT_LIMIT // num_tx_graphs) - 1, False),

            # Test hitting the rule 5 eviction limit.
            (MAX_REPLACEMENT_LIMIT // num_tx_graphs, True),
        ]

        for (txs_per_graph, failure_expected) in cases:
            self.log.debug(
                f"txs_per_graph: {txs_per_graph}, failure: {failure_expected}")
            # "Root" utxos of each txn graph that we will attempt to double-spend with
            # an RBF replacement.
            root_utxos = []

            # For each root UTXO, create a package that contains the spend of that
            # UTXO and `txs_per_graph` children tx.
            for graph_num in range(num_tx_graphs):
                root_utxos.append(wallet.get_utxo())

                optin_parent_tx = wallet.send_self_transfer_multi(
                    from_node=normal_node,
                    sequence=BIP125_SEQUENCE_NUMBER,
                    utxos_to_spend=[root_utxos[graph_num]],
                    num_outputs=txs_per_graph,
                )
                assert_equal(
                    True,
                    normal_node.getmempoolentry(
                        optin_parent_tx['txid'])['bip125-replaceable'])
                new_utxos = optin_parent_tx['new_utxos']

                for utxo in new_utxos:
                    # Create spends for each output from the "root" of this graph.
                    child_tx = wallet.send_self_transfer(
                        from_node=normal_node,
                        utxo_to_spend=utxo,
                    )

                    assert normal_node.getmempoolentry(child_tx['txid'])

            num_txs_invalidated = len(root_utxos) + (num_tx_graphs *
                                                     txs_per_graph)

            if failure_expected:
                assert num_txs_invalidated > MAX_REPLACEMENT_LIMIT
            else:
                assert num_txs_invalidated <= MAX_REPLACEMENT_LIMIT

            # Now attempt to submit a tx that double-spends all the root tx inputs, which
            # would invalidate `num_txs_invalidated` transactions.
            tx_hex = wallet.create_self_transfer_multi(
                utxos_to_spend=root_utxos,
                fee_per_output=10_000_000,  # absurdly high feerate
            )["hex"]

            if failure_expected:
                assert_raises_rpc_error(-26, "too many potential replacements",
                                        normal_node.sendrawtransaction, tx_hex,
                                        0)
            else:
                txid = normal_node.sendrawtransaction(tx_hex, 0)
                assert normal_node.getmempoolentry(txid)

        # Clear the mempool once finished, and rescan the other nodes' wallet
        # to account for the spends we've made on `normal_node`.
        self.generate(normal_node, 1)
        self.wallet.rescan_utxos()
Example #7
    def _test_getblock(self):
        node = self.nodes[0]

        miniwallet = MiniWallet(node)
        miniwallet.rescan_utxos()

        fee_per_byte = Decimal('0.00000010')
        fee_per_kb = 1000 * fee_per_byte

        miniwallet.send_self_transfer(fee_rate=fee_per_kb, from_node=node)
        blockhash = self.generate(node, 1)[0]

        def assert_fee_not_in_block(verbosity):
            block = node.getblock(blockhash, verbosity)
            assert 'fee' not in block['tx'][1]

        def assert_fee_in_block(verbosity):
            block = node.getblock(blockhash, verbosity)
            tx = block['tx'][1]
            assert 'fee' in tx
            assert_equal(tx['fee'], tx['vsize'] * fee_per_byte)

        def assert_vin_contains_prevout(verbosity):
            block = node.getblock(blockhash, verbosity)
            tx = block["tx"][1]
            total_vin = Decimal("0.00000000")
            total_vout = Decimal("0.00000000")
            for vin in tx["vin"]:
                assert "prevout" in vin
                assert_equal(
                    set(vin["prevout"].keys()),
                    set(("value", "height", "generated", "scriptPubKey")))
                assert_equal(vin["prevout"]["generated"], True)
                total_vin += vin["prevout"]["value"]
            for vout in tx["vout"]:
                total_vout += vout["value"]
            assert_equal(total_vin, total_vout + tx["fee"])

        def assert_vin_does_not_contain_prevout(verbosity):
            block = node.getblock(blockhash, verbosity)
            tx = block["tx"][1]
            if isinstance(tx, str):
                # In verbosity level 1, only the transaction hashes are written
                pass
            else:
                for vin in tx["vin"]:
                    assert "prevout" not in vin

        self.log.info(
            "Test that getblock with verbosity 1 doesn't include fee")
        assert_fee_not_in_block(1)

        self.log.info(
            'Test that getblock with verbosity 2 and 3 includes expected fee')
        assert_fee_in_block(2)
        assert_fee_in_block(3)

        self.log.info(
            "Test that getblock with verbosity 1 and 2 does not include prevout"
        )
        assert_vin_does_not_contain_prevout(1)
        assert_vin_does_not_contain_prevout(2)

        self.log.info("Test that getblock with verbosity 3 includes prevout")
        assert_vin_contains_prevout(3)

        self.log.info(
            "Test that getblock with verbosity 2 and 3 still works with pruned Undo data"
        )
        datadir = get_datadir_path(self.options.tmpdir, 0)

        self.log.info(
            "Test getblock with invalid verbosity type returns proper error message"
        )
        assert_raises_rpc_error(-1, "JSON value is not an integer as expected",
                                node.getblock, blockhash, "2")

        def move_block_file(old, new):
            old_path = os.path.join(datadir, self.chain, 'blocks', old)
            new_path = os.path.join(datadir, self.chain, 'blocks', new)
            os.rename(old_path, new_path)

        # Move instead of deleting so we can restore chain state afterwards
        move_block_file('rev00000.dat', 'rev_wrong')

        assert_fee_not_in_block(2)
        assert_fee_not_in_block(3)
        assert_vin_does_not_contain_prevout(2)
        assert_vin_does_not_contain_prevout(3)

        # Restore chain state
        move_block_file('rev_wrong', 'rev00000.dat')

        assert 'previousblockhash' not in node.getblock(node.getblockhash(0))
        assert 'nextblockhash' not in node.getblock(node.getbestblockhash())
Example #8
class RESTTest(UmkoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2
        self.extra_args = [["-rest", "-blockfilterindex=1"], []]
        # whitelist peers to speed up tx relay / mempool sync
        for args in self.extra_args:
            args.append("[email protected]")
        self.supports_cli = False

    def test_rest_request(self,
                          uri,
                          http_method='GET',
                          req_type=ReqType.JSON,
                          body='',
                          status=200,
                          ret_type=RetType.JSON):
        rest_uri = '/rest' + uri
        if req_type == ReqType.JSON:
            rest_uri += '.json'
        elif req_type == ReqType.BIN:
            rest_uri += '.bin'
        elif req_type == ReqType.HEX:
            rest_uri += '.hex'

        conn = http.client.HTTPConnection(self.url.hostname, self.url.port)
        self.log.debug(f'{http_method} {rest_uri} {body}')
        if http_method == 'GET':
            conn.request('GET', rest_uri)
        elif http_method == 'POST':
            conn.request('POST', rest_uri, body)
        resp = conn.getresponse()

        assert_equal(resp.status, status)

        if ret_type == RetType.OBJ:
            return resp
        elif ret_type == RetType.BYTES:
            return resp.read()
        elif ret_type == RetType.JSON:
            return json.loads(resp.read().decode('utf-8'), parse_float=Decimal)

    def run_test(self):
        self.url = urllib.parse.urlparse(self.nodes[0].url)
        self.wallet = MiniWallet(self.nodes[0])
        self.wallet.rescan_utxos()

        self.log.info("Broadcast test transaction and sync nodes")
        txid, _ = self.wallet.send_to(from_node=self.nodes[0],
                                      scriptPubKey=getnewdestination()[1],
                                      amount=int(0.1 * COIN))
        self.sync_all()

        self.log.info("Test the /tx URI")

        json_obj = self.test_rest_request(f"/tx/{txid}")
        assert_equal(json_obj['txid'], txid)

        # Check hex format response
        hex_response = self.test_rest_request(f"/tx/{txid}",
                                              req_type=ReqType.HEX,
                                              ret_type=RetType.OBJ)
        assert_greater_than_or_equal(
            int(hex_response.getheader('content-length')),
            json_obj['size'] * 2)

        spent = (
            json_obj['vin'][0]['txid'], json_obj['vin'][0]['vout']
        )  # get the vin to later check for utxo (should be spent by then)
        # get n of 0.1 outpoint
        n, = filter_output_indices_by_value(json_obj['vout'], Decimal('0.1'))
        spending = (txid, n)

        # Test /tx with an invalid and an unknown txid
        resp = self.test_rest_request(uri=f"/tx/{INVALID_PARAM}",
                                      ret_type=RetType.OBJ,
                                      status=400)
        assert_equal(resp.read().decode('utf-8').rstrip(),
                     f"Invalid hash: {INVALID_PARAM}")
        resp = self.test_rest_request(uri=f"/tx/{UNKNOWN_PARAM}",
                                      ret_type=RetType.OBJ,
                                      status=404)
        assert_equal(resp.read().decode('utf-8').rstrip(),
                     f"{UNKNOWN_PARAM} not found")

        self.log.info("Query an unspent TXO using the /getutxos URI")

        self.generate(self.wallet, 1)
        bb_hash = self.nodes[0].getbestblockhash()

        # Check chainTip response
        json_obj = self.test_rest_request(
            f"/getutxos/{spending[0]}-{spending[1]}")
        assert_equal(json_obj['chaintipHash'], bb_hash)

        # Make sure there is one utxo
        assert_equal(len(json_obj['utxos']), 1)
        assert_equal(json_obj['utxos'][0]['value'], Decimal('0.1'))

        self.log.info("Query a spent TXO using the /getutxos URI")

        json_obj = self.test_rest_request(f"/getutxos/{spent[0]}-{spent[1]}")

        # Check chainTip response
        assert_equal(json_obj['chaintipHash'], bb_hash)

        # Make sure there is no utxo in the response because this outpoint has been spent
        assert_equal(len(json_obj['utxos']), 0)

        # Check bitmap
        assert_equal(json_obj['bitmap'], "0")
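        # (The bitmap has one character per requested outpoint, in request
        # order: '1' for an unspent outpoint included in the result, '0' for a
        # spent one.)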

        self.log.info("Query two TXOs using the /getutxos URI")

        json_obj = self.test_rest_request(
            f"/getutxos/{spending[0]}-{spending[1]}/{spent[0]}-{spent[1]}")

        assert_equal(len(json_obj['utxos']), 1)
        assert_equal(json_obj['bitmap'], "10")

        self.log.info(
            "Query the TXOs using the /getutxos URI with a binary response")

        # body: check-mempool flag (0x01), outpoint count (0x02), then the two
        # outpoints, each as a 32-byte txid followed by a 4-byte output index
        bin_request = b'\x01\x02'
        for txid, n in [spending, spent]:
            bin_request += bytes.fromhex(txid)
            bin_request += pack("i", n)

        bin_response = self.test_rest_request("/getutxos",
                                              http_method='POST',
                                              req_type=ReqType.BIN,
                                              body=bin_request,
                                              ret_type=RetType.BYTES)
        output = BytesIO(bin_response)
        chain_height, = unpack("<i", output.read(4))
        response_hash = output.read(32)[::-1].hex()

        assert_equal(
            bb_hash, response_hash
        )  # check if getutxo's chaintip during calculation was fine
        assert_equal(
            chain_height, 201
        )  # chain height must be 201 (pre-mined chain [200] + generated block [1])

        self.log.info("Test the /getutxos URI with and without /checkmempool")
        # Create a transaction, check that it's found with /checkmempool, but
        # not found without. Then confirm the transaction and check that it's
        # found with or without /checkmempool.

        # do a tx and don't sync
        txid, _ = self.wallet.send_to(from_node=self.nodes[0],
                                      scriptPubKey=getnewdestination()[1],
                                      amount=int(0.1 * COIN))
        json_obj = self.test_rest_request(f"/tx/{txid}")
        # get the spent output to later check for utxo (should be spent by then)
        spent = (json_obj['vin'][0]['txid'], json_obj['vin'][0]['vout'])
        # get n of 0.1 outpoint
        n, = filter_output_indices_by_value(json_obj['vout'], Decimal('0.1'))
        spending = (txid, n)

        json_obj = self.test_rest_request(
            f"/getutxos/{spending[0]}-{spending[1]}")
        assert_equal(len(json_obj['utxos']), 0)

        json_obj = self.test_rest_request(
            f"/getutxos/checkmempool/{spending[0]}-{spending[1]}")
        assert_equal(len(json_obj['utxos']), 1)

        json_obj = self.test_rest_request(f"/getutxos/{spent[0]}-{spent[1]}")
        assert_equal(len(json_obj['utxos']), 1)

        json_obj = self.test_rest_request(
            f"/getutxos/checkmempool/{spent[0]}-{spent[1]}")
        assert_equal(len(json_obj['utxos']), 0)

        self.generate(self.nodes[0], 1)

        json_obj = self.test_rest_request(
            f"/getutxos/{spending[0]}-{spending[1]}")
        assert_equal(len(json_obj['utxos']), 1)

        json_obj = self.test_rest_request(
            f"/getutxos/checkmempool/{spending[0]}-{spending[1]}")
        assert_equal(len(json_obj['utxos']), 1)

        # Do some invalid requests
        self.test_rest_request("/getutxos",
                               http_method='POST',
                               req_type=ReqType.JSON,
                               body='{"checkmempool',
                               status=400,
                               ret_type=RetType.OBJ)
        self.test_rest_request("/getutxos",
                               http_method='POST',
                               req_type=ReqType.BIN,
                               body='{"checkmempool',
                               status=400,
                               ret_type=RetType.OBJ)
        self.test_rest_request("/getutxos/checkmempool",
                               http_method='POST',
                               req_type=ReqType.JSON,
                               status=400,
                               ret_type=RetType.OBJ)

        # Test limits
        long_uri = '/'.join([f"{txid}-{n_}" for n_ in range(20)])
        self.test_rest_request(f"/getutxos/checkmempool/{long_uri}",
                               http_method='POST',
                               status=400,
                               ret_type=RetType.OBJ)

        long_uri = '/'.join([f'{txid}-{n_}' for n_ in range(15)])
        self.test_rest_request(f"/getutxos/checkmempool/{long_uri}",
                               http_method='POST',
                               status=200)

        self.generate(self.nodes[0],
                      1)  # generate block to not affect upcoming tests

        self.log.info("Test the /block, /blockhashbyheight and /headers URIs")
        bb_hash = self.nodes[0].getbestblockhash()

        # Check result if block does not exists
        assert_equal(self.test_rest_request(f"/headers/1/{UNKNOWN_PARAM}"), [])
        self.test_rest_request(f"/block/{UNKNOWN_PARAM}",
                               status=404,
                               ret_type=RetType.OBJ)

        # Check result if block is not in the active chain
        self.nodes[0].invalidateblock(bb_hash)
        assert_equal(self.test_rest_request(f'/headers/1/{bb_hash}'), [])
        self.test_rest_request(f'/block/{bb_hash}')
        self.nodes[0].reconsiderblock(bb_hash)

        # Check binary format
        response = self.test_rest_request(f"/block/{bb_hash}",
                                          req_type=ReqType.BIN,
                                          ret_type=RetType.OBJ)
        assert_greater_than(int(response.getheader('content-length')),
                            BLOCK_HEADER_SIZE)
        response_bytes = response.read()

        # Compare with block header
        response_header = self.test_rest_request(f"/headers/1/{bb_hash}",
                                                 req_type=ReqType.BIN,
                                                 ret_type=RetType.OBJ)
        assert_equal(int(response_header.getheader('content-length')),
                     BLOCK_HEADER_SIZE)
        response_header_bytes = response_header.read()
        assert_equal(response_bytes[:BLOCK_HEADER_SIZE], response_header_bytes)

        # Check block hex format
        response_hex = self.test_rest_request(f"/block/{bb_hash}",
                                              req_type=ReqType.HEX,
                                              ret_type=RetType.OBJ)
        assert_greater_than(int(response_hex.getheader('content-length')),
                            BLOCK_HEADER_SIZE * 2)
        response_hex_bytes = response_hex.read().strip(b'\n')
        assert_equal(response_bytes.hex().encode(), response_hex_bytes)

        # Compare with hex block header
        response_header_hex = self.test_rest_request(f"/headers/1/{bb_hash}",
                                                     req_type=ReqType.HEX,
                                                     ret_type=RetType.OBJ)
        assert_greater_than(
            int(response_header_hex.getheader('content-length')),
            BLOCK_HEADER_SIZE * 2)
        response_header_hex_bytes = response_header_hex.read(
            BLOCK_HEADER_SIZE * 2)
        assert_equal(response_bytes[:BLOCK_HEADER_SIZE].hex().encode(),
                     response_header_hex_bytes)

        # Check json format
        block_json_obj = self.test_rest_request(f"/block/{bb_hash}")
        assert_equal(block_json_obj['hash'], bb_hash)
        assert_equal(
            self.test_rest_request(
                f"/blockhashbyheight/{block_json_obj['height']}")['blockhash'],
            bb_hash)

        # Check hex/bin format
        resp_hex = self.test_rest_request(
            f"/blockhashbyheight/{block_json_obj['height']}",
            req_type=ReqType.HEX,
            ret_type=RetType.OBJ)
        assert_equal(resp_hex.read().decode('utf-8').rstrip(), bb_hash)
        resp_bytes = self.test_rest_request(
            f"/blockhashbyheight/{block_json_obj['height']}",
            req_type=ReqType.BIN,
            ret_type=RetType.BYTES)
        blockhash = resp_bytes[::-1].hex()
        assert_equal(blockhash, bb_hash)

        # Check invalid blockhashbyheight requests
        resp = self.test_rest_request(f"/blockhashbyheight/{INVALID_PARAM}",
                                      ret_type=RetType.OBJ,
                                      status=400)
        assert_equal(resp.read().decode('utf-8').rstrip(),
                     f"Invalid height: {INVALID_PARAM}")
        resp = self.test_rest_request("/blockhashbyheight/1000000",
                                      ret_type=RetType.OBJ,
                                      status=404)
        assert_equal(resp.read().decode('utf-8').rstrip(),
                     "Block height out of range")
        resp = self.test_rest_request("/blockhashbyheight/-1",
                                      ret_type=RetType.OBJ,
                                      status=400)
        assert_equal(resp.read().decode('utf-8').rstrip(),
                     "Invalid height: -1")
        self.test_rest_request("/blockhashbyheight/",
                               ret_type=RetType.OBJ,
                               status=400)

        # Compare with json block header
        json_obj = self.test_rest_request(f"/headers/1/{bb_hash}")
        assert_equal(len(json_obj),
                     1)  # ensure that there is one header in the json response
        assert_equal(json_obj[0]['hash'],
                     bb_hash)  # request/response hash should be the same

        # Compare with normal RPC block response
        rpc_block_json = self.nodes[0].getblock(bb_hash)
        for key in [
                'hash', 'confirmations', 'height', 'version', 'merkleroot',
                'time', 'nonce', 'bits', 'difficulty', 'chainwork',
                'previousblockhash'
        ]:
            assert_equal(json_obj[0][key], rpc_block_json[key])

        # See if we can get 5 headers in one response
        self.generate(self.nodes[1], 5)
        json_obj = self.test_rest_request(f"/headers/5/{bb_hash}")
        assert_equal(len(json_obj), 5)  # now we should have 5 header objects
        json_obj = self.test_rest_request(
            f"/blockfilterheaders/basic/5/{bb_hash}")
        first_filter_header = json_obj[0]
        assert_equal(len(json_obj),
                     5)  # now we should have 5 filter header objects
        json_obj = self.test_rest_request(f"/blockfilter/basic/{bb_hash}")

        # Compare with normal RPC blockfilter response
        rpc_blockfilter = self.nodes[0].getblockfilter(bb_hash)
        assert_equal(first_filter_header, rpc_blockfilter['header'])
        assert_equal(json_obj['filter'], rpc_blockfilter['filter'])

        # Test number parsing
        for num in [
                '5a', '-5', '0', '2001', '99999999999999999999999999999999999'
        ]:
            assert_equal(
                bytes(
                    f'Header count is invalid or out of acceptable range (1-2000): {num}\r\n',
                    'ascii'),
                self.test_rest_request(f"/headers/{num}/{bb_hash}",
                                       ret_type=RetType.BYTES,
                                       status=400),
            )

        self.log.info("Test tx inclusion in the /mempool and /block URIs")

        # Make 3 chained txs and mine them on node 1
        txs = []
        input_txid = txid
        for _ in range(3):
            utxo_to_spend = self.wallet.get_utxo(txid=input_txid)
            txs.append(
                self.wallet.send_self_transfer(
                    from_node=self.nodes[0],
                    utxo_to_spend=utxo_to_spend)['txid'])
            input_txid = txs[-1]
        self.sync_all()

        # Check that there are exactly 3 transactions in the TX memory pool before generating the block
        json_obj = self.test_rest_request("/mempool/info")
        assert_equal(json_obj['size'], 3)
        # the size of the memory pool should be greater than 3x ~100 bytes
        assert_greater_than(json_obj['bytes'], 300)

        # Check that there are our submitted transactions in the TX memory pool
        json_obj = self.test_rest_request("/mempool/contents")
        for i, tx in enumerate(txs):
            assert tx in json_obj
            assert_equal(json_obj[tx]['spentby'], txs[i + 1:i + 2])
            assert_equal(json_obj[tx]['depends'], txs[i - 1:i])

        # Now mine the transactions
        newblockhash = self.generate(self.nodes[1], 1)

        # Check if the 3 tx show up in the new block
        json_obj = self.test_rest_request(f"/block/{newblockhash[0]}")
        non_coinbase_txs = {
            tx['txid']
            for tx in json_obj['tx'] if 'coinbase' not in tx['vin'][0]
        }
        assert_equal(non_coinbase_txs, set(txs))

        # Verify that the non-coinbase tx has "prevout" key set
        for tx_obj in json_obj["tx"]:
            for vin in tx_obj["vin"]:
                if "coinbase" not in vin:
                    assert "prevout" in vin
                    assert_equal(vin["prevout"]["generated"], False)
                else:
                    assert "prevout" not in vin

        # Check the same but without tx details
        json_obj = self.test_rest_request(
            f"/block/notxdetails/{newblockhash[0]}")
        for tx in txs:
            assert tx in json_obj['tx']

        self.log.info("Test the /chaininfo URI")

        bb_hash = self.nodes[0].getbestblockhash()

        json_obj = self.test_rest_request("/chaininfo")
        assert_equal(json_obj['bestblockhash'], bb_hash)
Example #9
class ReplaceByFeeTest(BGLTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.extra_args = [
            [
                "-acceptnonstdtxn=1",
                "-maxorphantx=1000",
                "-limitancestorcount=50",
                "-limitancestorsize=101",
                "-limitdescendantcount=200",
                "-limitdescendantsize=101",
            ],
        ]
        self.supports_cli = False

    def run_test(self):
        self.wallet = MiniWallet(self.nodes[0])
        # the pre-mined test framework chain contains coinbase outputs to the
        # MiniWallet's default address ADDRESS_BCRT1_P2WSH_OP_TRUE in blocks
        # 76-100 (see method BitcoinTestFramework._initialize_chain())
        self.wallet.rescan_utxos()

        self.log.info("Running test simple doublespend...")
        self.test_simple_doublespend()

        self.log.info("Running test doublespend chain...")
        self.test_doublespend_chain()

        self.log.info("Running test doublespend tree...")
        self.test_doublespend_tree()

        self.log.info("Running test replacement feeperkb...")
        self.test_replacement_feeperkb()

        self.log.info("Running test spends of conflicting outputs...")
        self.test_spends_of_conflicting_outputs()

        self.log.info("Running test new unconfirmed inputs...")
        self.test_new_unconfirmed_inputs()

        self.log.info("Running test too many replacements...")
        self.test_too_many_replacements()

        self.log.info("Running test opt-in...")
        self.test_opt_in()

        self.log.info("Running test RPC...")
        self.test_rpc()

        self.log.info("Running test prioritised transactions...")
        self.test_prioritised_transactions()

        self.log.info("Running test no inherited signaling...")
        self.test_no_inherited_signaling()

        self.log.info("Running test replacement relay fee...")
        self.test_replacement_relay_fee()

        self.log.info("Passed")

    def make_utxo(self,
                  node,
                  amount,
                  confirmed=True,
                  scriptPubKey=DUMMY_P2WPKH_SCRIPT):
        """Create a txout with a given amount and scriptPubKey

        confirmed - txouts created will be confirmed in the blockchain;
                    unconfirmed otherwise.
        """
        txid, n = self.wallet.send_to(from_node=node,
                                      scriptPubKey=scriptPubKey,
                                      amount=amount)

        # If requested, ensure txouts are confirmed.
        if confirmed:
            mempool_size = len(node.getrawmempool())
            while mempool_size > 0:
                self.generate(node, 1)
                new_size = len(node.getrawmempool())
                # Error out if we have something stuck in the mempool, as this
                # would likely be a bug.
                assert new_size < mempool_size
                mempool_size = new_size

        return COutPoint(int(txid, 16), n)

    def test_simple_doublespend(self):
        """Simple doublespend"""
        # we use MiniWallet to create a transaction template with inputs correctly set,
        # and modify the output (amount, scriptPubKey) according to our needs
        tx_template = self.wallet.create_self_transfer(
            from_node=self.nodes[0])['tx']

        tx1a = deepcopy(tx_template)
        tx1a.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
        tx1a_hex = tx1a.serialize().hex()
        tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, 0)

        # Should fail because we haven't changed the fee
        tx1b = deepcopy(tx_template)
        tx1b.vout = [CTxOut(1 * COIN, DUMMY_2_P2WPKH_SCRIPT)]
        tx1b_hex = tx1b.serialize().hex()

        # This will raise an exception due to insufficient fee
        assert_raises_rpc_error(-26, "insufficient fee",
                                self.nodes[0].sendrawtransaction, tx1b_hex, 0)

        # Extra 0.1 BTC fee
        tx1b.vout[0].nValue -= int(0.1 * COIN)
        tx1b_hex = tx1b.serialize().hex()
        # Works when enabled
        tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, 0)

        mempool = self.nodes[0].getrawmempool()

        assert tx1a_txid not in mempool
        assert tx1b_txid in mempool

        assert_equal(tx1b_hex, self.nodes[0].getrawtransaction(tx1b_txid))

    def test_doublespend_chain(self):
        """Doublespend of a long chain"""

        initial_nValue = 5 * COIN
        tx0_outpoint = self.make_utxo(self.nodes[0], initial_nValue)

        prevout = tx0_outpoint
        remaining_value = initial_nValue
        chain_txids = []
        while remaining_value > 1 * COIN:
            remaining_value -= int(0.1 * COIN)
            tx = CTransaction()
            tx.vin = [CTxIn(prevout, nSequence=0)]
            tx.vout = [
                CTxOut(remaining_value, CScript([1, OP_DROP] * 15 + [1]))
            ]
            tx_hex = tx.serialize().hex()
            txid = self.nodes[0].sendrawtransaction(tx_hex, 0)
            chain_txids.append(txid)
            prevout = COutPoint(int(txid, 16), 0)

        # Whether the double-spend is allowed is evaluated by including all
        # child fees - 4 BTC - so this attempt is rejected.
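        # (The chain built above holds 40 transactions paying 0.1 BTC each, i.e.
        # 4 BTC in total, while the replacement below only pays 5 - 2 = 3 BTC.)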
        dbl_tx = CTransaction()
        dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        dbl_tx.vout = [CTxOut(initial_nValue - 3 * COIN, DUMMY_P2WPKH_SCRIPT)]
        dbl_tx_hex = dbl_tx.serialize().hex()

        # This will raise an exception due to insufficient fee
        assert_raises_rpc_error(-26, "insufficient fee",
                                self.nodes[0].sendrawtransaction, dbl_tx_hex,
                                0)

        # Accepted with sufficient fee
        dbl_tx = CTransaction()
        dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        dbl_tx.vout = [CTxOut(int(0.1 * COIN), DUMMY_P2WPKH_SCRIPT)]
        dbl_tx_hex = dbl_tx.serialize().hex()
        self.nodes[0].sendrawtransaction(dbl_tx_hex, 0)

        mempool = self.nodes[0].getrawmempool()
        for doublespent_txid in chain_txids:
            assert doublespent_txid not in mempool

    def test_doublespend_tree(self):
        """Doublespend of a big tree of transactions"""

        initial_nValue = 5 * COIN
        tx0_outpoint = self.make_utxo(self.nodes[0], initial_nValue)

        def branch(prevout,
                   initial_value,
                   max_txs,
                   tree_width=5,
                   fee=0.00001 * COIN,
                   _total_txs=None):
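            # Recursively fan out from `prevout`: create one tx with `tree_width`
            # outputs, yield it, then branch into each of its outputs until
            # `max_txs` transactions have been generated in total.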
            if _total_txs is None:
                _total_txs = [0]
            if _total_txs[0] >= max_txs:
                return

            txout_value = (initial_value - fee) // tree_width
            if txout_value < fee:
                return

            vout = [
                CTxOut(txout_value, CScript([i + 1]))
                for i in range(tree_width)
            ]
            tx = CTransaction()
            tx.vin = [CTxIn(prevout, nSequence=0)]
            tx.vout = vout
            tx_hex = tx.serialize().hex()

            assert len(tx.serialize()) < 100000
            txid = self.nodes[0].sendrawtransaction(tx_hex, 0)
            yield tx
            _total_txs[0] += 1

            txid = int(txid, 16)

            for i, txout in enumerate(tx.vout):
                for x in branch(COutPoint(txid, i),
                                txout_value,
                                max_txs,
                                tree_width=tree_width,
                                fee=fee,
                                _total_txs=_total_txs):
                    yield x

        fee = int(0.00001 * COIN)
        n = MAX_REPLACEMENT_LIMIT
        tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee))
        assert_equal(len(tree_txs), n)

        # Attempt double-spend, will fail because too little fee paid
        dbl_tx = CTransaction()
        dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        dbl_tx.vout = [CTxOut(initial_nValue - fee * n, DUMMY_P2WPKH_SCRIPT)]
        dbl_tx_hex = dbl_tx.serialize().hex()
        # This will raise an exception due to insufficient fee
        assert_raises_rpc_error(-26, "insufficient fee",
                                self.nodes[0].sendrawtransaction, dbl_tx_hex,
                                0)

        # 0.1 BTC fee is enough
        dbl_tx = CTransaction()
        dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        dbl_tx.vout = [
            CTxOut(initial_nValue - fee * n - int(0.1 * COIN),
                   DUMMY_P2WPKH_SCRIPT)
        ]
        dbl_tx_hex = dbl_tx.serialize().hex()
        self.nodes[0].sendrawtransaction(dbl_tx_hex, 0)

        mempool = self.nodes[0].getrawmempool()

        for tx in tree_txs:
            tx.rehash()
            assert tx.hash not in mempool

        # Try again, but with more total transactions than the "max txs
        # double-spent at once" anti-DoS limit.
        for n in (MAX_REPLACEMENT_LIMIT + 1, MAX_REPLACEMENT_LIMIT * 2):
            fee = int(0.00001 * COIN)
            tx0_outpoint = self.make_utxo(self.nodes[0], initial_nValue)
            tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee))
            assert_equal(len(tree_txs), n)

            dbl_tx = CTransaction()
            dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
            dbl_tx.vout = [
                CTxOut(initial_nValue - 2 * fee * n, DUMMY_P2WPKH_SCRIPT)
            ]
            dbl_tx_hex = dbl_tx.serialize().hex()
            # This will raise an exception
            assert_raises_rpc_error(-26, "too many potential replacements",
                                    self.nodes[0].sendrawtransaction,
                                    dbl_tx_hex, 0)

            for tx in tree_txs:
                tx.rehash()
                self.nodes[0].getrawtransaction(tx.hash)

    def test_replacement_feeperkb(self):
        """Replacement requires fee-per-KB to be higher"""
        tx0_outpoint = self.make_utxo(self.nodes[0], int(1.1 * COIN))

        tx1a = CTransaction()
        tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1a.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
        tx1a_hex = tx1a.serialize().hex()
        self.nodes[0].sendrawtransaction(tx1a_hex, 0)

        # Higher fee, but the fee per KB is much lower, so the replacement is
        # rejected.
        tx1b = CTransaction()
        tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1b.vout = [CTxOut(int(0.001 * COIN), CScript([b'a' * 999000]))]
        tx1b_hex = tx1b.serialize().hex()

        # This will raise an exception due to insufficient fee
        assert_raises_rpc_error(-26, "insufficient fee",
                                self.nodes[0].sendrawtransaction, tx1b_hex, 0)

    def test_spends_of_conflicting_outputs(self):
        """Replacements that spend conflicting tx outputs are rejected"""
        utxo1 = self.make_utxo(self.nodes[0], int(1.2 * COIN))
        utxo2 = self.make_utxo(self.nodes[0], 3 * COIN)

        tx1a = CTransaction()
        tx1a.vin = [CTxIn(utxo1, nSequence=0)]
        tx1a.vout = [CTxOut(int(1.1 * COIN), DUMMY_P2WPKH_SCRIPT)]
        tx1a_hex = tx1a.serialize().hex()
        tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, 0)

        tx1a_txid = int(tx1a_txid, 16)

        # Direct spend an output of the transaction we're replacing.
        tx2 = CTransaction()
        tx2.vin = [CTxIn(utxo1, nSequence=0), CTxIn(utxo2, nSequence=0)]
        tx2.vin.append(CTxIn(COutPoint(tx1a_txid, 0), nSequence=0))
        tx2.vout = tx1a.vout
        tx2_hex = tx2.serialize().hex()

        # This will raise an exception
        assert_raises_rpc_error(-26, "bad-txns-spends-conflicting-tx",
                                self.nodes[0].sendrawtransaction, tx2_hex, 0)

        # Spend tx1a's output to test the indirect case.
        tx1b = CTransaction()
        tx1b.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0)]
        tx1b.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
        tx1b_hex = tx1b.serialize().hex()
        tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, 0)
        tx1b_txid = int(tx1b_txid, 16)

        tx2 = CTransaction()
        tx2.vin = [
            CTxIn(utxo1, nSequence=0),
            CTxIn(utxo2, nSequence=0),
            CTxIn(COutPoint(tx1b_txid, 0))
        ]
        tx2.vout = tx1a.vout
        tx2_hex = tx2.serialize().hex()

        # This will raise an exception
        assert_raises_rpc_error(-26, "bad-txns-spends-conflicting-tx",
                                self.nodes[0].sendrawtransaction, tx2_hex, 0)

    def test_new_unconfirmed_inputs(self):
        """Replacements that add new unconfirmed inputs are rejected"""
        confirmed_utxo = self.make_utxo(self.nodes[0], int(1.1 * COIN))
        unconfirmed_utxo = self.make_utxo(self.nodes[0], int(0.1 * COIN),
                                          False)

        tx1 = CTransaction()
        tx1.vin = [CTxIn(confirmed_utxo)]
        tx1.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
        tx1_hex = tx1.serialize().hex()
        self.nodes[0].sendrawtransaction(tx1_hex, 0)

        tx2 = CTransaction()
        tx2.vin = [CTxIn(confirmed_utxo), CTxIn(unconfirmed_utxo)]
        tx2.vout = tx1.vout
        tx2_hex = tx2.serialize().hex()

        # This will raise an exception
        assert_raises_rpc_error(-26, "replacement-adds-unconfirmed",
                                self.nodes[0].sendrawtransaction, tx2_hex, 0)

    def test_too_many_replacements(self):
        """Replacements that evict too many transactions are rejected"""
        # Try directly replacing more than MAX_REPLACEMENT_LIMIT
        # transactions

        # Start by creating a single transaction with many outputs
        initial_nValue = 10 * COIN
        utxo = self.make_utxo(self.nodes[0], initial_nValue)
        fee = int(0.0001 * COIN)
        split_value = int((initial_nValue - fee) / (MAX_REPLACEMENT_LIMIT + 1))

        outputs = []
        for _ in range(MAX_REPLACEMENT_LIMIT + 1):
            outputs.append(CTxOut(split_value, CScript([1])))

        splitting_tx = CTransaction()
        splitting_tx.vin = [CTxIn(utxo, nSequence=0)]
        splitting_tx.vout = outputs
        splitting_tx_hex = splitting_tx.serialize().hex()

        txid = self.nodes[0].sendrawtransaction(splitting_tx_hex, 0)
        txid = int(txid, 16)

        # Now spend each of those outputs individually
        for i in range(MAX_REPLACEMENT_LIMIT + 1):
            tx_i = CTransaction()
            tx_i.vin = [CTxIn(COutPoint(txid, i), nSequence=0)]
            tx_i.vout = [CTxOut(split_value - fee, DUMMY_P2WPKH_SCRIPT)]
            tx_i_hex = tx_i.serialize().hex()
            self.nodes[0].sendrawtransaction(tx_i_hex, 0)

        # Now create doublespend of the whole lot; should fail.
        # Need a big enough fee to cover all spending transactions and have
        # a higher fee rate
        double_spend_value = (split_value -
                              100 * fee) * (MAX_REPLACEMENT_LIMIT + 1)
        inputs = []
        for i in range(MAX_REPLACEMENT_LIMIT + 1):
            inputs.append(CTxIn(COutPoint(txid, i), nSequence=0))
        double_tx = CTransaction()
        double_tx.vin = inputs
        double_tx.vout = [CTxOut(double_spend_value, CScript([b'a']))]
        double_tx_hex = double_tx.serialize().hex()

        # This will raise an exception
        assert_raises_rpc_error(-26, "too many potential replacements",
                                self.nodes[0].sendrawtransaction,
                                double_tx_hex, 0)

        # If we remove an input, it should pass
        double_tx = CTransaction()
        double_tx.vin = inputs[0:-1]
        double_tx.vout = [CTxOut(double_spend_value, CScript([b'a']))]
        double_tx_hex = double_tx.serialize().hex()
        self.nodes[0].sendrawtransaction(double_tx_hex, 0)

    def test_opt_in(self):
        """Replacing should only work if orig tx opted in"""
        tx0_outpoint = self.make_utxo(self.nodes[0], int(1.1 * COIN))

        # Create a non-opting in transaction
        tx1a = CTransaction()
        tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0xffffffff)]
        tx1a.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
        tx1a_hex = tx1a.serialize().hex()
        tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, 0)

        # This transaction isn't shown as replaceable
        assert_equal(
            self.nodes[0].getmempoolentry(tx1a_txid)['bip125-replaceable'],
            False)

        # Shouldn't be able to double-spend
        tx1b = CTransaction()
        tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1b.vout = [CTxOut(int(0.9 * COIN), DUMMY_P2WPKH_SCRIPT)]
        tx1b_hex = tx1b.serialize().hex()

        # This will raise an exception
        assert_raises_rpc_error(-26, "txn-mempool-conflict",
                                self.nodes[0].sendrawtransaction, tx1b_hex, 0)

        tx1_outpoint = self.make_utxo(self.nodes[0], int(1.1 * COIN))

        # Create a different non-opting in transaction
        tx2a = CTransaction()
        tx2a.vin = [CTxIn(tx1_outpoint, nSequence=0xfffffffe)]
        tx2a.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
        tx2a_hex = tx2a.serialize().hex()
        tx2a_txid = self.nodes[0].sendrawtransaction(tx2a_hex, 0)

        # Still shouldn't be able to double-spend
        tx2b = CTransaction()
        tx2b.vin = [CTxIn(tx1_outpoint, nSequence=0)]
        tx2b.vout = [CTxOut(int(0.9 * COIN), DUMMY_P2WPKH_SCRIPT)]
        tx2b_hex = tx2b.serialize().hex()

        # This will raise an exception
        assert_raises_rpc_error(-26, "txn-mempool-conflict",
                                self.nodes[0].sendrawtransaction, tx2b_hex, 0)

        # Now create a new transaction that spends from tx1a and tx2a
        # opt-in on one of the inputs
        # Transaction should be replaceable on either input

        tx1a_txid = int(tx1a_txid, 16)
        tx2a_txid = int(tx2a_txid, 16)

        tx3a = CTransaction()
        tx3a.vin = [
            CTxIn(COutPoint(tx1a_txid, 0), nSequence=0xffffffff),
            CTxIn(COutPoint(tx2a_txid, 0), nSequence=0xfffffffd)
        ]
        tx3a.vout = [
            CTxOut(int(0.9 * COIN), CScript([b'c'])),
            CTxOut(int(0.9 * COIN), CScript([b'd']))
        ]
        tx3a_hex = tx3a.serialize().hex()

        tx3a_txid = self.nodes[0].sendrawtransaction(tx3a_hex, 0)

        # This transaction is shown as replaceable
        assert_equal(
            self.nodes[0].getmempoolentry(tx3a_txid)['bip125-replaceable'],
            True)

        tx3b = CTransaction()
        tx3b.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0)]
        tx3b.vout = [CTxOut(int(0.5 * COIN), DUMMY_P2WPKH_SCRIPT)]
        tx3b_hex = tx3b.serialize().hex()

        tx3c = CTransaction()
        tx3c.vin = [CTxIn(COutPoint(tx2a_txid, 0), nSequence=0)]
        tx3c.vout = [CTxOut(int(0.5 * COIN), DUMMY_P2WPKH_SCRIPT)]
        tx3c_hex = tx3c.serialize().hex()

        self.nodes[0].sendrawtransaction(tx3b_hex, 0)
        # If tx3b was accepted, tx3c won't look like a replacement,
        # but make sure it is accepted anyway
        self.nodes[0].sendrawtransaction(tx3c_hex, 0)

    def test_prioritised_transactions(self):
        # Ensure that fee deltas used via prioritisetransaction are
        # correctly used by replacement logic

        # 1. Check that feeperkb uses modified fees
        tx0_outpoint = self.make_utxo(self.nodes[0], int(1.1 * COIN))

        tx1a = CTransaction()
        tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1a.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
        tx1a_hex = tx1a.serialize().hex()
        tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, 0)

        # Higher fee, but the actual fee per KB is much lower.
        tx1b = CTransaction()
        tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1b.vout = [CTxOut(int(0.001 * COIN), CScript([b'a' * 740000]))]
        tx1b_hex = tx1b.serialize().hex()

        # Verify tx1b cannot replace tx1a.
        assert_raises_rpc_error(-26, "insufficient fee",
                                self.nodes[0].sendrawtransaction, tx1b_hex, 0)

        # Use prioritisetransaction to set tx1a's fee to 0.
        self.nodes[0].prioritisetransaction(txid=tx1a_txid,
                                            fee_delta=int(-0.1 * COIN))

        # Now tx1b should be able to replace tx1a
        tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, 0)

        assert tx1b_txid in self.nodes[0].getrawmempool()

        # 2. Check that absolute fee checks use modified fee.
        tx1_outpoint = self.make_utxo(self.nodes[0], int(1.1 * COIN))

        tx2a = CTransaction()
        tx2a.vin = [CTxIn(tx1_outpoint, nSequence=0)]
        tx2a.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
        tx2a_hex = tx2a.serialize().hex()
        self.nodes[0].sendrawtransaction(tx2a_hex, 0)

        # Lower fee, but we'll prioritise it
        tx2b = CTransaction()
        tx2b.vin = [CTxIn(tx1_outpoint, nSequence=0)]
        tx2b.vout = [CTxOut(int(1.01 * COIN), DUMMY_P2WPKH_SCRIPT)]
        tx2b.rehash()
        tx2b_hex = tx2b.serialize().hex()

        # Verify tx2b cannot replace tx2a.
        assert_raises_rpc_error(-26, "insufficient fee",
                                self.nodes[0].sendrawtransaction, tx2b_hex, 0)

        # Now prioritise tx2b to have a higher modified fee
        self.nodes[0].prioritisetransaction(txid=tx2b.hash,
                                            fee_delta=int(0.1 * COIN))

        # tx2b should now be accepted
        tx2b_txid = self.nodes[0].sendrawtransaction(tx2b_hex, 0)

        assert tx2b_txid in self.nodes[0].getrawmempool()

    def test_rpc(self):
        us0 = self.wallet.get_utxo()
        ins = [us0]
        outs = {ADDRESS_BCRT1_UNSPENDABLE: Decimal(1.0000000)}
        rawtx0 = self.nodes[0].createrawtransaction(ins, outs, 0, True)
        rawtx1 = self.nodes[0].createrawtransaction(ins, outs, 0, False)
        json0 = self.nodes[0].decoderawtransaction(rawtx0)
        json1 = self.nodes[0].decoderawtransaction(rawtx1)
        assert_equal(json0["vin"][0]["sequence"], 4294967293)
        assert_equal(json1["vin"][0]["sequence"], 4294967295)
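        # 4294967293 == 0xfffffffd signals BIP125 replaceability (replaceable=True above),
        # while 4294967295 == 0xffffffff is the final sequence and does not signal.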

        if self.is_wallet_compiled():
            self.init_wallet(node=0)
            rawtx2 = self.nodes[0].createrawtransaction([], outs)
            frawtx2a = self.nodes[0].fundrawtransaction(
                rawtx2, {"replaceable": True})
            frawtx2b = self.nodes[0].fundrawtransaction(
                rawtx2, {"replaceable": False})

            json0 = self.nodes[0].decoderawtransaction(frawtx2a['hex'])
            json1 = self.nodes[0].decoderawtransaction(frawtx2b['hex'])
            assert_equal(json0["vin"][0]["sequence"], 4294967293)
            assert_equal(json1["vin"][0]["sequence"], 4294967294)
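            # fundrawtransaction with replaceable=False uses 4294967294 == 0xfffffffe, which
            # does not signal BIP125 but still leaves nLockTime enforceable.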

    def test_no_inherited_signaling(self):
        confirmed_utxo = self.wallet.get_utxo()

        # Create an explicitly opt-in parent transaction
        optin_parent_tx = self.wallet.send_self_transfer(
            from_node=self.nodes[0],
            utxo_to_spend=confirmed_utxo,
            sequence=BIP125_SEQUENCE_NUMBER,
            fee_rate=Decimal('0.01'),
        )
        assert_equal(
            True, self.nodes[0].getmempoolentry(
                optin_parent_tx['txid'])['bip125-replaceable'])

        replacement_parent_tx = self.wallet.create_self_transfer(
            from_node=self.nodes[0],
            utxo_to_spend=confirmed_utxo,
            sequence=BIP125_SEQUENCE_NUMBER,
            fee_rate=Decimal('0.02'),
        )

        # Test if parent tx can be replaced.
        res = self.nodes[0].testmempoolaccept(
            rawtxs=[replacement_parent_tx['hex']])[0]

        # Parent can be replaced.
        assert_equal(res['allowed'], True)

        # Create an opt-out child tx spending the opt-in parent
        parent_utxo = self.wallet.get_utxo(txid=optin_parent_tx['txid'])
        optout_child_tx = self.wallet.send_self_transfer(
            from_node=self.nodes[0],
            utxo_to_spend=parent_utxo,
            sequence=0xffffffff,
            fee_rate=Decimal('0.01'),
        )

        # Reports true due to inheritance
        assert_equal(
            True, self.nodes[0].getmempoolentry(
                optout_child_tx['txid'])['bip125-replaceable'])

        replacement_child_tx = self.wallet.create_self_transfer(
            from_node=self.nodes[0],
            utxo_to_spend=parent_utxo,
            sequence=0xffffffff,
            fee_rate=Decimal('0.02'),
            mempool_valid=False,
        )

        # Broadcast replacement child tx
        # BIP 125 :
        # 1. The original transactions signal replaceability explicitly or through inheritance as described in the above
        # Summary section.
        # The original transaction (`optout_child_tx`) doesn't signal RBF but its parent (`optin_parent_tx`) does.
        # The replacement transaction (`replacement_child_tx`) should be able to replace the original transaction.
        # See CVE-2021-31876 for further explanations.
        assert_equal(
            True, self.nodes[0].getmempoolentry(
                optin_parent_tx['txid'])['bip125-replaceable'])
        assert_raises_rpc_error(-26, 'txn-mempool-conflict',
                                self.nodes[0].sendrawtransaction,
                                replacement_child_tx["hex"], 0)

        self.log.info(
            'Check that the child tx can still be replaced (via a tx that also replaces the parent)'
        )
        replacement_parent_tx = self.wallet.send_self_transfer(
            from_node=self.nodes[0],
            utxo_to_spend=confirmed_utxo,
            sequence=0xffffffff,
            fee_rate=Decimal('0.03'),
        )
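        # Replacing the parent evicts it from the mempool together with all of its
        # descendants, so the non-signaling child is removed as a side effect.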
        # Check that child is removed and update wallet utxo state
        assert_raises_rpc_error(-5, 'Transaction not in mempool',
                                self.nodes[0].getmempoolentry,
                                optout_child_tx['txid'])
        self.wallet.get_utxo(txid=optout_child_tx['txid'])

    def test_replacement_relay_fee(self):
        tx = self.wallet.send_self_transfer(from_node=self.nodes[0])['tx']

        # Higher fee, higher feerate, different txid, but the replacement does not provide a relay
        # fee conforming to the node's `incrementalrelayfee` policy of 1000 sat per KB.
        tx.vout[0].nValue -= 1
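        # Cutting the output by 1 sat raises the fee by only 1 sat, well below the required
        # bump of incrementalrelayfee (1 sat/vB by default) times the replacement's vsize
        # (roughly 100 sat for this transaction), so the replacement is rejected.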
        assert_raises_rpc_error(-26, "insufficient fee",
                                self.nodes[0].sendrawtransaction,
                                tx.serialize().hex())
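A minimal sketch of the signaling check these tests revolve around (the helper name and the node/txid arguments are illustrative, not part of the example above): a transaction explicitly signals BIP125 replaceability when any of its inputs uses a sequence number below 0xfffffffe.

def signals_rbf(node, txid):
    """Return True if the mempool transaction explicitly signals BIP125 replaceability."""
    decoded = node.getrawtransaction(txid, True)
    # Explicit signaling only; a non-signaling child of a signaling parent is
    # still treated as replaceable through inheritance.
    return any(vin["sequence"] < 0xfffffffe for vin in decoded["vin"])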
Example #10
class RPCMempoolInfoTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1

    def run_test(self):
        self.wallet = MiniWallet(self.nodes[0])
        self.generate(self.wallet, COINBASE_MATURITY + 1)
        self.wallet.rescan_utxos()
        confirmed_utxo = self.wallet.get_utxo()

        # Create a tree of unconfirmed transactions in the mempool:
        #             txA
        #             / \
        #            /   \
        #           /     \
        #          /       \
        #         /         \
        #       txB         txC
        #       / \         / \
        #      /   \       /   \
        #    txD   txE   txF   txG
        #            \   /
        #             \ /
        #             txH

        def create_tx(**kwargs):
            return self.wallet.send_self_transfer_multi(
                from_node=self.nodes[0],
                **kwargs,
            )

        txA = create_tx(utxos_to_spend=[confirmed_utxo], num_outputs=2)
        txB = create_tx(utxos_to_spend=[txA["new_utxos"][0]], num_outputs=2)
        txC = create_tx(utxos_to_spend=[txA["new_utxos"][1]], num_outputs=2)
        txD = create_tx(utxos_to_spend=[txB["new_utxos"][0]], num_outputs=1)
        txE = create_tx(utxos_to_spend=[txB["new_utxos"][1]], num_outputs=1)
        txF = create_tx(utxos_to_spend=[txC["new_utxos"][0]], num_outputs=2)
        txG = create_tx(utxos_to_spend=[txC["new_utxos"][1]], num_outputs=1)
        txH = create_tx(utxos_to_spend=[txE["new_utxos"][0],txF["new_utxos"][0]], num_outputs=1)
        txidA, txidB, txidC, txidD, txidE, txidF, txidG, txidH = [
            tx["txid"] for tx in [txA, txB, txC, txD, txE, txF, txG, txH]
        ]

        mempool = self.nodes[0].getrawmempool()
        assert_equal(len(mempool), 8)
        for txid in [txidA, txidB, txidC, txidD, txidE, txidF, txidG, txidH]:
            assert_equal(txid in mempool, True)

        self.log.info("Find transactions spending outputs")
        result = self.nodes[0].gettxspendingprevout([ {'txid' : confirmed_utxo['txid'], 'vout' : 0}, {'txid' : txidA, 'vout' : 1} ])
        assert_equal(result, [ {'txid' : confirmed_utxo['txid'], 'vout' : 0, 'spendingtxid' : txidA}, {'txid' : txidA, 'vout' : 1, 'spendingtxid' : txidC} ])

        self.log.info("Find transaction spending multiple outputs")
        result = self.nodes[0].gettxspendingprevout([ {'txid' : txidE, 'vout' : 0}, {'txid' : txidF, 'vout' : 0} ])
        assert_equal(result, [ {'txid' : txidE, 'vout' : 0, 'spendingtxid' : txidH}, {'txid' : txidF, 'vout' : 0, 'spendingtxid' : txidH} ])

        self.log.info("Find no transaction when output is unspent")
        result = self.nodes[0].gettxspendingprevout([ {'txid' : txidH, 'vout' : 0} ])
        assert_equal(result, [ {'txid' : txidH, 'vout' : 0} ])
        result = self.nodes[0].gettxspendingprevout([ {'txid' : txidA, 'vout' : 5} ])
        assert_equal(result, [ {'txid' : txidA, 'vout' : 5} ])

        self.log.info("Mixed spent and unspent outputs")
        result = self.nodes[0].gettxspendingprevout([ {'txid' : txidB, 'vout' : 0}, {'txid' : txidG, 'vout' : 3} ])
        assert_equal(result, [ {'txid' : txidB, 'vout' : 0, 'spendingtxid' : txidD}, {'txid' : txidG, 'vout' : 3} ])

        self.log.info("Unknown input fields")
        assert_raises_rpc_error(-3, "Unexpected key unknown", self.nodes[0].gettxspendingprevout, [{'txid' : txidC, 'vout' : 1, 'unknown' : 42}])

        self.log.info("Invalid vout provided")
        assert_raises_rpc_error(-8, "Invalid parameter, vout cannot be negative", self.nodes[0].gettxspendingprevout, [{'txid' : txidA, 'vout' : -1}])

        self.log.info("Invalid txid provided")
        assert_raises_rpc_error(-3, "Expected type string for txid, got number", self.nodes[0].gettxspendingprevout, [{'txid' : 42, 'vout' : 0}])

        self.log.info("Missing outputs")
        assert_raises_rpc_error(-8, "Invalid parameter, outputs are missing", self.nodes[0].gettxspendingprevout, [])

        self.log.info("Missing vout")
        assert_raises_rpc_error(-3, "Missing vout", self.nodes[0].gettxspendingprevout, [{'txid' : txidA}])

        self.log.info("Missing txid")
        assert_raises_rpc_error(-3, "Missing txid", self.nodes[0].gettxspendingprevout, [{'vout' : 3}])
Example #11
class ScantxoutsetTest(UmkoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1

    def sendtodestination(self, destination, amount):
        # interpret strings as addresses, assume scriptPubKey otherwise
        if isinstance(destination, str):
            destination = address_to_scriptpubkey(destination)
        self.wallet.send_to(from_node=self.nodes[0],
                            scriptPubKey=destination,
                            amount=int(COIN * amount))

    def run_test(self):
        self.wallet = MiniWallet(self.nodes[0])
        self.wallet.rescan_utxos()

        self.log.info("Create UTXOs...")
        pubk1, spk_P2SH_SEGWIT, addr_P2SH_SEGWIT = getnewdestination(
            "p2sh-segwit")
        pubk2, spk_LEGACY, addr_LEGACY = getnewdestination("legacy")
        pubk3, spk_BECH32, addr_BECH32 = getnewdestination("bech32")
        self.sendtodestination(spk_P2SH_SEGWIT, 0.001)
        self.sendtodestination(spk_LEGACY, 0.002)
        self.sendtodestination(spk_BECH32, 0.004)

        # send to child keys of tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK
        self.sendtodestination("mkHV1C6JLheLoUSSZYk7x3FH5tnx9bu7yc",
                               0.008)  # (m/0'/0'/0')
        self.sendtodestination("mipUSRmJAj2KrjSvsPQtnP8ynUon7FhpCR",
                               0.016)  # (m/0'/0'/1')
        self.sendtodestination("n37dAGe6Mq1HGM9t4b6rFEEsDGq7Fcgfqg",
                               0.032)  # (m/0'/0'/1500')
        self.sendtodestination("mqS9Rpg8nNLAzxFExsgFLCnzHBsoQ3PRM6",
                               0.064)  # (m/0'/0'/0)
        self.sendtodestination("mnTg5gVWr3rbhHaKjJv7EEEc76ZqHgSj4S",
                               0.128)  # (m/0'/0'/1)
        self.sendtodestination("mketCd6B9U9Uee1iCsppDJJBHfvi6U6ukC",
                               0.256)  # (m/0'/0'/1500)
        self.sendtodestination("mj8zFzrbBcdaWXowCQ1oPZ4qioBVzLzAp7",
                               0.512)  # (m/1/1/0')
        self.sendtodestination("mfnKpKQEftniaoE1iXuMMePQU3PUpcNisA",
                               1.024)  # (m/1/1/1')
        self.sendtodestination("mou6cB1kaP1nNJM1sryW6YRwnd4shTbXYQ",
                               2.048)  # (m/1/1/1500')
        self.sendtodestination("mtfUoUax9L4tzXARpw1oTGxWyoogp52KhJ",
                               4.096)  # (m/1/1/0)
        self.sendtodestination("mxp7w7j8S1Aq6L8StS2PqVvtt4HGxXEvdy",
                               8.192)  # (m/1/1/1)
        self.sendtodestination("mpQ8rokAhp1TAtJQR6F6TaUmjAWkAWYYBq",
                               16.384)  # (m/1/1/1500)

        self.generate(self.nodes[0], 1)

        scan = self.nodes[0].scantxoutset("start", [])
        info = self.nodes[0].gettxoutsetinfo()
        assert_equal(scan['success'], True)
        assert_equal(scan['height'], info['height'])
        assert_equal(scan['txouts'], info['txouts'])
        assert_equal(scan['bestblock'], info['bestblock'])

        self.log.info("Test if we have found the non HD unspent outputs.")
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "pkh(" + pubk1.hex() + ")", "pkh(" + pubk2.hex() + ")",
                "pkh(" + pubk3.hex() + ")"
            ])['total_amount'], Decimal("0.002"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "wpkh(" + pubk1.hex() + ")", "wpkh(" + pubk2.hex() + ")",
                "wpkh(" + pubk3.hex() + ")"
            ])['total_amount'], Decimal("0.004"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "sh(wpkh(" + pubk1.hex() + "))", "sh(wpkh(" + pubk2.hex() +
                "))", "sh(wpkh(" + pubk3.hex() + "))"
            ])['total_amount'], Decimal("0.001"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(" + pubk1.hex() + ")", "combo(" + pubk2.hex() + ")",
                "combo(" + pubk3.hex() + ")"
            ])['total_amount'], Decimal("0.007"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "addr(" + addr_P2SH_SEGWIT + ")", "addr(" + addr_LEGACY + ")",
                "addr(" + addr_BECH32 + ")"
            ])['total_amount'], Decimal("0.007"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "addr(" + addr_P2SH_SEGWIT + ")", "addr(" + addr_LEGACY + ")",
                "combo(" + pubk3.hex() + ")"
            ])['total_amount'], Decimal("0.007"))

        self.log.info("Test range validation.")
        assert_raises_rpc_error(-8, "End of range is too high",
                                self.nodes[0].scantxoutset, "start",
                                [{
                                    "desc": "desc",
                                    "range": -1
                                }])
        assert_raises_rpc_error(-8, "Range should be greater or equal than 0",
                                self.nodes[0].scantxoutset, "start",
                                [{
                                    "desc": "desc",
                                    "range": [-1, 10]
                                }])
        assert_raises_rpc_error(
            -8, "End of range is too high", self.nodes[0].scantxoutset,
            "start", [{
                "desc": "desc",
                "range": [(2 << 31 + 1) - 1000000, (2 << 31 + 1)]
            }])
        assert_raises_rpc_error(
            -8, "Range specified as [begin,end] must not have begin after end",
            self.nodes[0].scantxoutset, "start", [{
                "desc": "desc",
                "range": [2, 1]
            }])
        assert_raises_rpc_error(-8, "Range is too large",
                                self.nodes[0].scantxoutset, "start",
                                [{
                                    "desc": "desc",
                                    "range": [0, 1000001]
                                }])

        self.log.info("Test extended key derivation.")
        # Run various scans, and verify that the sum of the amounts of the matches corresponds to the expected subset.
        # Note that all amounts in the UTXO set are powers of 2 multiplied by 0.001 UMK, so each total amount uniquely identifies a subset.
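        # For example, a range of 1499 on .../0'/0'/*h covers hardened indices 0-1499 and
        # matches the 0.008 and 0.016 outputs (total 0.024); widening the range to 1500 also
        # picks up the 0.032 output at index 1500' (total 0.056).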
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/0h)"
            ])['total_amount'], Decimal("0.008"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0'/1h)"
            ])['total_amount'], Decimal("0.016"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/1500')"
            ])['total_amount'], Decimal("0.032"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0h/0)"
            ])['total_amount'], Decimal("0.064"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/1)"
            ])['total_amount'], Decimal("0.128"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/1500)"
            ])['total_amount'], Decimal("0.256"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/*h)",
                "range": 1499
            }])['total_amount'], Decimal("0.024"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0'/*h)",
                "range": 1500
            }])['total_amount'], Decimal("0.056"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/*)",
                "range": 1499
            }])['total_amount'], Decimal("0.192"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/*)",
                "range": 1500
            }])['total_amount'], Decimal("0.448"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0')"
            ])['total_amount'], Decimal("0.512"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1')"
            ])['total_amount'], Decimal("1.024"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1500h)"
            ])['total_amount'], Decimal("2.048"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)"
            ])['total_amount'], Decimal("4.096"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1)"
            ])['total_amount'], Decimal("8.192"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1500)"
            ])['total_amount'], Decimal("16.384"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/0)"
            ])['total_amount'], Decimal("4.096"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo([abcdef88/1/2'/3/4h]tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/1)"
            ])['total_amount'], Decimal("8.192"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/1500)"
            ])['total_amount'], Decimal("16.384"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*')",
                "range": 1499
            }])['total_amount'], Decimal("1.536"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*')",
                "range": 1500
            }])['total_amount'], Decimal("3.584"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)",
                "range": 1499
            }])['total_amount'], Decimal("12.288"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)",
                "range": 1500
            }])['total_amount'], Decimal("28.672"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)",
                "range": 1499
            }])['total_amount'], Decimal("12.288"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)",
                "range": 1500
            }])['total_amount'], Decimal("28.672"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)",
                "range": [1500, 1500]
            }])['total_amount'], Decimal("16.384"))

        # Test the reported descriptors for a few matches
        assert_equal(
            descriptors(self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/*)",
                "range": 1499
            }])), [
                "pkh([0c5f9a1e/0'/0'/0]026dbd8b2315f296d36e6b6920b1579ca75569464875c7ebe869b536a7d9503c8c)#dzxw429x",
                "pkh([0c5f9a1e/0'/0'/1]033e6f25d76c00bedb3a8993c7d5739ee806397f0529b1b31dda31ef890f19a60c)#43rvceed"
            ])
        assert_equal(
            descriptors(self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)"
            ])), [
                "pkh([0c5f9a1e/1/1/0]03e1c5b6e650966971d7e71ef2674f80222752740fc1dfd63bbbd220d2da9bd0fb)#cxmct4w8"
            ])
        assert_equal(
            descriptors(self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)",
                "range": 1500
            }])), [
                'pkh([0c5f9a1e/1/1/0]03e1c5b6e650966971d7e71ef2674f80222752740fc1dfd63bbbd220d2da9bd0fb)#cxmct4w8',
                'pkh([0c5f9a1e/1/1/1500]03832901c250025da2aebae2bfb38d5c703a57ab66ad477f9c578bfbcd78abca6f)#vchwd07g',
                'pkh([0c5f9a1e/1/1/1]030d820fc9e8211c4169be8530efbc632775d8286167afd178caaf1089b77daba7)#z2t3ypsa'
            ])

        # Check that status and abort don't need second arg
        assert_equal(self.nodes[0].scantxoutset("status"), None)
        assert_equal(self.nodes[0].scantxoutset("abort"), False)

        # Check that second arg is needed for start
        assert_raises_rpc_error(
            -1, "scanobjects argument is required for the start action",
            self.nodes[0].scantxoutset, "start")
Example #12
class MempoolPackagesTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.extra_args = [["-maxorphantx=1000"]]

    def chain_tx(self, utxos_to_spend, *, num_outputs=1):
        return self.wallet.send_self_transfer_multi(
            from_node=self.nodes[0],
            utxos_to_spend=utxos_to_spend,
            num_outputs=num_outputs)['new_utxos']

    def run_test(self):
        self.wallet = MiniWallet(self.nodes[0])
        self.wallet.rescan_utxos()

        # MAX_ANCESTORS transactions off a confirmed tx should be fine
        chain = []
        utxo = self.wallet.get_utxo()
        for _ in range(4):
            utxo, utxo2 = self.chain_tx([utxo], num_outputs=2)
            chain.append(utxo2)
        for _ in range(MAX_ANCESTORS - 4):
            utxo, = self.chain_tx([utxo])
            chain.append(utxo)
        second_chain, = self.chain_tx([self.wallet.get_utxo()])

        # Check mempool has MAX_ANCESTORS + 1 transactions in it
        assert_equal(len(self.nodes[0].getrawmempool()), MAX_ANCESTORS + 1)

        # Adding one more transaction on to the chain should fail.
        assert_raises_rpc_error(
            -26,
            "too-long-mempool-chain, too many unconfirmed ancestors [limit: 25]",
            self.chain_tx, [utxo])
        # ...even if it chains on from some point in the middle of the chain.
        assert_raises_rpc_error(
            -26, "too-long-mempool-chain, too many descendants", self.chain_tx,
            [chain[2]])
        assert_raises_rpc_error(
            -26, "too-long-mempool-chain, too many descendants", self.chain_tx,
            [chain[1]])
        # ...even if it chains on to two parent transactions with one in the chain.
        assert_raises_rpc_error(
            -26, "too-long-mempool-chain, too many descendants", self.chain_tx,
            [chain[0], second_chain])
        # ...especially if it's > 40k weight
        assert_raises_rpc_error(-26,
                                "too-long-mempool-chain, too many descendants",
                                self.chain_tx, [chain[0]],
                                num_outputs=350)
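        # Per Bitcoin Core's CPFP carve-out, one extra descendant beyond the limit is allowed
        # as long as it is no larger than ~40k weight (10,000 vbytes) and has at most one
        # unconfirmed ancestor; the oversized attempt above fails, the small one below succeeds.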
        # But not if it chains directly off the first transaction
        replaceable_tx = self.wallet.send_self_transfer_multi(
            from_node=self.nodes[0], utxos_to_spend=[chain[0]])['tx']
        # and the second chain should work just fine
        self.chain_tx([second_chain])

        # Make sure we can RBF the chain which used our carve-out rule
        replaceable_tx.vout[0].nValue -= 1000000
        self.nodes[0].sendrawtransaction(replaceable_tx.serialize().hex())

        # Finally, check that we added two transactions
        assert_equal(len(self.nodes[0].getrawmempool()), MAX_ANCESTORS + 3)
Example #13
class PrioritiseTransactionTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.extra_args = [[
            "-printpriority=1",
            "-datacarriersize=100000",
        ]] * self.num_nodes
        self.supports_cli = False

    def test_diamond(self):
        self.log.info("Test diamond-shape package with priority")
        mock_time = int(time.time())
        self.nodes[0].setmocktime(mock_time)

        #      tx_a
        #      / \
        #     /   \
        #   tx_b  tx_c
        #     \   /
        #      \ /
        #      tx_d

        tx_o_a = self.wallet.send_self_transfer_multi(
            from_node=self.nodes[0],
            num_outputs=2,
        )
        txid_a = tx_o_a["txid"]

        tx_o_b, tx_o_c = [
            self.wallet.send_self_transfer(
                from_node=self.nodes[0],
                utxo_to_spend=u,
            ) for u in tx_o_a["new_utxos"]
        ]
        txid_b = tx_o_b["txid"]
        txid_c = tx_o_c["txid"]

        tx_o_d = self.wallet.send_self_transfer_multi(
            from_node=self.nodes[0],
            utxos_to_spend=[
                self.wallet.get_utxo(txid=txid_b),
                self.wallet.get_utxo(txid=txid_c),
            ],
        )
        txid_d = tx_o_d["txid"]

        self.log.info("Test priority while txs are in mempool")
        raw_before = self.nodes[0].getrawmempool(verbose=True)
        fee_delta_b = Decimal(9999) / COIN
        fee_delta_c_1 = Decimal(-1234) / COIN
        fee_delta_c_2 = Decimal(8888) / COIN
        self.nodes[0].prioritisetransaction(txid=txid_b,
                                            fee_delta=int(fee_delta_b * COIN))
        self.nodes[0].prioritisetransaction(txid=txid_c,
                                            fee_delta=int(fee_delta_c_1 *
                                                          COIN))
        self.nodes[0].prioritisetransaction(txid=txid_c,
                                            fee_delta=int(fee_delta_c_2 *
                                                          COIN))
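        # A fee delta changes the entry's own modified fee and is also folded into the
        # descendant fees of its ancestors and the ancestor fees of its descendants; the
        # adjustments below mirror that propagation for txid_a, txid_b, txid_c and txid_d.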
        raw_before[txid_a]["fees"][
            "descendant"] += fee_delta_b + fee_delta_c_1 + fee_delta_c_2
        raw_before[txid_b]["fees"]["modified"] += fee_delta_b
        raw_before[txid_b]["fees"]["ancestor"] += fee_delta_b
        raw_before[txid_b]["fees"]["descendant"] += fee_delta_b
        raw_before[txid_c]["fees"]["modified"] += fee_delta_c_1 + fee_delta_c_2
        raw_before[txid_c]["fees"]["ancestor"] += fee_delta_c_1 + fee_delta_c_2
        raw_before[txid_c]["fees"][
            "descendant"] += fee_delta_c_1 + fee_delta_c_2
        raw_before[txid_d]["fees"][
            "ancestor"] += fee_delta_b + fee_delta_c_1 + fee_delta_c_2
        raw_after = self.nodes[0].getrawmempool(verbose=True)
        assert_equal(raw_before[txid_a], raw_after[txid_a])
        assert_equal(raw_before, raw_after)

        self.log.info("Test priority while txs are not in mempool")
        self.restart_node(0, extra_args=["-nopersistmempool"])
        self.nodes[0].setmocktime(mock_time)
        assert_equal(self.nodes[0].getmempoolinfo()["size"], 0)
        self.nodes[0].prioritisetransaction(txid=txid_b,
                                            fee_delta=int(fee_delta_b * COIN))
        self.nodes[0].prioritisetransaction(txid=txid_c,
                                            fee_delta=int(fee_delta_c_1 *
                                                          COIN))
        self.nodes[0].prioritisetransaction(txid=txid_c,
                                            fee_delta=int(fee_delta_c_2 *
                                                          COIN))
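        # Fee deltas can be set for txids that are not yet in the mempool; they are remembered
        # and applied once the transactions are resubmitted, so the entries should match the
        # pre-restart state exactly.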
        for t in [tx_o_a["hex"], tx_o_b["hex"], tx_o_c["hex"], tx_o_d["hex"]]:
            self.nodes[0].sendrawtransaction(t)
        raw_after = self.nodes[0].getrawmempool(verbose=True)
        assert_equal(raw_before[txid_a], raw_after[txid_a])
        assert_equal(raw_before, raw_after)

        # Clear mempool
        self.generate(self.nodes[0], 1)

        # Use default extra_args
        self.restart_node(0)

    def run_test(self):
        self.wallet = MiniWallet(self.nodes[0])
        self.wallet.rescan_utxos()

        # Test `prioritisetransaction` required parameters
        assert_raises_rpc_error(-1, "prioritisetransaction",
                                self.nodes[0].prioritisetransaction)
        assert_raises_rpc_error(-1, "prioritisetransaction",
                                self.nodes[0].prioritisetransaction, '')
        assert_raises_rpc_error(-1, "prioritisetransaction",
                                self.nodes[0].prioritisetransaction, '', 0)

        # Test `prioritisetransaction` invalid extra parameters
        assert_raises_rpc_error(-1, "prioritisetransaction",
                                self.nodes[0].prioritisetransaction, '', 0, 0,
                                0)

        # Test `prioritisetransaction` invalid `txid`
        assert_raises_rpc_error(-8,
                                "txid must be of length 64 (not 3, for 'foo')",
                                self.nodes[0].prioritisetransaction,
                                txid='foo',
                                fee_delta=0)
        assert_raises_rpc_error(
            -8,
            "txid must be hexadecimal string (not 'Zd1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000')",
            self.nodes[0].prioritisetransaction,
            txid=
            'Zd1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000',
            fee_delta=0)

        # Test `prioritisetransaction` invalid `dummy`
        txid = '1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000'
        assert_raises_rpc_error(-1, "JSON value is not a number as expected",
                                self.nodes[0].prioritisetransaction, txid,
                                'foo', 0)
        assert_raises_rpc_error(
            -8,
            "Priority is no longer supported, dummy argument to prioritisetransaction must be 0.",
            self.nodes[0].prioritisetransaction, txid, 1, 0)

        # Test `prioritisetransaction` invalid `fee_delta`
        assert_raises_rpc_error(-1,
                                "JSON value is not an integer as expected",
                                self.nodes[0].prioritisetransaction,
                                txid=txid,
                                fee_delta='foo')

        self.test_diamond()

        self.txouts = gen_return_txouts()
        self.relayfee = self.nodes[0].getnetworkinfo()['relayfee']

        utxo_count = 90
        utxos = self.wallet.send_self_transfer_multi(
            from_node=self.nodes[0], num_outputs=utxo_count)['new_utxos']
        self.generate(self.wallet, 1)
        assert_equal(len(self.nodes[0].getrawmempool()), 0)

        base_fee = self.relayfee * 100  # our transactions are smaller than 100kb
        txids = []

        # Create 3 batches of transactions at 3 different fee rate levels
        range_size = utxo_count // 3
        for i in range(3):
            txids.append([])
            start_range = i * range_size
            end_range = start_range + range_size
            txids[i] = create_lots_of_big_transactions(
                self.wallet, self.nodes[0], (i + 1) * base_fee,
                end_range - start_range, self.txouts,
                utxos[start_range:end_range])

        # Make sure that the size of each group of transactions exceeds
        # MAX_BLOCK_WEIGHT // 4 -- otherwise the test needs to be revised to
        # create more transactions.
        mempool = self.nodes[0].getrawmempool(True)
        sizes = [0, 0, 0]
        for i in range(3):
            for j in txids[i]:
                assert j in mempool
                sizes[i] += mempool[j]['vsize']
            assert sizes[i] > MAX_BLOCK_WEIGHT // 4  # Fail => raise utxo_count

        # add a fee delta to something in the cheapest bucket and make sure it gets mined
        # also check that a different entry in the cheapest bucket is NOT mined
        self.nodes[0].prioritisetransaction(txid=txids[0][0],
                                            fee_delta=int(3 * base_fee * COIN))
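        # The cheapest bucket pays 1x base_fee; adding a 3x base_fee delta lifts this entry's
        # modified fee to ~4x base_fee, above the most expensive bucket, so it should be
        # selected into the next block while its 1x sibling is left behind.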

        self.generate(self.nodes[0], 1)

        mempool = self.nodes[0].getrawmempool()
        self.log.info("Assert that prioritised transaction was mined")
        assert txids[0][0] not in mempool
        assert txids[0][1] in mempool

        high_fee_tx = None
        for x in txids[2]:
            if x not in mempool:
                high_fee_tx = x

        # Something high-fee should have been mined!
        assert high_fee_tx is not None

        # Add a prioritisation before a tx is in the mempool (de-prioritising a
        # high-fee transaction so that it's now low fee).
        self.nodes[0].prioritisetransaction(
            txid=high_fee_tx, fee_delta=-int(2 * base_fee * COIN))

        # Add everything back to mempool
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # Check to make sure our high fee rate tx is back in the mempool
        mempool = self.nodes[0].getrawmempool()
        assert high_fee_tx in mempool

        # Now verify the modified-high feerate transaction isn't mined before
        # the other high fee transactions. Keep mining until our mempool has
        # decreased by all the high fee size that we calculated above.
        while (self.nodes[0].getmempoolinfo()['bytes'] > sizes[0] + sizes[1]):
            self.generate(self.nodes[0], 1, sync_fun=self.no_op)

        # High fee transaction should not have been mined, but other high fee rate
        # transactions should have been.
        mempool = self.nodes[0].getrawmempool()
        self.log.info(
            "Assert that de-prioritised transaction is still in mempool")
        assert high_fee_tx in mempool
        for x in txids[2]:
            if (x != high_fee_tx):
                assert x not in mempool

        # Create a free transaction.  Should be rejected.
        tx_res = self.wallet.create_self_transfer(fee_rate=0)
        tx_hex = tx_res['hex']
        tx_id = tx_res['txid']

        # This will raise an exception due to min relay fee not being met
        assert_raises_rpc_error(-26, "min relay fee not met",
                                self.nodes[0].sendrawtransaction, tx_hex)
        assert tx_id not in self.nodes[0].getrawmempool()

        # This is a less than 1000-byte transaction, so just set the fee
        # to be the minimum for a 1000-byte transaction and check that it is
        # accepted.
        self.nodes[0].prioritisetransaction(txid=tx_id,
                                            fee_delta=int(self.relayfee *
                                                          COIN))

        self.log.info(
            "Assert that prioritised free transaction is accepted to mempool")
        assert_equal(self.nodes[0].sendrawtransaction(tx_hex), tx_id)
        assert tx_id in self.nodes[0].getrawmempool()

        # Test that calling prioritisetransaction is sufficient to trigger
        # getblocktemplate to (eventually) return a new block.
        mock_time = int(time.time())
        self.nodes[0].setmocktime(mock_time)
        template = self.nodes[0].getblocktemplate({'rules': ['segwit']})
        self.nodes[0].prioritisetransaction(
            txid=tx_id, fee_delta=-int(self.relayfee * COIN))
        self.nodes[0].setmocktime(mock_time + 10)
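        # getblocktemplate only rebuilds the template once the mempool has changed and a few
        # seconds have passed since the last call, so the mock time is advanced to let the
        # prioritisation take effect.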
        new_template = self.nodes[0].getblocktemplate({'rules': ['segwit']})

        assert template != new_template
Example #14
class MempoolUnbroadcastTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2
        if self.is_wallet_compiled():
            self.requires_wallet = True

    def run_test(self):
        self.wallet = MiniWallet(self.nodes[0])
        self.wallet.rescan_utxos()
        self.test_broadcast()
        self.test_txn_removal()

    def test_broadcast(self):
        self.log.info("Test that mempool reattempts delivery of locally submitted transaction")
        node = self.nodes[0]

        self.disconnect_nodes(0, 1)

        self.log.info("Generate transactions that only node 0 knows about")

        if self.is_wallet_compiled():
            # generate a wallet txn
            addr = node.getnewaddress()
            wallet_tx_hsh = node.sendtoaddress(addr, 0.0001)

        # generate a txn using sendrawtransaction
        txFS = self.wallet.create_self_transfer()
        rpc_tx_hsh = node.sendrawtransaction(txFS["hex"])

        # check transactions are in unbroadcast using rpc
        mempoolinfo = self.nodes[0].getmempoolinfo()
        unbroadcast_count = 1
        if self.is_wallet_compiled():
            unbroadcast_count += 1
        assert_equal(mempoolinfo['unbroadcastcount'], unbroadcast_count)
        mempool = self.nodes[0].getrawmempool(True)
        for tx in mempool:
            assert_equal(mempool[tx]['unbroadcast'], True)

        # check that second node doesn't have these two txns
        mempool = self.nodes[1].getrawmempool()
        assert rpc_tx_hsh not in mempool
        if self.is_wallet_compiled():
            assert wallet_tx_hsh not in mempool

        # ensure that unbroadcast txs are persisted to mempool.dat
        self.restart_node(0)

        self.log.info("Reconnect nodes & check if they are sent to node 1")
        self.connect_nodes(0, 1)

        # fast forward into the future & ensure that the second node has the txns
        node.mockscheduler(MAX_INITIAL_BROADCAST_DELAY)
        self.sync_mempools(timeout=30)
        mempool = self.nodes[1].getrawmempool()
        assert rpc_tx_hsh in mempool
        if self.is_wallet_compiled():
            assert wallet_tx_hsh in mempool

        # check that transactions are no longer in first node's unbroadcast set
        mempool = self.nodes[0].getrawmempool(True)
        for tx in mempool:
            assert_equal(mempool[tx]['unbroadcast'], False)

        self.log.info("Add another connection & ensure transactions aren't broadcast again")

        conn = node.add_p2p_connection(P2PTxInvStore())
        node.mockscheduler(MAX_INITIAL_BROADCAST_DELAY)
        time.sleep(2) # allow sufficient time for possibility of broadcast
        assert_equal(len(conn.get_invs()), 0)

        self.disconnect_nodes(0, 1)
        node.disconnect_p2ps()

        self.log.info("Rebroadcast transaction and ensure it is not added to unbroadcast set when already in mempool")
        rpc_tx_hsh = node.sendrawtransaction(txFS["hex"])
        assert not node.getmempoolentry(rpc_tx_hsh)['unbroadcast']

    def test_txn_removal(self):
        self.log.info("Test that transactions removed from mempool are removed from unbroadcast set")
        node = self.nodes[0]

        # since the node doesn't have any connections, it will not receive
        # any GETDATAs & thus the transaction will remain in the unbroadcast set.
        txhsh = self.wallet.send_self_transfer(from_node=node)["txid"]

        # check transaction was removed from unbroadcast set due to presence in
        # a block
        removal_reason = "Removed {} from set of unbroadcast txns before confirmation that txn was sent out".format(txhsh)
        with node.assert_debug_log([removal_reason]):
            self.generate(node, 1, sync_fun=self.no_op)
Example #15
class DataCarrierTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 3
        self.extra_args = [[], ["-datacarrier=0"],
                           [
                               "-datacarrier=1",
                               f"-datacarriersize={MAX_OP_RETURN_RELAY - 1}"
                           ]]

    def test_null_data_transaction(self, node: TestNode, data: bytes,
                                   success: bool) -> None:
        tx = self.wallet.create_self_transfer(fee_rate=0)["tx"]
        tx.vout.append(
            CTxOut(nValue=0, scriptPubKey=CScript([OP_RETURN, data])))
        tx.vout[0].nValue -= tx.get_vsize()  # simply pay 1sat/vbyte fee

        tx_hex = tx.serialize().hex()

        if success:
            self.wallet.sendrawtransaction(from_node=node, tx_hex=tx_hex)
            assert tx.rehash() in node.getrawmempool(
                True), f'{tx_hex} not in mempool'
        else:
            assert_raises_rpc_error(-26,
                                    "scriptpubkey",
                                    self.wallet.sendrawtransaction,
                                    from_node=node,
                                    tx_hex=tx_hex)

    def run_test(self):
        self.wallet = MiniWallet(self.nodes[0])
        self.wallet.rescan_utxos()

        # By default, only 80 bytes are used for data (+1 for OP_RETURN, +2 for the pushdata opcodes).
        default_size_data = random_bytes(MAX_OP_RETURN_RELAY - 3)
        too_long_data = random_bytes(MAX_OP_RETURN_RELAY - 2)
        small_data = random_bytes(MAX_OP_RETURN_RELAY - 4)
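        # With the default limit (MAX_OP_RETURN_RELAY, 83 script bytes in Bitcoin Core), the
        # script OP_RETURN + OP_PUSHDATA1 + length byte + data leaves room for 80 data bytes.
        # Node 2 lowers -datacarriersize by one byte, so only the 79-byte payload fits there.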

        self.log.info(
            "Testing null data transaction with default -datacarrier and -datacarriersize values."
        )
        self.test_null_data_transaction(node=self.nodes[0],
                                        data=default_size_data,
                                        success=True)

        self.log.info(
            "Testing a null data transaction larger than allowed by the default -datacarriersize value."
        )
        self.test_null_data_transaction(node=self.nodes[0],
                                        data=too_long_data,
                                        success=False)

        self.log.info(
            "Testing a null data transaction with -datacarrier=false.")
        self.test_null_data_transaction(node=self.nodes[1],
                                        data=default_size_data,
                                        success=False)

        self.log.info(
            "Testing a null data transaction with a size larger than accepted by -datacarriersize."
        )
        self.test_null_data_transaction(node=self.nodes[2],
                                        data=default_size_data,
                                        success=False)

        self.log.info(
            "Testing a null data transaction with a size smaller than accepted by -datacarriersize."
        )
        self.test_null_data_transaction(node=self.nodes[2],
                                        data=small_data,
                                        success=True)
Example #16
    def run_test(self):
        node = self.nodes[0]
        miniwallet = MiniWallet(node)
        miniwallet.rescan_utxos()

        self.log.info('Generate an empty block to address')
        address = miniwallet.get_address()
        hash = self.generateblock(node, output=address,
                                  transactions=[])['hash']
        block = node.getblock(blockhash=hash, verbose=2)
        assert_equal(len(block['tx']), 1)
        assert_equal(block['tx'][0]['vout'][0]['scriptPubKey']['address'],
                     address)

        self.log.info('Generate an empty block to a descriptor')
        hash = self.generateblock(node, 'addr(' + address + ')', [])['hash']
        block = node.getblock(blockhash=hash, verbosity=2)
        assert_equal(len(block['tx']), 1)
        assert_equal(block['tx'][0]['vout'][0]['scriptPubKey']['address'],
                     address)

        self.log.info(
            'Generate an empty block to a combo descriptor with compressed pubkey'
        )
        combo_key = '0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798'
        combo_address = 'bcrt1qw508d6qejxtdg4y5r3zarvary0c5xw7kygt080'
        hash = self.generateblock(node, 'combo(' + combo_key + ')', [])['hash']
        block = node.getblock(hash, 2)
        assert_equal(len(block['tx']), 1)
        assert_equal(block['tx'][0]['vout'][0]['scriptPubKey']['address'],
                     combo_address)

        self.log.info(
            'Generate an empty block to a combo descriptor with uncompressed pubkey'
        )
        combo_key = '0408ef68c46d20596cc3f6ddf7c8794f71913add807f1dc55949fa805d764d191c0b7ce6894c126fce0babc6663042f3dde9b0cf76467ea315514e5a6731149c67'
        combo_address = 'mkc9STceoCcjoXEXe6cm66iJbmjM6zR9B2'
        hash = self.generateblock(node, 'combo(' + combo_key + ')', [])['hash']
        block = node.getblock(hash, 2)
        assert_equal(len(block['tx']), 1)
        assert_equal(block['tx'][0]['vout'][0]['scriptPubKey']['address'],
                     combo_address)

        # Generate some extra mempool transactions to verify they don't get mined
        for _ in range(10):
            miniwallet.send_self_transfer(from_node=node)

        self.log.info('Generate block with txid')
        txid = miniwallet.send_self_transfer(from_node=node)['txid']
        hash = self.generateblock(node, address, [txid])['hash']
        block = node.getblock(hash, 1)
        assert_equal(len(block['tx']), 2)
        assert_equal(block['tx'][1], txid)

        self.log.info('Generate block with raw tx')
        rawtx = miniwallet.create_self_transfer(from_node=node)['hex']
        hash = self.generateblock(node, address, [rawtx])['hash']

        block = node.getblock(hash, 1)
        assert_equal(len(block['tx']), 2)
        txid = block['tx'][1]
        assert_equal(
            node.getrawtransaction(txid=txid, verbose=False, blockhash=hash),
            rawtx)

        self.log.info('Fail to generate block with out of order txs')
        txid1 = miniwallet.send_self_transfer(from_node=node)['txid']
        utxo1 = miniwallet.get_utxo(txid=txid1)
        rawtx2 = miniwallet.create_self_transfer(from_node=node,
                                                 utxo_to_spend=utxo1)['hex']
        assert_raises_rpc_error(
            -25, 'TestBlockValidity failed: bad-txns-inputs-missingorspent',
            self.generateblock, node, address, [rawtx2, txid1])

        self.log.info('Fail to generate block with txid not in mempool')
        missing_txid = '0000000000000000000000000000000000000000000000000000000000000000'
        assert_raises_rpc_error(
            -5, 'Transaction ' + missing_txid + ' not in mempool.',
            self.generateblock, node, address, [missing_txid])

        self.log.info('Fail to generate block with invalid raw tx')
        invalid_raw_tx = '0000'
        assert_raises_rpc_error(
            -22, 'Transaction decode failed for ' + invalid_raw_tx,
            self.generateblock, node, address, [invalid_raw_tx])

        self.log.info('Fail to generate block with invalid address/descriptor')
        assert_raises_rpc_error(-5, 'Invalid address or descriptor',
                                self.generateblock, node, '1234', [])

        self.log.info('Fail to generate block with a ranged descriptor')
        ranged_descriptor = 'pkh(tpubD6NzVbkrYhZ4XgiXtGrdW5XDAPFCL9h7we1vwNCpn8tGbBcgfVYjXyhWo4E1xkh56hjod1RhGjxbaTLV3X4FyWuejifB9jusQ46QzG87VKp/0/*)'
        assert_raises_rpc_error(
            -8,
            'Ranged descriptor not accepted. Maybe pass through deriveaddresses first?',
            self.generateblock, node, ranged_descriptor, [])

        self.log.info(
            'Fail to generate block with a descriptor missing a private key')
        child_descriptor = 'pkh(tpubD6NzVbkrYhZ4XgiXtGrdW5XDAPFCL9h7we1vwNCpn8tGbBcgfVYjXyhWo4E1xkh56hjod1RhGjxbaTLV3X4FyWuejifB9jusQ46QzG87VKp/0\'/0)'
        assert_raises_rpc_error(-5,
                                'Cannot derive script without private keys',
                                self.generateblock, node, child_descriptor, [])
Example #17
class EstimateFeeTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 3
        # Force fSendTrickle to true (via whitelist.noban)
        self.extra_args = [
            ["[email protected]"],
            ["[email protected]", "-blockmaxweight=68000"],
            ["[email protected]", "-blockmaxweight=32000"],
        ]

    def setup_network(self):
        """
        We'll set up the network to have 3 nodes that all mine with different parameters.
        But first we need to use one node to create a lot of outputs
        which we will use to generate our transactions.
        """
        self.add_nodes(3, extra_args=self.extra_args)
        # Use node0 to mine blocks for input splitting
        # Node1 mines small blocks, but ones that are bigger than the expected transaction rate.
        # NOTE: the CreateNewBlock code starts counting block weight at 4,000 weight
        # (68k weight is room enough for 120 or so transactions).
        # Node2 is a stingy miner that produces blocks which are too small
        # (room for only 55 or so transactions).

    def transact_and_mine(self, numblocks, mining_node):
        min_fee = Decimal("0.00001")
        # We will now mine numblocks blocks generating on average 100 transactions between each block
        # We shuffle our confirmed txout set before each set of transactions
        # small_txpuzzle_randfee will use transactions whose inputs are already in the chain when possible,
        # resorting to txs that depend on the mempool when those run out
        for _ in range(numblocks):
            random.shuffle(self.confutxo)
            for _ in range(random.randrange(100 - 50, 100 + 50)):
                from_index = random.randint(1, 2)
                (txhex, fee) = small_txpuzzle_randfee(
                    self.wallet,
                    self.nodes[from_index],
                    self.confutxo,
                    self.memutxo,
                    Decimal("0.005"),
                    min_fee,
                    min_fee,
                )
                tx_kbytes = (len(txhex) // 2) / 1000.0
                self.fees_per_kb.append(float(fee) / tx_kbytes)
            self.sync_mempools(wait=0.1)
            mined = mining_node.getblock(
                self.generate(mining_node, 1)[0], True)["tx"]
            # update which txouts are confirmed
            newmem = []
            for utx in self.memutxo:
                if utx["txid"] in mined:
                    self.confutxo.append(utx)
                else:
                    newmem.append(utx)
            self.memutxo = newmem

    def initial_split(self, node):
        """Split two coinbase UTxOs into many small coins"""
        self.confutxo = self.wallet.send_self_transfer_multi(
            from_node=node,
            utxos_to_spend=[self.wallet.get_utxo() for _ in range(2)],
            num_outputs=2048)['new_utxos']
        while len(node.getrawmempool()) > 0:
            self.generate(node, 1, sync_fun=self.no_op)

    def sanity_check_estimates_range(self):
        """Populate estimation buckets, assert estimates are in a sane range and
        are strictly increasing as the target decreases."""
        self.fees_per_kb = []
        self.memutxo = []
        self.log.info("Will output estimates for 1/2/3/6/15/25 blocks")

        for _ in range(2):
            self.log.info(
                "Creating transactions and mining them with a block size that can't keep up"
            )
            # Create transactions and mine 10 small blocks with node 2, but create txs faster than we can mine
            self.transact_and_mine(10, self.nodes[2])
            check_estimates(self.nodes[1], self.fees_per_kb)

            self.log.info(
                "Creating transactions and mining them at a block size that is just big enough"
            )
            # Generate transactions while mining 10 more blocks, this time with node1
            # which mines blocks with capacity just above the rate that transactions are being created
            self.transact_and_mine(10, self.nodes[1])
            check_estimates(self.nodes[1], self.fees_per_kb)

        # Finish by mining a normal-sized block:
        while len(self.nodes[1].getrawmempool()) > 0:
            self.generate(self.nodes[1], 1)

        self.log.info("Final estimates after emptying mempools")
        check_estimates(self.nodes[1], self.fees_per_kb)

    def test_feerate_mempoolminfee(self):
        high_val = 3 * self.nodes[1].estimatesmartfee(1)["feerate"]
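        # For example (illustrative numbers only): if estimatesmartfee(1) returned
        # 0.00050000 BTC/kvB, the node would be restarted with -minrelaytxfee=0.00150000,
        # raising the mempool min fee well above the previous estimates.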
        self.restart_node(1, extra_args=[f"-minrelaytxfee={high_val}"])
        check_estimates(self.nodes[1], self.fees_per_kb)
        self.restart_node(1)

    def sanity_check_rbf_estimates(self, utxos):
        """During 5 blocks, broadcast low fee transactions. Only 10% of them get
        confirmed and the remaining ones get RBF'd with a high fee transaction at
        the next block.
        The block policy estimator should return the high feerate.
        """
        # The broadcaster and block producer
        node = self.nodes[0]
        miner = self.nodes[1]
        # Feerates in sat/vB
        low_feerate = 1
        high_feerate = 10
        # Cache the utxos whose spender we will replace after it fails to get
        # confirmed
        utxos_to_respend = []
        txids_to_replace = []

        assert_greater_than_or_equal(len(utxos), 250)
        for _ in range(5):
            # Broadcast 45 low fee transactions that will need to be RBF'd
            for _ in range(45):
                u = utxos.pop(0)
                txid = send_tx(self.wallet, node, u, low_feerate)
                utxos_to_respend.append(u)
                txids_to_replace.append(txid)
            # Broadcast 5 low fee transactions which don't need to be RBF'd
            for _ in range(5):
                send_tx(self.wallet, node, utxos.pop(0), low_feerate)
            # Mine the transactions on the miner node, de-prioritising the ones
            # we intend to replace so they stay out of the block
            self.sync_mempools(wait=0.1, nodes=[node, miner])
            for txid in txids_to_replace:
                miner.prioritisetransaction(txid=txid, fee_delta=-COIN)
            self.generate(miner, 1)
            # RBF the low-fee transactions
            while len(utxos_to_respend) > 0:
                u = utxos_to_respend.pop(0)
                send_tx(self.wallet, node, u, high_feerate)

        # Mine the last replacement txs
        self.sync_mempools(wait=0.1, nodes=[node, miner])
        self.generate(miner, 1)

        # Only 10% of the transactions actually confirmed at the low feerate; the
        # rest needed to be RBF'd at the high feerate, so the estimator must
        # return the high feerate.
        high_feerate_kvb = Decimal(high_feerate) / COIN * 10**3
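        # e.g. with high_feerate = 10 sat/vB and COIN = 100_000_000 this is
        # 10 / 100_000_000 * 1_000 = 0.00010000 BTC/kvB.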
        est_feerate = node.estimatesmartfee(2)["feerate"]
        assert_equal(est_feerate, high_feerate_kvb)

    def run_test(self):
        self.log.info("This test is time consuming, please be patient")
        self.log.info("Splitting inputs so we can generate tx's")

        # Split two coinbases into many small utxos
        self.start_node(0)
        self.wallet = MiniWallet(self.nodes[0])
        self.wallet.rescan_utxos()
        self.initial_split(self.nodes[0])
        self.log.info("Finished splitting")

        # Now we can connect the other nodes. We didn't want to connect them earlier,
        # so that the estimates would not be affected by the splitting transactions.
        self.start_node(1)
        self.start_node(2)
        self.connect_nodes(1, 0)
        self.connect_nodes(0, 2)
        self.connect_nodes(2, 1)
        self.sync_all()

        self.log.info("Testing estimates with single transactions.")
        self.sanity_check_estimates_range()

        # check that the effective feerate is greater than or equal to the mempoolminfee even for high mempoolminfee
        self.log.info(
            "Test fee rate estimation after restarting node with high MempoolMinFee"
        )
        self.test_feerate_mempoolminfee()

        self.log.info("Restarting node with fresh estimation")
        self.stop_node(0)
        fee_dat = os.path.join(self.nodes[0].datadir, self.chain,
                               "fee_estimates.dat")
        os.remove(fee_dat)
        self.start_node(0)
        self.connect_nodes(0, 1)
        self.connect_nodes(0, 2)

        self.log.info("Testing estimates with RBF.")
        self.sanity_check_rbf_estimates(self.confutxo + self.memutxo)

        self.log.info("Testing that fee estimation is disabled in blocksonly.")
        self.restart_node(0, ["-blocksonly"])
        assert_raises_rpc_error(-32603, "Fee estimation disabled",
                                self.nodes[0].estimatesmartfee, 2)
Example #18
class ZMQTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2
        # This test isn't testing txn relay/timing, so set whitelist on the
        # peers for instant txn relay. This speeds up the test run time 2-3x.
        self.extra_args = [["[email protected]"]] * self.num_nodes

    def skip_test_if_missing_module(self):
        self.skip_if_no_py3_zmq()
        self.skip_if_no_bitcoind_zmq()

    def run_test(self):
        self.wallet = MiniWallet(self.nodes[0])
        self.ctx = zmq.Context()
        try:
            self.test_basic()
            self.test_sequence()
            self.test_mempool_sync()
            self.test_reorg()
            self.test_multiple_interfaces()
            self.test_ipv6()
        finally:
            # Destroy the ZMQ context.
            self.log.debug("Destroying ZMQ context")
            self.ctx.destroy(linger=None)

    # Restart node with the specified zmq notifications enabled, subscribe to
    # all of them and return the corresponding ZMQSubscriber objects.
    def setup_zmq_test(self,
                       services,
                       *,
                       recv_timeout=60,
                       sync_blocks=True,
                       ipv6=False):
        subscribers = []
        for topic, address in services:
            socket = self.ctx.socket(zmq.SUB)
            if ipv6:
                socket.setsockopt(zmq.IPV6, 1)
            subscribers.append(ZMQSubscriber(socket, topic.encode()))

        self.restart_node(
            0, [f"-zmqpub{topic}={address}"
                for topic, address in services] + self.extra_args[0])

        for i, sub in enumerate(subscribers):
            sub.socket.connect(services[i][1])

        # Ensure that all zmq publisher notification interfaces are ready by
        # running the following "sync up" procedure:
        #   1. Generate a block on the node
        #   2. Try to receive the corresponding notification on all subscribers
        #   3. If all subscribers get the message within the timeout (1 second),
        #      we are done, otherwise repeat starting from step 1
        for sub in subscribers:
            sub.socket.set(zmq.RCVTIMEO, 1000)
        while True:
            test_block = ZMQTestSetupBlock(self, self.nodes[0])
            recv_failed = False
            for sub in subscribers:
                try:
                    while not test_block.caused_notification(
                            sub.receive().hex()):
                        self.log.debug(
                            "Ignoring sync-up notification for previously generated block."
                        )
                except zmq.error.Again:
                    self.log.debug(
                        "Didn't receive sync-up notification, trying again.")
                    recv_failed = True
            if not recv_failed:
                self.log.debug(
                    "ZMQ sync-up completed, all subscribers are ready.")
                break

        # set subscriber's desired timeout for the test
        for sub in subscribers:
            sub.socket.set(zmq.RCVTIMEO, recv_timeout * 1000)

        self.connect_nodes(0, 1)
        if sync_blocks:
            self.sync_blocks()

        return subscribers

    def test_basic(self):

        # Invalid zmq arguments don't take down the node, see #17185.
        self.restart_node(0, ["-zmqpubrawtx=foo", "-zmqpubhashtx=bar"])

        address = 'tcp://127.0.0.1:28332'
        subs = self.setup_zmq_test([
            (topic, address)
            for topic in ["hashblock", "hashtx", "rawblock", "rawtx"]
        ])

        hashblock = subs[0]
        hashtx = subs[1]
        rawblock = subs[2]
        rawtx = subs[3]

        num_blocks = 5
        self.log.info(
            f"Generate {num_blocks} blocks (and {num_blocks} coinbase txes)")
        genhashes = self.generatetoaddress(self.nodes[0], num_blocks,
                                           ADDRESS_BCRT1_UNSPENDABLE)

        for x in range(num_blocks):
            # Should receive the coinbase txid.
            txid = hashtx.receive()

            # Should receive the coinbase raw transaction.
            hex = rawtx.receive()
            tx = CTransaction()
            tx.deserialize(BytesIO(hex))
            tx.calc_sha256()
            assert_equal(tx.hash, txid.hex())

            # Should receive the generated raw block.
            block = rawblock.receive()
            assert_equal(genhashes[x], hash256_reversed(block[:80]).hex())

            # Should receive the generated block hash.
            hash = hashblock.receive().hex()
            assert_equal(genhashes[x], hash)
            # The block should only have the coinbase txid.
            assert_equal([txid.hex()], self.nodes[1].getblock(hash)["tx"])

        self.wallet.rescan_utxos()
        self.log.info("Wait for tx from second node")
        payment_tx = self.wallet.send_self_transfer(from_node=self.nodes[1])
        payment_txid = payment_tx['txid']
        self.sync_all()
        # Should receive the broadcasted txid.
        txid = hashtx.receive()
        assert_equal(payment_txid, txid.hex())

        # Should receive the broadcasted raw transaction.
        hex = rawtx.receive()
        assert_equal(payment_tx['wtxid'], hash256_reversed(hex).hex())

        # Mining a block with this tx should result in a second notification,
        # after the coinbase tx notification
        self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)
        hashtx.receive()
        txid = hashtx.receive()
        assert_equal(payment_txid, txid.hex())

        self.log.info("Test the getzmqnotifications RPC")
        assert_equal(self.nodes[0].getzmqnotifications(), [
            {
                "type": "pubhashblock",
                "address": address,
                "hwm": 1000
            },
            {
                "type": "pubhashtx",
                "address": address,
                "hwm": 1000
            },
            {
                "type": "pubrawblock",
                "address": address,
                "hwm": 1000
            },
            {
                "type": "pubrawtx",
                "address": address,
                "hwm": 1000
            },
        ])

        assert_equal(self.nodes[1].getzmqnotifications(), [])

    def test_reorg(self):

        address = 'tcp://127.0.0.1:28333'

        # Should only notify the tip if a reorg occurs
        hashblock, hashtx = self.setup_zmq_test(
            [(topic, address) for topic in ["hashblock", "hashtx"]],
            recv_timeout=2)  # 2 second timeout to check end of notifications
        self.disconnect_nodes(0, 1)

        # Generate 1 block in nodes[0] with 1 mempool tx and receive all notifications
        payment_txid = self.wallet.send_self_transfer(
            from_node=self.nodes[0])['txid']
        disconnect_block = self.generatetoaddress(self.nodes[0],
                                                  1,
                                                  ADDRESS_BCRT1_UNSPENDABLE,
                                                  sync_fun=self.no_op)[0]
        disconnect_cb = self.nodes[0].getblock(disconnect_block)["tx"][0]
        assert_equal(self.nodes[0].getbestblockhash(),
                     hashblock.receive().hex())
        assert_equal(hashtx.receive().hex(), payment_txid)
        assert_equal(hashtx.receive().hex(), disconnect_cb)

        # Generate 2 blocks in nodes[1] to a different address to ensure split
        connect_blocks = self.generatetoaddress(self.nodes[1],
                                                2,
                                                ADDRESS_BCRT1_P2WSH_OP_TRUE,
                                                sync_fun=self.no_op)

        # nodes[0] will reorg chain after connecting back nodes[1]
        self.connect_nodes(0, 1)
        self.sync_blocks()  # tx in mempool valid but not advertised

        # Should receive nodes[1] tip
        assert_equal(self.nodes[1].getbestblockhash(),
                     hashblock.receive().hex())

        # During the reorg:
        # First get notifications for the old payment transaction and the
        # disconnected block's coinbase
        assert_equal(hashtx.receive().hex(), payment_txid)
        assert_equal(hashtx.receive().hex(), disconnect_cb)
        # And the payment transaction again due to mempool entry
        assert_equal(hashtx.receive().hex(), payment_txid)
        assert_equal(hashtx.receive().hex(), payment_txid)
        # And the new connected coinbases
        for i in [0, 1]:
            assert_equal(hashtx.receive().hex(),
                         self.nodes[1].getblock(connect_blocks[i])["tx"][0])

        # If we do a simple invalidate, we announce the disconnected coinbase
        self.nodes[0].invalidateblock(connect_blocks[1])
        assert_equal(hashtx.receive().hex(),
                     self.nodes[1].getblock(connect_blocks[1])["tx"][0])
        # And the current tip
        assert_equal(hashtx.receive().hex(),
                     self.nodes[1].getblock(connect_blocks[0])["tx"][0])

    def test_sequence(self):
        """
        Sequence zmq notifications give every blockhash and txhash in order
        of processing, regardless of IBD, re-orgs, etc.
        Format of messages:
        <32-byte hash>C :                 Blockhash connected
        <32-byte hash>D :                 Blockhash disconnected
        <32-byte hash>R<8-byte LE uint> : Transactionhash removed from mempool for non-block inclusion reason
        <32-byte hash>A<8-byte LE uint> : Transactionhash added to mempool
        """
        self.log.info("Testing 'sequence' publisher")
        [seq] = self.setup_zmq_test([("sequence", "tcp://127.0.0.1:28333")])
        self.disconnect_nodes(0, 1)

        # Mempool sequence number starts at 1
        seq_num = 1

        # Generate 1 block in nodes[0] and receive all notifications
        dc_block = self.generatetoaddress(self.nodes[0],
                                          1,
                                          ADDRESS_BCRT1_UNSPENDABLE,
                                          sync_fun=self.no_op)[0]

        # Note: We are not notified of any block transactions, coinbase or mined
        assert_equal((self.nodes[0].getbestblockhash(), "C", None),
                     seq.receive_sequence())

        # Generate 2 blocks in nodes[1] to a different address to ensure a chain split
        self.generatetoaddress(self.nodes[1],
                               2,
                               ADDRESS_BCRT1_P2WSH_OP_TRUE,
                               sync_fun=self.no_op)

        # nodes[0] will reorg chain after connecting back nodes[1]
        self.connect_nodes(0, 1)

        # Then we receive all block (dis)connect notifications for the 2 block reorg
        assert_equal((dc_block, "D", None), seq.receive_sequence())
        block_count = self.nodes[1].getblockcount()
        assert_equal((self.nodes[1].getblockhash(block_count - 1), "C", None),
                     seq.receive_sequence())
        assert_equal((self.nodes[1].getblockhash(block_count), "C", None),
                     seq.receive_sequence())

        self.log.info("Wait for tx from second node")
        payment_tx = self.wallet.send_self_transfer(from_node=self.nodes[1])
        payment_txid = payment_tx['txid']
        self.sync_all()
        self.log.info(
            "Testing sequence notifications with mempool sequence values")

        # Should receive the broadcasted txid.
        assert_equal((payment_txid, "A", seq_num), seq.receive_sequence())
        seq_num += 1

        self.log.info("Testing RBF notification")
        # Replace it to test eviction/addition notification
        payment_tx['tx'].vout[0].nValue -= 1000
        rbf_txid = self.nodes[1].sendrawtransaction(
            payment_tx['tx'].serialize().hex())
        self.sync_all()
        assert_equal((payment_txid, "R", seq_num), seq.receive_sequence())
        seq_num += 1
        assert_equal((rbf_txid, "A", seq_num), seq.receive_sequence())
        seq_num += 1

        # A txid doesn't get published again when it is mined; make a block and a
        # tx to "flush" that possibility. The mempool sequence number does, however,
        # go up by the number of transactions removed from the mempool by the block
        # that mined them.
        mempool_size = len(self.nodes[0].getrawmempool())
        c_block = self.generatetoaddress(self.nodes[0], 1,
                                         ADDRESS_BCRT1_UNSPENDABLE)[0]
        # Make sure the number of mined transactions matches the number of txs out of mempool
        mempool_size_delta = mempool_size - len(self.nodes[0].getrawmempool())
        assert_equal(
            len(self.nodes[0].getblock(c_block)["tx"]) - 1, mempool_size_delta)
        seq_num += mempool_size_delta
        payment_txid_2 = self.wallet.send_self_transfer(
            from_node=self.nodes[1])['txid']
        self.sync_all()
        assert_equal((c_block, "C", None), seq.receive_sequence())
        assert_equal((payment_txid_2, "A", seq_num), seq.receive_sequence())
        seq_num += 1

        # Spot-check that mempool sequence values only show up in getrawmempool results when asked for
        assert type(self.nodes[0].getrawmempool()) is list
        assert type(
            self.nodes[0].getrawmempool(mempool_sequence=False)) is list
        assert "mempool_sequence" not in self.nodes[0].getrawmempool(
            verbose=True)
        assert_raises_rpc_error(
            -8, "Verbose results cannot contain mempool sequence values.",
            self.nodes[0].getrawmempool, True, True)
        assert_equal(
            self.nodes[0].getrawmempool(
                mempool_sequence=True)["mempool_sequence"], seq_num)

        self.log.info("Testing reorg notifications")
        # Manually invalidate the last block to test mempool re-entry
        # N.B. This part could be made more lenient in exact ordering
        # since it greatly depends on inner-workings of blocks/mempool
        # during "deep" re-orgs. Probably should "re-construct"
        # blockchain/mempool state from notifications instead.
        block_count = self.nodes[0].getblockcount()
        best_hash = self.nodes[0].getbestblockhash()
        self.nodes[0].invalidateblock(best_hash)
        sleep(2)  # Leave a bit of room for the mempool to catch up after the invalidation

        # Make sure getrawmempool mempool_sequence results aren't "queued" but immediately reflective
        # of the time they were gathered.
        assert self.nodes[0].getrawmempool(
            mempool_sequence=True)["mempool_sequence"] > seq_num

        assert_equal((best_hash, "D", None), seq.receive_sequence())
        assert_equal((rbf_txid, "A", seq_num), seq.receive_sequence())
        seq_num += 1

        # Other things may happen but aren't wallet-deterministic so we don't test for them currently
        self.nodes[0].reconsiderblock(best_hash)
        self.generatetoaddress(self.nodes[1], 1, ADDRESS_BCRT1_UNSPENDABLE)

        self.log.info("Evict mempool transaction by block conflict")
        orig_tx = self.wallet.send_self_transfer(from_node=self.nodes[0])
        orig_txid = orig_tx['txid']

        # More transactions that will simply be mined
        more_tx = []
        for _ in range(5):
            more_tx.append(
                self.wallet.send_self_transfer(from_node=self.nodes[0]))

        orig_tx['tx'].vout[0].nValue -= 1000
        bump_txid = self.nodes[0].sendrawtransaction(
            orig_tx['tx'].serialize().hex())
        # Mine the pre-bump tx
        txs_to_add = [orig_tx['hex']] + [tx['hex'] for tx in more_tx]
        block = create_block(int(self.nodes[0].getbestblockhash(), 16),
                             create_coinbase(self.nodes[0].getblockcount() +
                                             1),
                             txlist=txs_to_add)
        add_witness_commitment(block)
        block.solve()
        assert_equal(self.nodes[0].submitblock(block.serialize().hex()), None)
        tip = self.nodes[0].getbestblockhash()
        assert_equal(int(tip, 16), block.sha256)
        orig_txid_2 = self.wallet.send_self_transfer(
            from_node=self.nodes[0])['txid']

        # Flush old notifications until we reach the original entry of the tx that will be evicted
        (hash_str, label, mempool_seq) = seq.receive_sequence()
        while hash_str != orig_txid:
            (hash_str, label, mempool_seq) = seq.receive_sequence()
        mempool_seq += 1

        # Added original tx
        assert_equal(label, "A")
        # More transactions to be simply mined
        for i in range(len(more_tx)):
            assert_equal((more_tx[i]['txid'], "A", mempool_seq),
                         seq.receive_sequence())
            mempool_seq += 1
        # Bumped by rbf
        assert_equal((orig_txid, "R", mempool_seq), seq.receive_sequence())
        mempool_seq += 1
        assert_equal((bump_txid, "A", mempool_seq), seq.receive_sequence())
        mempool_seq += 1
        # Conflict announced first, then block
        assert_equal((bump_txid, "R", mempool_seq), seq.receive_sequence())
        mempool_seq += 1
        assert_equal((tip, "C", None), seq.receive_sequence())
        mempool_seq += len(more_tx)
        # Last tx
        assert_equal((orig_txid_2, "A", mempool_seq), seq.receive_sequence())
        mempool_seq += 1
        self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)
        self.sync_all(
        )  # want to make sure we didn't break "consensus" for other tests

    def test_mempool_sync(self):
        """
        Use sequence notification plus getrawmempool sequence results to "sync mempool"
        """

        self.log.info("Testing 'mempool sync' usage of sequence notifier")
        [seq] = self.setup_zmq_test([("sequence", "tcp://127.0.0.1:28333")])

        # The node's in-memory mempool sequence counter should always start at 1
        next_mempool_seq = self.nodes[0].getrawmempool(
            mempool_sequence=True)["mempool_sequence"]
        assert_equal(next_mempool_seq, 1)

        # Some transactions have been happening but we aren't consuming zmq notifications yet
        # or we lost a ZMQ message somehow and want to start over
        txs = []
        num_txs = 5
        for _ in range(num_txs):
            txs.append(self.wallet.send_self_transfer(from_node=self.nodes[1]))
        self.sync_all()

        # 1) Consume backlog until we get a mempool sequence number
        (hash_str, label, zmq_mem_seq) = seq.receive_sequence()
        while zmq_mem_seq is None:
            (hash_str, label, zmq_mem_seq) = seq.receive_sequence()

        assert label == "A" or label == "R"
        assert hash_str is not None

        # 2) We need to "seed" our view of the mempool
        mempool_snapshot = self.nodes[0].getrawmempool(mempool_sequence=True)
        mempool_view = set(mempool_snapshot["txids"])
        get_raw_seq = mempool_snapshot["mempool_sequence"]
        assert_equal(get_raw_seq, 6)
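        # With num_txs = 5 additions after the initial value of 1, the next
        # mempool sequence number is expected to be 6 here.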
        # The snapshot may be older than the latest zmq message we read off
        while zmq_mem_seq >= get_raw_seq:
            sleep(2)
            mempool_snapshot = self.nodes[0].getrawmempool(
                mempool_sequence=True)
            mempool_view = set(mempool_snapshot["txids"])
            get_raw_seq = mempool_snapshot["mempool_sequence"]

        # Things continue to happen in the "interim" while waiting for snapshot results
        # We have node 0 do all these to avoid p2p races with RBF announcements
        for _ in range(num_txs):
            txs.append(self.wallet.send_self_transfer(from_node=self.nodes[0]))
        txs[-1]['tx'].vout[0].nValue -= 1000
        self.nodes[0].sendrawtransaction(txs[-1]['tx'].serialize().hex())
        self.sync_all()
        self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)
        final_txid = self.wallet.send_self_transfer(
            from_node=self.nodes[0])['txid']

        # 3) Consume ZMQ backlog until we get to "now" for the mempool snapshot
        while True:
            if zmq_mem_seq == get_raw_seq - 1:
                break
            (hash_str, label, mempool_sequence) = seq.receive_sequence()
            if mempool_sequence is not None:
                zmq_mem_seq = mempool_sequence
                if zmq_mem_seq > get_raw_seq:
                    raise Exception(
                        f"We somehow jumped mempool sequence numbers! zmq_mem_seq: {zmq_mem_seq} > get_raw_seq: {get_raw_seq}"
                    )

        # 4) Moving forward, we apply the delta to our local view
        #    remaining txs(5) + 1 rbf(A+R) + 1 block connect + 1 final tx
        expected_sequence = get_raw_seq
        r_gap = 0
        for _ in range(num_txs + 2 + 1 + 1):
            (hash_str, label, mempool_sequence) = seq.receive_sequence()
            if mempool_sequence is not None:
                if mempool_sequence != expected_sequence:
                    # Detected "R" gap, means this a conflict eviction, and mempool tx are being evicted before its
                    # position in the incoming block message "C"
                    if label == "R":
                        assert mempool_sequence > expected_sequence
                        r_gap += mempool_sequence - expected_sequence
                    else:
                        raise Exception(
                            f"WARNING: txhash has unexpected mempool sequence value: {mempool_sequence} vs expected {expected_sequence}"
                        )
            if label == "A":
                assert hash_str not in mempool_view
                mempool_view.add(hash_str)
                expected_sequence = mempool_sequence + 1
            elif label == "R":
                assert hash_str in mempool_view
                mempool_view.remove(hash_str)
                expected_sequence = mempool_sequence + 1
            elif label == "C":
                # (Attempt to) remove all txids from known block connects
                block_txids = self.nodes[0].getblock(hash_str)["tx"][1:]
                for txid in block_txids:
                    if txid in mempool_view:
                        expected_sequence += 1
                        mempool_view.remove(txid)
                expected_sequence -= r_gap
                r_gap = 0
            elif label == "D":
                # Not useful for mempool tracking per se
                continue
            else:
                raise Exception("Unexpected ZMQ sequence label!")

        assert_equal(self.nodes[0].getrawmempool(), [final_txid])
        assert_equal(
            self.nodes[0].getrawmempool(
                mempool_sequence=True)["mempool_sequence"], expected_sequence)

        # 5) If you miss a zmq/mempool sequence number, go back to step (2)

        self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)

    def test_multiple_interfaces(self):
        # Set up two subscribers with different addresses
        # (note that after the reorg test, syncing would fail due to different
        # chain lengths on node0 and node1; for this test we only need node0, so
        # we can disable syncing blocks on the setup)
        subscribers = self.setup_zmq_test([
            ("hashblock", "tcp://127.0.0.1:28334"),
            ("hashblock", "tcp://127.0.0.1:28335"),
        ],
                                          sync_blocks=False)

        # Generate 1 block in nodes[0] and receive all notifications
        self.generatetoaddress(self.nodes[0],
                               1,
                               ADDRESS_BCRT1_UNSPENDABLE,
                               sync_fun=self.no_op)

        # Should receive the same block hash on both subscribers
        assert_equal(self.nodes[0].getbestblockhash(),
                     subscribers[0].receive().hex())
        assert_equal(self.nodes[0].getbestblockhash(),
                     subscribers[1].receive().hex())

    def test_ipv6(self):
        if not test_ipv6_local():
            self.log.info("Skipping IPv6 test, because IPv6 is not supported.")
            return
        self.log.info("Testing IPv6")
        # Set up subscriber using IPv6 loopback address
        subscribers = self.setup_zmq_test([("hashblock", "tcp://[::1]:28332")],
                                          ipv6=True)

        # Generate 1 block in nodes[0]
        self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)

        # Should receive the same block hash
        assert_equal(self.nodes[0].getbestblockhash(),
                     subscribers[0].receive().hex())
Example #19
class MempoolPersistTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 3
        self.extra_args = [[], ["-persistmempool=0"], []]

    def run_test(self):
        self.mini_wallet = MiniWallet(self.nodes[2])
        self.mini_wallet.rescan_utxos()
        if self.is_sqlite_compiled():
            self.nodes[2].createwallet(
                wallet_name="watch",
                descriptors=True,
                disable_private_keys=True,
                load_on_startup=False,
            )
            wallet_watch = self.nodes[2].get_wallet_rpc("watch")
            assert_equal([{'success': True}], wallet_watch.importdescriptors([{'desc': self.mini_wallet.get_descriptor(), 'timestamp': 0}]))

        self.log.debug("Send 5 transactions from node2 (to its own address)")
        tx_creation_time_lower = int(time.time())
        for _ in range(5):
            last_txid = self.mini_wallet.send_self_transfer(from_node=self.nodes[2])["txid"]
        if self.is_sqlite_compiled():
            self.nodes[2].syncwithvalidationinterfacequeue()  # Flush mempool to wallet
            node2_balance = wallet_watch.getbalance()
        self.sync_all()
        tx_creation_time_higher = int(time.time())

        self.log.debug("Verify that node0 and node1 have 5 transactions in their mempools")
        assert_equal(len(self.nodes[0].getrawmempool()), 5)
        assert_equal(len(self.nodes[1].getrawmempool()), 5)

        total_fee_old = self.nodes[0].getmempoolinfo()['total_fee']

        self.log.debug("Prioritize a transaction on node0")
        fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
        assert_equal(fees['base'], fees['modified'])
        self.nodes[0].prioritisetransaction(txid=last_txid, fee_delta=1000)
        fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
        assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified'])

        self.log.info('Check the total base fee is unchanged after prioritisetransaction')
        assert_equal(total_fee_old, self.nodes[0].getmempoolinfo()['total_fee'])
        assert_equal(total_fee_old, sum(v['fees']['base'] for k, v in self.nodes[0].getrawmempool(verbose=True).items()))

        last_entry = self.nodes[0].getmempoolentry(txid=last_txid)
        tx_creation_time = last_entry['time']
        assert_greater_than_or_equal(tx_creation_time, tx_creation_time_lower)
        assert_greater_than_or_equal(tx_creation_time_higher, tx_creation_time)

        # disconnect nodes & make a txn that remains in the unbroadcast set.
        self.disconnect_nodes(0, 1)
        assert_equal(len(self.nodes[0].getpeerinfo()), 0)
        assert_equal(len(self.nodes[0].p2ps), 0)
        self.mini_wallet.send_self_transfer(from_node=self.nodes[0])

        self.log.debug("Stop-start the nodes. Verify that node0 has the transactions in its mempool and node1 does not. Verify that node2 calculates its balance correctly after loading wallet transactions.")
        self.stop_nodes()
        # Give this node a head-start, so we can be "extra-sure" that it didn't load anything later
        # Also don't store the mempool, to keep the datadir clean
        self.start_node(1, extra_args=["-persistmempool=0"])
        self.start_node(0)
        self.start_node(2)
        assert self.nodes[0].getmempoolinfo()["loaded"]  # start_node is blocking on the mempool being loaded
        assert self.nodes[2].getmempoolinfo()["loaded"]
        assert_equal(len(self.nodes[0].getrawmempool()), 6)
        assert_equal(len(self.nodes[2].getrawmempool()), 5)
        # The others have loaded their mempool. If node_1 loaded anything, we'd probably notice by now:
        assert_equal(len(self.nodes[1].getrawmempool()), 0)

        self.log.debug('Verify prioritization is loaded correctly')
        fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
        assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified'])

        self.log.debug('Verify all fields are loaded correctly')
        assert_equal(last_entry, self.nodes[0].getmempoolentry(txid=last_txid))

        # Verify accounting of mempool transactions after restart is correct
        if self.is_sqlite_compiled():
            self.nodes[2].loadwallet("watch")
            wallet_watch = self.nodes[2].get_wallet_rpc("watch")
            self.nodes[2].syncwithvalidationinterfacequeue()  # Flush mempool to wallet
            assert_equal(node2_balance, wallet_watch.getbalance())

        self.log.debug("Stop-start node0 with -persistmempool=0. Verify that it doesn't load its mempool.dat file.")
        self.stop_nodes()
        self.start_node(0, extra_args=["-persistmempool=0"])
        assert self.nodes[0].getmempoolinfo()["loaded"]
        assert_equal(len(self.nodes[0].getrawmempool()), 0)

        self.log.debug("Stop-start node0. Verify that it has the transactions in its mempool.")
        self.stop_nodes()
        self.start_node(0)
        assert self.nodes[0].getmempoolinfo()["loaded"]
        assert_equal(len(self.nodes[0].getrawmempool()), 6)

        mempooldat0 = os.path.join(self.nodes[0].datadir, self.chain, 'mempool.dat')
        mempooldat1 = os.path.join(self.nodes[1].datadir, self.chain, 'mempool.dat')
        self.log.debug("Remove the mempool.dat file. Verify that savemempool to disk via RPC re-creates it")
        os.remove(mempooldat0)
        result0 = self.nodes[0].savemempool()
        assert os.path.isfile(mempooldat0)
        assert_equal(result0['filename'], mempooldat0)

        self.log.debug("Stop nodes, make node1 use mempool.dat from node0. Verify it has 6 transactions")
        os.rename(mempooldat0, mempooldat1)
        self.stop_nodes()
        self.start_node(1, extra_args=["-persistmempool"])
        assert self.nodes[1].getmempoolinfo()["loaded"]
        assert_equal(len(self.nodes[1].getrawmempool()), 6)

        self.log.debug("Prevent bitcoind from writing mempool.dat to disk. Verify that `savemempool` fails")
        # to test the exception we are creating a tmp folder called mempool.dat.new
        # which is an implementation detail that could change and break this test
        mempooldotnew1 = mempooldat1 + '.new'
        os.mkdir(mempooldotnew1)
        assert_raises_rpc_error(-1, "Unable to dump mempool to disk", self.nodes[1].savemempool)
        os.rmdir(mempooldotnew1)

        self.test_persist_unbroadcast()

    def test_persist_unbroadcast(self):
        node0 = self.nodes[0]
        self.start_node(0)

        # clear out mempool
        self.generate(node0, 1, sync_fun=self.no_op)

        # ensure node0 doesn't have any connections
        # make a transaction that will remain in the unbroadcast set
        assert_equal(len(node0.getpeerinfo()), 0)
        assert_equal(len(node0.p2ps), 0)
        self.mini_wallet.send_self_transfer(from_node=node0)

        # shutdown, then startup with wallet disabled
        self.restart_node(0, extra_args=["-disablewallet"])

        # check that txn gets broadcast due to unbroadcast logic
        conn = node0.add_p2p_connection(P2PTxInvStore())
        node0.mockscheduler(16 * 60)  # 15 min + 1 for buffer
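        # The unbroadcast set is re-announced by a periodic task (roughly every
        # 10-15 minutes), so fast-forwarding the scheduler by 16 minutes should
        # trigger a re-broadcast attempt here.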
        self.wait_until(lambda: len(conn.get_invs()) == 1)
Example #20
class FilterTest(SyscoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.extra_args = [[
            '-peerbloomfilters',
            '-whitelist=noban@127.0.0.1',  # immediate tx relay
        ]]

    def generatetoscriptpubkey(self, scriptpubkey):
        """Helper to generate a single block to the given scriptPubKey."""
        return self.generatetodescriptor(self.nodes[0], 1,
                                         f'raw({scriptpubkey.hex()})')[0]

    def test_size_limits(self, filter_peer):
        self.log.info('Check that too large filter is rejected')
        with self.nodes[0].assert_debug_log(['Misbehaving']):
            filter_peer.send_and_ping(
                msg_filterload(data=b'\xbb' * (MAX_BLOOM_FILTER_SIZE + 1)))

        self.log.info('Check that max size filter is accepted')
        with self.nodes[0].assert_debug_log([],
                                            unexpected_msgs=['Misbehaving']):
            filter_peer.send_and_ping(
                msg_filterload(data=b'\xbb' * (MAX_BLOOM_FILTER_SIZE)))
        filter_peer.send_and_ping(msg_filterclear())

        self.log.info(
            'Check that filter with too many hash functions is rejected')
        with self.nodes[0].assert_debug_log(['Misbehaving']):
            filter_peer.send_and_ping(
                msg_filterload(data=b'\xaa',
                               nHashFuncs=MAX_BLOOM_HASH_FUNCS + 1))

        self.log.info('Check that filter with max hash functions is accepted')
        with self.nodes[0].assert_debug_log([],
                                            unexpected_msgs=['Misbehaving']):
            filter_peer.send_and_ping(
                msg_filterload(data=b'\xaa', nHashFuncs=MAX_BLOOM_HASH_FUNCS))
        # Don't send filterclear until next two filteradd checks are done

        self.log.info(
            'Check that max size data element to add to the filter is accepted'
        )
        with self.nodes[0].assert_debug_log([],
                                            unexpected_msgs=['Misbehaving']):
            filter_peer.send_and_ping(
                msg_filteradd(data=b'\xcc' * (MAX_SCRIPT_ELEMENT_SIZE)))

        self.log.info(
            'Check that too large data element to add to the filter is rejected'
        )
        with self.nodes[0].assert_debug_log(['Misbehaving']):
            filter_peer.send_and_ping(
                msg_filteradd(data=b'\xcc' * (MAX_SCRIPT_ELEMENT_SIZE + 1)))

        filter_peer.send_and_ping(msg_filterclear())

    def test_msg_mempool(self):
        self.log.info(
            "Check that a node with bloom filters enabled services p2p mempool messages"
        )
        filter_peer = P2PBloomFilter()

        self.log.debug("Create a tx relevant to the peer before connecting")
        txid, _ = self.wallet.send_to(
            from_node=self.nodes[0],
            scriptPubKey=filter_peer.watch_script_pubkey,
            amount=9 * COIN)

        self.log.debug(
            "Send a mempool msg after connecting and check that the tx is received"
        )
        self.nodes[0].add_p2p_connection(filter_peer)
        filter_peer.send_and_ping(filter_peer.watch_filter_init)
        filter_peer.send_message(msg_mempool())
        filter_peer.wait_for_tx(txid)

    def test_frelay_false(self, filter_peer):
        self.log.info(
            "Check that a node with fRelay set to false does not receive invs until the filter is set"
        )
        filter_peer.tx_received = False
        self.wallet.send_to(from_node=self.nodes[0],
                            scriptPubKey=filter_peer.watch_script_pubkey,
                            amount=9 * COIN)
        # Sync to make sure that p2p delays are not the reason filter_peer doesn't receive the tx
        filter_peer.sync_with_ping()
        assert not filter_peer.tx_received

        # Clear the mempool so that this transaction does not impact subsequent tests
        self.generate(self.nodes[0], 1, sync_fun=self.no_op)

    def test_filter(self, filter_peer):
        # Set the bloomfilter using filterload
        filter_peer.send_and_ping(filter_peer.watch_filter_init)
        # If fRelay is not already True, sending filterload sets it to True
        assert self.nodes[0].getpeerinfo()[0]['relaytxes']

        self.log.info(
            'Check that we receive merkleblock and tx if the filter matches a tx in a block'
        )
        block_hash = self.generatetoscriptpubkey(
            filter_peer.watch_script_pubkey)
        txid = self.nodes[0].getblock(block_hash)['tx'][0]
        filter_peer.wait_for_merkleblock(block_hash)
        filter_peer.wait_for_tx(txid)

        self.log.info(
            'Check that we only receive a merkleblock if the filter does not match a tx in a block'
        )
        filter_peer.tx_received = False
        block_hash = self.generatetoscriptpubkey(random_p2wpkh())
        filter_peer.wait_for_merkleblock(block_hash)
        assert not filter_peer.tx_received

        self.log.info(
            'Check that we do not receive a tx if the filter does not match a mempool tx'
        )
        filter_peer.merkleblock_received = False
        filter_peer.tx_received = False
        self.wallet.send_to(from_node=self.nodes[0],
                            scriptPubKey=random_p2wpkh(),
                            amount=7 * COIN)
        filter_peer.sync_send_with_ping()
        assert not filter_peer.merkleblock_received
        assert not filter_peer.tx_received

        self.log.info(
            'Check that we receive a tx if the filter matches a mempool tx')
        filter_peer.merkleblock_received = False
        txid, _ = self.wallet.send_to(
            from_node=self.nodes[0],
            scriptPubKey=filter_peer.watch_script_pubkey,
            amount=9 * COIN)
        filter_peer.wait_for_tx(txid)
        assert not filter_peer.merkleblock_received

        self.log.info(
            'Check that after deleting filter all txs get relayed again')
        filter_peer.send_and_ping(msg_filterclear())
        for _ in range(5):
            txid, _ = self.wallet.send_to(from_node=self.nodes[0],
                                          scriptPubKey=random_p2wpkh(),
                                          amount=7 * COIN)
            filter_peer.wait_for_tx(txid)

        self.log.info(
            'Check that request for filtered blocks is ignored if no filter is set'
        )
        filter_peer.merkleblock_received = False
        filter_peer.tx_received = False
        with self.nodes[0].assert_debug_log(
                expected_msgs=['received getdata']):
            block_hash = self.generatetoscriptpubkey(random_p2wpkh())
            filter_peer.wait_for_inv([CInv(MSG_BLOCK, int(block_hash, 16))])
            filter_peer.sync_with_ping()
            assert not filter_peer.merkleblock_received
            assert not filter_peer.tx_received

        self.log.info(
            'Check that sending "filteradd" if no filter is set is treated as misbehavior'
        )
        with self.nodes[0].assert_debug_log(['Misbehaving']):
            filter_peer.send_and_ping(msg_filteradd(data=b'letsmisbehave'))

        self.log.info(
            "Check that division-by-zero remote crash bug [CVE-2013-5700] is fixed"
        )
        filter_peer.send_and_ping(msg_filterload(data=b'', nHashFuncs=1))
        filter_peer.send_and_ping(
            msg_filteradd(data=b'letstrytocrashthisnode'))
        self.nodes[0].disconnect_p2ps()

    def run_test(self):
        self.wallet = MiniWallet(self.nodes[0])
        self.wallet.rescan_utxos()

        filter_peer = self.nodes[0].add_p2p_connection(P2PBloomFilter())
        self.log.info('Test filter size limits')
        self.test_size_limits(filter_peer)

        self.log.info('Test BIP 37 for a node with fRelay = True (default)')
        self.test_filter(filter_peer)
        self.nodes[0].disconnect_p2ps()

        self.log.info('Test BIP 37 for a node with fRelay = False')
        # Add peer but do not send version yet
        filter_peer_without_nrelay = self.nodes[0].add_p2p_connection(
            P2PBloomFilter(), send_version=False, wait_for_verack=False)
        # Send version with relay=False
        version_without_fRelay = msg_version()
        version_without_fRelay.nVersion = P2P_VERSION
        version_without_fRelay.strSubVer = P2P_SUBVERSION
        version_without_fRelay.nServices = P2P_SERVICES
        version_without_fRelay.relay = 0
        filter_peer_without_nrelay.send_message(version_without_fRelay)
        filter_peer_without_nrelay.wait_for_verack()
        assert not self.nodes[0].getpeerinfo()[0]['relaytxes']
        self.test_frelay_false(filter_peer_without_nrelay)
        self.test_filter(filter_peer_without_nrelay)

        self.test_msg_mempool()
Example #21
class ChainstateWriteCrashTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 4
        self.rpc_timeout = 480
        self.supports_cli = False

        # Set -maxmempool=0 to turn off mempool memory sharing with dbcache
        # Set -rpcservertimeout=900 to reduce socket disconnects in this
        # long-running test
        self.base_args = ["-limitdescendantsize=0", "-maxmempool=0", "-rpcservertimeout=900", "-dbbatchsize=200000"]

        # Set different crash ratios and cache sizes.  Note that not all of
        # -dbcache goes to the in-memory coins cache.
        self.node0_args = ["-dbcrashratio=8", "-dbcache=4"] + self.base_args
        self.node1_args = ["-dbcrashratio=16", "-dbcache=8"] + self.base_args
        self.node2_args = ["-dbcrashratio=24", "-dbcache=16"] + self.base_args
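        # -dbcrashratio=N makes the node simulate a crash during roughly 1-in-N
        # database flushes; combined with the small -dbcache values above this
        # forces frequent flushes and therefore frequent simulated crashes.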

        # Node3 is a normal node with default args, except that it will mine full blocks
        # and accept non-standard txs (e.g. txs with "dust" outputs)
        self.node3_args = ["-blockmaxweight=4000000", "-acceptnonstdtxn"]
        self.extra_args = [self.node0_args, self.node1_args, self.node2_args, self.node3_args]

    def setup_network(self):
        self.add_nodes(self.num_nodes, extra_args=self.extra_args)
        self.start_nodes()
        # Leave them unconnected; we'll use submitblock directly in this test

    def restart_node(self, node_index, expected_tip):
        """Start up a given node id, wait for the tip to reach the given block hash, and calculate the utxo hash.

        Exceptions on startup should indicate node crash (due to -dbcrashratio), in which case we try again. Give up
        after 120 seconds. Returns the utxo hash of the given node."""

        time_start = time.time()
        while time.time() - time_start < 120:
            try:
                # Any of these RPC calls could throw due to node crash
                self.start_node(node_index)
                self.nodes[node_index].waitforblock(expected_tip)
                utxo_hash = self.nodes[node_index].gettxoutsetinfo()['hash_serialized_2']
                return utxo_hash
            except:
                # An exception here should mean the node is about to crash.
                # If bitcoind exits, then try again.  wait_for_node_exit()
                # should raise an exception if bitcoind doesn't exit.
                self.wait_for_node_exit(node_index, timeout=10)
            self.crashed_on_restart += 1
            time.sleep(1)

        # If we got here, bitcoind isn't coming back up on restart.  Could be a
        # bug in bitcoind, or we've gotten unlucky with our dbcrash ratio --
        # perhaps we generated a test case that blew up our cache?
        # TODO: If this happens a lot, we should try to restart without -dbcrashratio
        # and make sure that recovery happens.
        raise AssertionError(f"Unable to successfully restart node {node_index} in allotted time")

    def submit_block_catch_error(self, node_index, block):
        """Try submitting a block to the given node.

        Catch any exceptions that indicate the node has crashed.
        Returns true if the block was submitted successfully; false otherwise."""

        try:
            self.nodes[node_index].submitblock(block)
            return True
        except (http.client.CannotSendRequest, http.client.RemoteDisconnected) as e:
            self.log.debug(f"node {node_index} submitblock raised exception: {e}")
            return False
        except OSError as e:
            self.log.debug(f"node {node_index} submitblock raised OSError exception: errno={e.errno}")
            if e.errno in [errno.EPIPE, errno.ECONNREFUSED, errno.ECONNRESET]:
                # The node has likely crashed
                return False
            else:
                # Unexpected exception, raise
                raise

    def sync_node3blocks(self, block_hashes):
        """Use submitblock to sync node3's chain with the other nodes

        If submitblock fails, restart the node and get the new utxo hash.
        If any nodes crash while updating, we'll compare utxo hashes to
        ensure recovery was successful."""

        node3_utxo_hash = self.nodes[3].gettxoutsetinfo()['hash_serialized_2']

        # Retrieve all the blocks from node3
        blocks = []
        for block_hash in block_hashes:
            blocks.append([block_hash, self.nodes[3].getblock(block_hash, 0)])

        # Deliver each block to each other node
        for i in range(3):
            nodei_utxo_hash = None
            self.log.debug(f"Syncing blocks to node {i}")
            for (block_hash, block) in blocks:
                # Get the block from node3, and submit to node_i
                self.log.debug(f"submitting block {block_hash}")
                if not self.submit_block_catch_error(i, block):
                    # TODO: more carefully check that the crash is due to -dbcrashratio
                    # (change the exit code perhaps, and check that here?)
                    self.wait_for_node_exit(i, timeout=30)
                    self.log.debug(f"Restarting node {i} after block hash {block_hash}")
                    nodei_utxo_hash = self.restart_node(i, block_hash)
                    assert nodei_utxo_hash is not None
                    self.restart_counts[i] += 1
                else:
                    # Clear it out after successful submitblock calls -- the cached
                    # utxo hash will no longer be correct
                    nodei_utxo_hash = None

            # Check that the utxo hash matches node3's utxo set
            # NOTE: we only check the utxo set if we had to restart the node
            # after the last block submitted:
            # - checking the utxo hash causes a cache flush, which we don't
            # want to do every time; so
            # - we only update the utxo cache after a node restart, since flushing
            # the cache is a no-op at that point
            if nodei_utxo_hash is not None:
                self.log.debug(f"Checking txoutsetinfo matches for node {i}")
                assert_equal(nodei_utxo_hash, node3_utxo_hash)

    def verify_utxo_hash(self):
        """Verify that the utxo hash of each node matches node3.

        Restart any nodes that crash while querying."""
        node3_utxo_hash = self.nodes[3].gettxoutsetinfo()['hash_serialized_2']
        self.log.info("Verifying utxo hash matches for all nodes")

        for i in range(3):
            try:
                nodei_utxo_hash = self.nodes[i].gettxoutsetinfo()['hash_serialized_2']
            except OSError:
                # probably a crash on db flushing
                nodei_utxo_hash = self.restart_node(i, self.nodes[3].getbestblockhash())
            assert_equal(nodei_utxo_hash, node3_utxo_hash)

    def generate_small_transactions(self, node, count, utxo_list):
        FEE = 1000  # TODO: replace this with node relay fee based calculation
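        # Each tx created below spends two inputs into three outputs, paying
        # FEE // 3 sats per output (999 sats in total with FEE = 1000).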
        num_transactions = 0
        random.shuffle(utxo_list)
        while len(utxo_list) >= 2 and num_transactions < count:
            utxos_to_spend = [utxo_list.pop() for _ in range(2)]
            input_amount = int(sum([utxo['value'] for utxo in utxos_to_spend]) * COIN)
            if input_amount < FEE:
                # Sanity check -- if we chose inputs that are too small, skip
                continue

            tx = self.wallet.create_self_transfer_multi(
                from_node=node,
                utxos_to_spend=utxos_to_spend,
                num_outputs=3,
                fee_per_output=FEE // 3)

            # Send the transaction to get into the mempool (skip fee-checks to run faster)
            node.sendrawtransaction(hexstring=tx.serialize().hex(), maxfeerate=0)
            num_transactions += 1

    def run_test(self):
        self.wallet = MiniWallet(self.nodes[3])
        self.wallet.rescan_utxos()
        initial_height = self.nodes[3].getblockcount()
        self.generate(self.nodes[3], COINBASE_MATURITY, sync_fun=self.no_op)

        # Track test coverage statistics
        self.restart_counts = [0, 0, 0]  # Track the restarts for nodes 0-2
        self.crashed_on_restart = 0      # Track count of crashes during recovery

        # Start by creating a lot of utxos on node3
        utxo_list = self.wallet.send_self_transfer_multi(from_node=self.nodes[3], num_outputs=5000)['new_utxos']
        self.generate(self.nodes[3], 1, sync_fun=self.no_op)
        assert_equal(len(self.nodes[3].getrawmempool()), 0)
        self.log.info(f"Prepped {len(utxo_list)} utxo entries")

        # Sync these blocks with the other nodes
        block_hashes_to_sync = []
        for height in range(initial_height + 1, self.nodes[3].getblockcount() + 1):
            block_hashes_to_sync.append(self.nodes[3].getblockhash(height))

        self.log.debug(f"Syncing {len(block_hashes_to_sync)} blocks with other nodes")
        # Syncing the blocks could cause nodes to crash, so the test begins here.
        self.sync_node3blocks(block_hashes_to_sync)

        starting_tip_height = self.nodes[3].getblockcount()

        # Main test loop:
        # each time through the loop, generate a bunch of transactions,
        # and then either mine a single new block on the tip or do a reorg of some size.
        for i in range(40):
            self.log.info(f"Iteration {i}, generating 2500 transactions {self.restart_counts}")
            # Generate a bunch of small-ish transactions
            self.generate_small_transactions(self.nodes[3], 2500, utxo_list)
            # Pick a random block between current tip, and starting tip
            current_height = self.nodes[3].getblockcount()
            random_height = random.randint(starting_tip_height, current_height)
            self.log.debug(f"At height {current_height}, considering height {random_height}")
            if random_height > starting_tip_height:
                # Randomly reorg from this point with some probability (1/4 for
                # tip, 1/5 for tip-1, ...)
                if random.random() < 1.0 / (current_height + 4 - random_height):
                    self.log.debug(f"Invalidating block at height {random_height}")
                    self.nodes[3].invalidateblock(self.nodes[3].getblockhash(random_height))

            # Now generate new blocks until we pass the old tip height
            self.log.debug("Mining longer tip")
            block_hashes = []
            while current_height + 1 > self.nodes[3].getblockcount():
                block_hashes.extend(self.generatetoaddress(
                    self.nodes[3],
                    nblocks=min(10, current_height + 1 - self.nodes[3].getblockcount()),
                    # new address to avoid mining a block that has just been invalidated
                    address=getnewdestination()[2],
                    sync_fun=self.no_op,
                ))
            self.log.debug(f"Syncing {len(block_hashes)} new blocks...")
            self.sync_node3blocks(block_hashes)
            self.wallet.rescan_utxos()
            utxo_list = self.wallet.get_utxos()
            self.log.debug(f"MiniWallet utxo count: {len(utxo_list)}")

        # Check that the utxo hashes agree with node3
        # Useful side effect: each utxo cache gets flushed here, so that we
        # won't get crashes on shutdown at the end of the test.
        self.verify_utxo_hash()

        # Check the test coverage
        self.log.info(f"Restarted nodes: {self.restart_counts}; crashes on restart: {self.crashed_on_restart}")

        # If no nodes were restarted, we didn't test anything.
        assert self.restart_counts != [0, 0, 0]

        # Make sure we tested the case of crash-during-recovery.
        assert self.crashed_on_restart > 0

        # Warn if any of the nodes escaped restart.
        for i in range(3):
            if self.restart_counts[i] == 0:
                self.log.warning(f"Node {i} never crashed during utxo flush!")
Example #22
    def run_test(self):
        wallet = MiniWallet(self.nodes[0])

        # Start with a 200 block chain
        assert_equal(self.nodes[0].getblockcount(), 200)

        self.log.info("Add 4 coinbase utxos to the miniwallet")
        # Block 76 contains the first spendable coinbase txs.
        first_block = 76
        wallet.rescan_utxos()

        # Three scenarios for re-orging coinbase spends in the memory pool:
        # 1. Direct coinbase spend  :  spend_1
        # 2. Indirect (coinbase spend in chain, child in mempool) : spend_2 and spend_2_1
        # 3. Indirect (coinbase and child both in chain) : spend_3 and spend_3_1
        # Use invalidateblock to make all of the above coinbase spends invalid (immature coinbase),
        # and make sure the mempool code behaves correctly.
        b = [
            self.nodes[0].getblockhash(n)
            for n in range(first_block, first_block + 4)
        ]
        coinbase_txids = [self.nodes[0].getblock(h)['tx'][0] for h in b]
        utxo_1 = wallet.get_utxo(txid=coinbase_txids[1])
        utxo_2 = wallet.get_utxo(txid=coinbase_txids[2])
        utxo_3 = wallet.get_utxo(txid=coinbase_txids[3])
        self.log.info(
            "Create three transactions spending from coinbase utxos: spend_1, spend_2, spend_3"
        )
        spend_1 = wallet.create_self_transfer(from_node=self.nodes[0],
                                              utxo_to_spend=utxo_1)
        spend_2 = wallet.create_self_transfer(from_node=self.nodes[0],
                                              utxo_to_spend=utxo_2)
        spend_3 = wallet.create_self_transfer(from_node=self.nodes[0],
                                              utxo_to_spend=utxo_3)

        self.log.info(
            "Create another transaction which is time-locked to two blocks in the future"
        )
        utxo = wallet.get_utxo(txid=coinbase_txids[0])
        timelock_tx = wallet.create_self_transfer(
            from_node=self.nodes[0],
            utxo_to_spend=utxo,
            mempool_valid=False,
            locktime=self.nodes[0].getblockcount() + 2)['hex']

        self.log.info(
            "Check that the time-locked transaction is too immature to spend")
        assert_raises_rpc_error(-26, "non-final",
                                self.nodes[0].sendrawtransaction, timelock_tx)

        self.log.info("Broadcast and mine spend_2 and spend_3")
        wallet.sendrawtransaction(from_node=self.nodes[0],
                                  tx_hex=spend_2['hex'])
        wallet.sendrawtransaction(from_node=self.nodes[0],
                                  tx_hex=spend_3['hex'])
        self.log.info("Generate a block")
        self.generate(self.nodes[0], 1)
        self.log.info(
            "Check that time-locked transaction is still too immature to spend"
        )
        assert_raises_rpc_error(-26, 'non-final',
                                self.nodes[0].sendrawtransaction, timelock_tx)

        self.log.info("Create spend_2_1 and spend_3_1")
        spend_2_utxo = wallet.get_utxo(txid=spend_2['txid'])
        spend_2_1 = wallet.create_self_transfer(from_node=self.nodes[0],
                                                utxo_to_spend=spend_2_utxo)
        spend_3_utxo = wallet.get_utxo(txid=spend_3['txid'])
        spend_3_1 = wallet.create_self_transfer(from_node=self.nodes[0],
                                                utxo_to_spend=spend_3_utxo)

        self.log.info("Broadcast and mine spend_3_1")
        spend_3_1_id = self.nodes[0].sendrawtransaction(spend_3_1['hex'])
        self.log.info("Generate a block")
        last_block = self.generate(self.nodes[0], 1)
        # generate() implicitly syncs blocks, so that peer 1 gets the block before timelock_tx
        # Otherwise, peer 1 would put the timelock_tx in m_recent_rejects

        self.log.info("The time-locked transaction can now be spent")
        timelock_tx_id = self.nodes[0].sendrawtransaction(timelock_tx)

        self.log.info("Add spend_1 and spend_2_1 to the mempool")
        spend_1_id = self.nodes[0].sendrawtransaction(spend_1['hex'])
        spend_2_1_id = self.nodes[0].sendrawtransaction(spend_2_1['hex'])

        assert_equal(set(self.nodes[0].getrawmempool()),
                     {spend_1_id, spend_2_1_id, timelock_tx_id})
        self.sync_all()

        self.log.info("invalidate the last block")
        for node in self.nodes:
            node.invalidateblock(last_block[0])
        self.log.info(
            "The time-locked transaction is now too immature and has been removed from the mempool"
        )
        self.log.info(
            "spend_3_1 has been re-orged out of the chain and is back in the mempool"
        )
        assert_equal(set(self.nodes[0].getrawmempool()),
                     {spend_1_id, spend_2_1_id, spend_3_1_id})

        self.log.info(
            "Use invalidateblock to re-org back and make all those coinbase spends immature/invalid"
        )
        b = self.nodes[0].getblockhash(first_block + 100)
        for node in self.nodes:
            node.invalidateblock(b)

        self.log.info("Check that the mempool is empty")
        assert_equal(set(self.nodes[0].getrawmempool()), set())
        self.sync_all()
Example #23
class MempoolAcceptanceTest(UmkoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.extra_args = [[
            '-txindex','-permitbaremultisig=0',
        ]] * self.num_nodes
        self.supports_cli = False

    def check_mempool_result(self, result_expected, *args, **kwargs):
        """Wrapper to check result of testmempoolaccept on node_0's mempool"""
        result_test = self.nodes[0].testmempoolaccept(*args, **kwargs)
        for r in result_test:
            r.pop('wtxid')  # Skip check for now
        assert_equal(result_expected, result_test)
        assert_equal(self.nodes[0].getmempoolinfo()['size'], self.mempool_size)  # Must not change mempool state

    def run_test(self):
        node = self.nodes[0]
        self.wallet = MiniWallet(node)
        self.wallet.rescan_utxos()

        self.log.info('Start with empty mempool, and 200 blocks')
        self.mempool_size = 0
        assert_equal(node.getblockcount(), 200)
        assert_equal(node.getmempoolinfo()['size'], self.mempool_size)

        self.log.info('Should not accept garbage to testmempoolaccept')
        assert_raises_rpc_error(-3, 'Expected type array, got string', lambda: node.testmempoolaccept(rawtxs='ff00baar'))
        assert_raises_rpc_error(-8, 'Array must contain between 1 and 25 transactions.', lambda: node.testmempoolaccept(rawtxs=['ff22']*26))
        assert_raises_rpc_error(-8, 'Array must contain between 1 and 25 transactions.', lambda: node.testmempoolaccept(rawtxs=[]))
        assert_raises_rpc_error(-22, 'TX decode failed', lambda: node.testmempoolaccept(rawtxs=['ff00baar']))

        self.log.info('A transaction already in the blockchain')
        tx = self.wallet.create_self_transfer()['tx']  # Pick a random coin(base) to spend
        tx.vout.append(deepcopy(tx.vout[0]))
        tx.vout[0].nValue = int(0.3 * COIN)
        tx.vout[1].nValue = int(49 * COIN)
        raw_tx_in_block = tx.serialize().hex()
        txid_in_block = self.wallet.sendrawtransaction(from_node=node, tx_hex=raw_tx_in_block, maxfeerate=0)
        self.generate(node, 1)
        self.mempool_size = 0
        self.check_mempool_result(
            result_expected=[{'txid': txid_in_block, 'allowed': False, 'reject-reason': 'txn-already-known'}],
            rawtxs=[raw_tx_in_block],
        )

        self.log.info('A transaction not in the mempool')
        fee = Decimal('0.000007')
        utxo_to_spend = self.wallet.get_utxo(txid=txid_in_block)  # use 0.3 UMK UTXO
        tx = self.wallet.create_self_transfer(utxo_to_spend=utxo_to_spend, sequence=BIP125_SEQUENCE_NUMBER)['tx']
        tx.vout[0].nValue = int((Decimal('0.3') - fee) * COIN)
        raw_tx_0 = tx.serialize().hex()
        txid_0 = tx.rehash()
        self.check_mempool_result(
            result_expected=[{'txid': txid_0, 'allowed': True, 'vsize': tx.get_vsize(), 'fees': {'base': fee}}],
            rawtxs=[raw_tx_0],
        )

        self.log.info('A final transaction not in the mempool')
        output_amount = Decimal('0.025')
        tx = self.wallet.create_self_transfer(
            sequence=SEQUENCE_FINAL,
            locktime=node.getblockcount() + 2000,  # Can be anything
        )['tx']
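        # (Note: with the input's nSequence at SEQUENCE_FINAL, consensus ignores
        # nLockTime entirely, which is why the far-future locktime above still
        # yields a final transaction.)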
        tx.vout[0].nValue = int(output_amount * COIN)
        raw_tx_final = tx.serialize().hex()
        tx = tx_from_hex(raw_tx_final)
        fee_expected = Decimal('50.0') - output_amount
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': True, 'vsize': tx.get_vsize(), 'fees': {'base': fee_expected}}],
            rawtxs=[tx.serialize().hex()],
            maxfeerate=0,
        )
        node.sendrawtransaction(hexstring=raw_tx_final, maxfeerate=0)
        self.mempool_size += 1

        self.log.info('A transaction in the mempool')
        node.sendrawtransaction(hexstring=raw_tx_0)
        self.mempool_size += 1
        self.check_mempool_result(
            result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': 'txn-already-in-mempool'}],
            rawtxs=[raw_tx_0],
        )

        self.log.info('A transaction that replaces a mempool transaction')
        tx = tx_from_hex(raw_tx_0)
        tx.vout[0].nValue -= int(fee * COIN)  # Double the fee
        tx.vin[0].nSequence = BIP125_SEQUENCE_NUMBER + 1  # Now, opt out of RBF
        raw_tx_0 = tx.serialize().hex()
        txid_0 = tx.rehash()
        self.check_mempool_result(
            result_expected=[{'txid': txid_0, 'allowed': True, 'vsize': tx.get_vsize(), 'fees': {'base': (2 * fee)}}],
            rawtxs=[raw_tx_0],
        )

        self.log.info('A transaction that conflicts with an unconfirmed tx')
        # Send the transaction that replaces the mempool transaction and opts out of replaceability
        node.sendrawtransaction(hexstring=tx.serialize().hex(), maxfeerate=0)
        # take original raw_tx_0
        tx = tx_from_hex(raw_tx_0)
        tx.vout[0].nValue -= int(4 * fee * COIN)  # Set more fee
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'txn-mempool-conflict'}],
            rawtxs=[tx.serialize().hex()],
            maxfeerate=0,
        )

        self.log.info('A transaction with missing inputs, that never existed')
        tx = tx_from_hex(raw_tx_0)
        tx.vin[0].prevout = COutPoint(hash=int('ff' * 32, 16), n=14)
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'missing-inputs'}],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A transaction with missing inputs, that existed once in the past')
        tx = tx_from_hex(raw_tx_0)
        tx.vin[0].prevout.n = 1  # Set vout to 1, to spend the other outpoint (49 coins) of the in-chain-tx we want to double spend
        raw_tx_1 = tx.serialize().hex()
        txid_1 = node.sendrawtransaction(hexstring=raw_tx_1, maxfeerate=0)
        # Now spend both to "clearly hide" the outputs, i.e. remove the coins from the UTXO set by spending them
        tx = self.wallet.create_self_transfer()['tx']
        tx.vin.append(deepcopy(tx.vin[0]))
        tx.wit.vtxinwit.append(deepcopy(tx.wit.vtxinwit[0]))
        tx.vin[0].prevout = COutPoint(hash=int(txid_0, 16), n=0)
        tx.vin[1].prevout = COutPoint(hash=int(txid_1, 16), n=0)
        tx.vout[0].nValue = int(0.1 * COIN)
        raw_tx_spend_both = tx.serialize().hex()
        txid_spend_both = self.wallet.sendrawtransaction(from_node=node, tx_hex=raw_tx_spend_both, maxfeerate=0)
        self.generate(node, 1)
        self.mempool_size = 0
        # Now see if we can add the coins back to the utxo set by sending the exact txs again
        self.check_mempool_result(
            result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': 'missing-inputs'}],
            rawtxs=[raw_tx_0],
        )
        self.check_mempool_result(
            result_expected=[{'txid': txid_1, 'allowed': False, 'reject-reason': 'missing-inputs'}],
            rawtxs=[raw_tx_1],
        )

        self.log.info('Create a "reference" tx for later use')
        utxo_to_spend = self.wallet.get_utxo(txid=txid_spend_both)
        tx = self.wallet.create_self_transfer(utxo_to_spend=utxo_to_spend, sequence=SEQUENCE_FINAL)['tx']
        tx.vout[0].nValue = int(0.05 * COIN)
        raw_tx_reference = tx.serialize().hex()
        # Reference tx should be valid on itself
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': True, 'vsize': tx.get_vsize(), 'fees': { 'base': Decimal('0.1') - Decimal('0.05')}}],
            rawtxs=[tx.serialize().hex()],
            maxfeerate=0,
        )

        self.log.info('A transaction with no outputs')
        tx = tx_from_hex(raw_tx_reference)
        tx.vout = []
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-vout-empty'}],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A really large transaction')
        tx = tx_from_hex(raw_tx_reference)
        tx.vin = [tx.vin[0]] * math.ceil(MAX_BLOCK_WEIGHT // 4 / len(tx.vin[0].serialize()))
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-oversize'}],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A transaction with negative output value')
        tx = tx_from_hex(raw_tx_reference)
        tx.vout[0].nValue *= -1
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-vout-negative'}],
            rawtxs=[tx.serialize().hex()],
        )

        # The following two validations prevent overflow of the output amounts (see CVE-2010-5139).
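        # (For reference: MAX_MONEY is 21 million * COIN; rejecting any single
        # output above it, and any sum of outputs above it, keeps the signed
        # 64-bit amount arithmetic from overflowing.)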
        self.log.info('A transaction with too large output value')
        tx = tx_from_hex(raw_tx_reference)
        tx.vout[0].nValue = MAX_MONEY + 1
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-vout-toolarge'}],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A transaction with too large sum of output values')
        tx = tx_from_hex(raw_tx_reference)
        tx.vout = [tx.vout[0]] * 2
        tx.vout[0].nValue = MAX_MONEY
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-txouttotal-toolarge'}],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A transaction with duplicate inputs')
        tx = tx_from_hex(raw_tx_reference)
        tx.vin = [tx.vin[0]] * 2
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-inputs-duplicate'}],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A non-coinbase transaction with coinbase-like outpoint')
        tx = tx_from_hex(raw_tx_reference)
        tx.vin.append(CTxIn(COutPoint(hash=0, n=0xffffffff)))
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-prevout-null'}],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A coinbase transaction')
        # Pick the input of the first tx we created, so it has to be a coinbase tx
        raw_tx_coinbase_spent = node.getrawtransaction(txid=node.decoderawtransaction(hexstring=raw_tx_in_block)['vin'][0]['txid'])
        tx = tx_from_hex(raw_tx_coinbase_spent)
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'coinbase'}],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('Some nonstandard transactions')
        tx = tx_from_hex(raw_tx_reference)
        tx.nVersion = 3  # A version currently non-standard
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'version'}],
            rawtxs=[tx.serialize().hex()],
        )
        tx = tx_from_hex(raw_tx_reference)
        tx.vout[0].scriptPubKey = CScript([OP_0])  # Some non-standard script
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'scriptpubkey'}],
            rawtxs=[tx.serialize().hex()],
        )
        tx = tx_from_hex(raw_tx_reference)
        key = ECKey()
        key.generate()
        pubkey = key.get_pubkey().get_bytes()
        tx.vout[0].scriptPubKey = keys_to_multisig_script([pubkey] * 3, k=2)  # Some bare multisig script (2-of-3)
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bare-multisig'}],
            rawtxs=[tx.serialize().hex()],
        )
        tx = tx_from_hex(raw_tx_reference)
        tx.vin[0].scriptSig = CScript([OP_HASH160])  # Some not-pushonly scriptSig
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'scriptsig-not-pushonly'}],
            rawtxs=[tx.serialize().hex()],
        )
        tx = tx_from_hex(raw_tx_reference)
        tx.vin[0].scriptSig = CScript([b'a' * 1648]) # Some too large scriptSig (>1650 bytes)
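        # (Illustrative: a 1648-byte push needs a 3-byte OP_PUSHDATA2 prefix, so the
        # scriptSig serializes to 1651 bytes, just over the 1650-byte standardness limit.)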
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'scriptsig-size'}],
            rawtxs=[tx.serialize().hex()],
        )
        tx = tx_from_hex(raw_tx_reference)
        output_p2sh_burn = CTxOut(nValue=540, scriptPubKey=script_to_p2sh_script(b'burn'))
        num_scripts = 100000 // len(output_p2sh_burn.serialize())  # Use enough outputs to make the tx too large for our policy
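        # (Illustrative: each such P2SH txout serializes to 32 bytes, so the ~3125
        # outputs already push the transaction over the 100,000-vbyte standard
        # transaction size limit.)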
        tx.vout = [output_p2sh_burn] * num_scripts
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'tx-size'}],
            rawtxs=[tx.serialize().hex()],
        )
        tx = tx_from_hex(raw_tx_reference)
        tx.vout[0] = output_p2sh_burn
        tx.vout[0].nValue -= 1  # Make output smaller, such that it is dust for our policy
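        # (Illustrative: under the default dust relay fee of 3000 sat/kvB, the dust
        # threshold for a P2SH output works out to 540 sat, so 539 sat is dust.)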
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'dust'}],
            rawtxs=[tx.serialize().hex()],
        )
        tx = tx_from_hex(raw_tx_reference)
        tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'\xff'])
        tx.vout = [tx.vout[0]] * 2
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'multi-op-return'}],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A timelocked transaction')
        tx = tx_from_hex(raw_tx_reference)
        tx.vin[0].nSequence -= 1  # Should be non-max, so locktime is not ignored
        tx.nLockTime = node.getblockcount() + 1
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'non-final'}],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A transaction that is locked by BIP68 sequence logic')
        tx = tx_from_hex(raw_tx_reference)
        tx.vin[0].nSequence = 2  # We could include it in the second block mined from now, but not the very next one
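        # (BIP68: a sequence of 2 is a relative lock of 2 blocks counted from the
        # block that confirmed the spent output; the very next block is only 1 block
        # past that, so the transaction is not yet BIP68-final.)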
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'non-BIP68-final'}],
            rawtxs=[tx.serialize().hex()],
            maxfeerate=0,
        )
Example #24
class P2PBlocksOnly(UmkoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.extra_args = [["-blocksonly"]]

    def run_test(self):
        self.miniwallet = MiniWallet(self.nodes[0])
        # Add enough mature utxos to the wallet, so that all txs spend confirmed coins
        self.miniwallet.rescan_utxos()

        self.blocksonly_mode_tests()
        self.blocks_relay_conn_tests()

    def blocksonly_mode_tests(self):
        self.log.info("Tests with node running in -blocksonly mode")
        assert_equal(self.nodes[0].getnetworkinfo()['localrelay'], False)

        self.nodes[0].add_p2p_connection(P2PInterface())
        tx, txid, wtxid, tx_hex = self.check_p2p_tx_violation()

        self.log.info('Check that tx invs also violate the protocol')
        self.nodes[0].add_p2p_connection(P2PInterface())
        with self.nodes[0].assert_debug_log(['transaction (0000000000000000000000000000000000000000000000000000000000001234) inv sent in violation of protocol, disconnecting peer']):
            self.nodes[0].p2ps[0].send_message(msg_inv([CInv(t=MSG_WTX, h=0x1234)]))
            self.nodes[0].p2ps[0].wait_for_disconnect()
            del self.nodes[0].p2ps[0]

        self.log.info('Check that txs from rpc are not rejected and relayed to other peers')
        tx_relay_peer = self.nodes[0].add_p2p_connection(P2PInterface())
        assert_equal(self.nodes[0].getpeerinfo()[0]['relaytxes'], True)

        assert_equal(self.nodes[0].testmempoolaccept([tx_hex])[0]['allowed'], True)
        with self.nodes[0].assert_debug_log(['received getdata for: wtx {} peer'.format(wtxid)]):
            self.nodes[0].sendrawtransaction(tx_hex)
            tx_relay_peer.wait_for_tx(txid)
            assert_equal(self.nodes[0].getmempoolinfo()['size'], 1)

        self.log.info("Restarting node 0 with relay permission and blocksonly")
        self.restart_node(0, ["-persistmempool=0", "[email protected]", "-blocksonly"])
        assert_equal(self.nodes[0].getrawmempool(), [])
        first_peer = self.nodes[0].add_p2p_connection(P2PInterface())
        second_peer = self.nodes[0].add_p2p_connection(P2PInterface())
        peer_1_info = self.nodes[0].getpeerinfo()[0]
        assert_equal(peer_1_info['permissions'], ['relay'])
        peer_2_info = self.nodes[0].getpeerinfo()[1]
        assert_equal(peer_2_info['permissions'], ['relay'])
        assert_equal(self.nodes[0].testmempoolaccept([tx_hex])[0]['allowed'], True)

        self.log.info('Check that the tx from first_peer with relay-permission is relayed to others (i.e. second_peer)')
        with self.nodes[0].assert_debug_log(["received getdata"]):
            # Note that normally, first_peer would never send us transactions since we're a blocksonly node.
            # By activating blocksonly, we explicitly tell our peers that they should not send us transactions,
            # and Umkoin Core respects that choice and will not send transactions.
            # But if, for some reason, first_peer decides to relay transactions to us anyway, we should relay them to
            # second_peer since we gave relay permission to first_peer.
            # See https://github.com/bitcoin/bitcoin/issues/19943 for details.
            first_peer.send_message(msg_tx(tx))
            self.log.info('Check that the peer with relay-permission is still connected after sending the transaction')
            assert_equal(first_peer.is_connected, True)
            second_peer.wait_for_tx(txid)
            assert_equal(self.nodes[0].getmempoolinfo()['size'], 1)
        self.log.info("Relay-permission peer's transaction is accepted and relayed")

        self.nodes[0].disconnect_p2ps()
        self.generate(self.nodes[0], 1)

    def blocks_relay_conn_tests(self):
        self.log.info('Tests with node in normal mode with block-relay-only connections')
        self.restart_node(0, ["-noblocksonly"])  # disables blocks only mode
        assert_equal(self.nodes[0].getnetworkinfo()['localrelay'], True)

        # Ensure we disconnect if a block-relay-only connection sends us a transaction
        self.nodes[0].add_outbound_p2p_connection(P2PInterface(), p2p_idx=0, connection_type="block-relay-only")
        assert_equal(self.nodes[0].getpeerinfo()[0]['relaytxes'], False)
        _, txid, _, tx_hex = self.check_p2p_tx_violation()

        self.log.info("Check that txs from RPC are not sent to blockrelay connection")
        conn = self.nodes[0].add_outbound_p2p_connection(P2PTxInvStore(), p2p_idx=1, connection_type="block-relay-only")

        self.nodes[0].sendrawtransaction(tx_hex)

        # Bump time forward to ensure nNextInvSend timer pops
        self.nodes[0].setmocktime(int(time.time()) + 60)

        conn.sync_send_with_ping()
        assert int(txid, 16) not in conn.get_invs()

    def check_p2p_tx_violation(self):
        self.log.info('Check that txs from P2P are rejected and result in disconnect')
        spendtx = self.miniwallet.create_self_transfer()

        with self.nodes[0].assert_debug_log(['transaction sent in violation of protocol peer=0']):
            self.nodes[0].p2ps[0].send_message(msg_tx(spendtx['tx']))
            self.nodes[0].p2ps[0].wait_for_disconnect()
            assert_equal(self.nodes[0].getmempoolinfo()['size'], 0)

        # Remove the disconnected peer
        del self.nodes[0].p2ps[0]

        return spendtx['tx'], spendtx['txid'], spendtx['wtxid'], spendtx['hex']
Example #25
class ReplaceByFeeTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2
        self.extra_args = [
            [
                "-maxorphantx=1000",
                "-limitancestorcount=50",
                "-limitancestorsize=101",
                "-limitdescendantcount=200",
                "-limitdescendantsize=101",
            ],
            # second node has default mempool parameters
            [],
        ]
        self.supports_cli = False

    def run_test(self):
        self.wallet = MiniWallet(self.nodes[0])
        # the pre-mined test framework chain contains coinbase outputs to the
        # MiniWallet's default address in blocks 76-100 (see method
        # BitcoinTestFramework._initialize_chain())
        self.wallet.rescan_utxos()

        self.log.info("Running test simple doublespend...")
        self.test_simple_doublespend()

        self.log.info("Running test doublespend chain...")
        self.test_doublespend_chain()

        self.log.info("Running test doublespend tree...")
        self.test_doublespend_tree()

        self.log.info("Running test replacement feeperkb...")
        self.test_replacement_feeperkb()

        self.log.info("Running test spends of conflicting outputs...")
        self.test_spends_of_conflicting_outputs()

        self.log.info("Running test new unconfirmed inputs...")
        self.test_new_unconfirmed_inputs()

        self.log.info("Running test too many replacements...")
        self.test_too_many_replacements()

        self.log.info(
            "Running test too many replacements using default mempool params..."
        )
        self.test_too_many_replacements_with_default_mempool_params()

        self.log.info("Running test opt-in...")
        self.test_opt_in()

        self.log.info("Running test RPC...")
        self.test_rpc()

        self.log.info("Running test prioritised transactions...")
        self.test_prioritised_transactions()

        self.log.info("Running test no inherited signaling...")
        self.test_no_inherited_signaling()

        self.log.info("Running test replacement relay fee...")
        self.test_replacement_relay_fee()

        self.log.info("Running test full replace by fee...")
        self.test_fullrbf()

        self.log.info("Passed")

    def make_utxo(self, node, amount, *, confirmed=True, scriptPubKey=None):
        """Create a txout with a given amount and scriptPubKey

        confirmed - txout created will be confirmed in the blockchain;
                    unconfirmed otherwise.
        """
        txid, n = self.wallet.send_to(from_node=node,
                                      scriptPubKey=scriptPubKey
                                      or self.wallet.get_scriptPubKey(),
                                      amount=amount)

        if confirmed:
            mempool_size = len(node.getrawmempool())
            while mempool_size > 0:
                self.generate(node, 1)
                new_size = len(node.getrawmempool())
                # Error out if we have something stuck in the mempool, as this
                # would likely be a bug.
                assert new_size < mempool_size
                mempool_size = new_size

        return self.wallet.get_utxo(txid=txid, vout=n)

    def test_simple_doublespend(self):
        """Simple doublespend"""
        # we use MiniWallet to create a transaction template with inputs correctly set,
        # and modify the output (amount, scriptPubKey) according to our needs
        tx = self.wallet.create_self_transfer()["tx"]
        tx1a_txid = self.nodes[0].sendrawtransaction(tx.serialize().hex())

        # Create a conflicting tx with the same fee by flipping a bit in the
        # output script (different txid, fee unchanged); should fail because we
        # haven't changed the fee
        tx.vout[0].scriptPubKey[-1] ^= 1

        # This will raise an exception due to insufficient fee
        assert_raises_rpc_error(-26, "insufficient fee",
                                self.nodes[0].sendrawtransaction,
                                tx.serialize().hex(), 0)

        # Extra 0.1 BTC fee
        tx.vout[0].nValue -= int(0.1 * COIN)
        tx1b_hex = tx.serialize().hex()
        # Works when enabled
        tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, 0)

        mempool = self.nodes[0].getrawmempool()

        assert tx1a_txid not in mempool
        assert tx1b_txid in mempool

        assert_equal(tx1b_hex, self.nodes[0].getrawtransaction(tx1b_txid))

    def test_doublespend_chain(self):
        """Doublespend of a long chain"""

        initial_nValue = 5 * COIN
        tx0_outpoint = self.make_utxo(self.nodes[0], initial_nValue)

        prevout = tx0_outpoint
        remaining_value = initial_nValue
        chain_txids = []
        while remaining_value > 1 * COIN:
            remaining_value -= int(0.1 * COIN)
            prevout = self.wallet.send_self_transfer(
                from_node=self.nodes[0],
                utxo_to_spend=prevout,
                sequence=0,
                fee=Decimal("0.1"),
            )["new_utxo"]
            chain_txids.append(prevout["txid"])

        # Whether the double-spend is allowed is evaluated by including all
        # child fees - 4 BTC - so this attempt is rejected.
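        # (Illustrative: the chain above spends 0.1 BTC per hop until less than
        # 1 BTC of the original 5 BTC remains, i.e. roughly 40 transactions and
        # ~4 BTC of cumulative fees, which is why the 3 BTC fee below is not enough.)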
        dbl_tx = self.wallet.create_self_transfer(
            utxo_to_spend=tx0_outpoint,
            sequence=0,
            fee=Decimal("3"),
        )["tx"]
        dbl_tx_hex = dbl_tx.serialize().hex()

        # This will raise an exception due to insufficient fee
        assert_raises_rpc_error(-26, "insufficient fee",
                                self.nodes[0].sendrawtransaction, dbl_tx_hex,
                                0)

        # Accepted with sufficient fee
        dbl_tx.vout[0].nValue = int(0.1 * COIN)
        dbl_tx_hex = dbl_tx.serialize().hex()
        self.nodes[0].sendrawtransaction(dbl_tx_hex, 0)

        mempool = self.nodes[0].getrawmempool()
        for doublespent_txid in chain_txids:
            assert doublespent_txid not in mempool

    def test_doublespend_tree(self):
        """Doublespend of a big tree of transactions"""

        initial_nValue = 5 * COIN
        tx0_outpoint = self.make_utxo(self.nodes[0], initial_nValue)

        def branch(prevout,
                   initial_value,
                   max_txs,
                   tree_width=5,
                   fee=0.00001 * COIN,
                   _total_txs=None):
            if _total_txs is None:
                _total_txs = [0]
            if _total_txs[0] >= max_txs:
                return

            txout_value = (initial_value - fee) // tree_width
            if txout_value < fee:
                return

            tx = self.wallet.send_self_transfer_multi(
                utxos_to_spend=[prevout],
                from_node=self.nodes[0],
                sequence=0,
                num_outputs=tree_width,
                amount_per_output=txout_value,
            )

            yield tx["txid"]
            _total_txs[0] += 1

            for utxo in tx["new_utxos"]:
                for x in branch(utxo,
                                txout_value,
                                max_txs,
                                tree_width=tree_width,
                                fee=fee,
                                _total_txs=_total_txs):
                    yield x

        fee = int(0.00001 * COIN)
        n = MAX_REPLACEMENT_LIMIT
        tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee))
        assert_equal(len(tree_txs), n)

        # Attempt double-spend, will fail because too little fee paid
        dbl_tx_hex = self.wallet.create_self_transfer(
            utxo_to_spend=tx0_outpoint,
            sequence=0,
            fee=(Decimal(fee) / COIN) * n,
        )["hex"]
        # This will raise an exception due to insufficient fee
        assert_raises_rpc_error(-26, "insufficient fee",
                                self.nodes[0].sendrawtransaction, dbl_tx_hex,
                                0)

        # 0.1 BTC fee is enough
        dbl_tx_hex = self.wallet.create_self_transfer(
            utxo_to_spend=tx0_outpoint,
            sequence=0,
            fee=(Decimal(fee) / COIN) * n + Decimal("0.1"),
        )["hex"]
        self.nodes[0].sendrawtransaction(dbl_tx_hex, 0)

        mempool = self.nodes[0].getrawmempool()

        for txid in tree_txs:
            assert txid not in mempool

        # Try again, but with more total transactions than the "max txs
        # double-spent at once" anti-DoS limit.
        for n in (MAX_REPLACEMENT_LIMIT + 1, MAX_REPLACEMENT_LIMIT * 2):
            fee = int(0.00001 * COIN)
            tx0_outpoint = self.make_utxo(self.nodes[0], initial_nValue)
            tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee))
            assert_equal(len(tree_txs), n)

            dbl_tx_hex = self.wallet.create_self_transfer(
                utxo_to_spend=tx0_outpoint,
                sequence=0,
                fee=2 * (Decimal(fee) / COIN) * n,
            )["hex"]
            # This will raise an exception
            assert_raises_rpc_error(-26, "too many potential replacements",
                                    self.nodes[0].sendrawtransaction,
                                    dbl_tx_hex, 0)

            for txid in tree_txs:
                self.nodes[0].getrawtransaction(txid)

    def test_replacement_feeperkb(self):
        """Replacement requires fee-per-KB to be higher"""
        tx0_outpoint = self.make_utxo(self.nodes[0], int(1.1 * COIN))

        self.wallet.send_self_transfer(
            from_node=self.nodes[0],
            utxo_to_spend=tx0_outpoint,
            sequence=0,
            fee=Decimal("0.1"),
        )

        # Higher fee, but the fee per KB is much lower, so the replacement is
        # rejected.
        tx1b_hex = self.wallet.create_self_transfer_multi(
            utxos_to_spend=[tx0_outpoint],
            sequence=0,
            num_outputs=100,
            amount_per_output=1000,
        )["hex"]

        # This will raise an exception due to insufficient fee
        assert_raises_rpc_error(-26, "insufficient fee",
                                self.nodes[0].sendrawtransaction, tx1b_hex, 0)

    def test_spends_of_conflicting_outputs(self):
        """Replacements that spend conflicting tx outputs are rejected"""
        utxo1 = self.make_utxo(self.nodes[0], int(1.2 * COIN))
        utxo2 = self.make_utxo(self.nodes[0], 3 * COIN)

        tx1a_utxo = self.wallet.send_self_transfer(
            from_node=self.nodes[0],
            utxo_to_spend=utxo1,
            sequence=0,
            fee=Decimal("0.1"),
        )["new_utxo"]

        # Directly spend an output of the transaction we're replacing.
        tx2_hex = self.wallet.create_self_transfer_multi(
            utxos_to_spend=[utxo1, utxo2, tx1a_utxo],
            sequence=0,
            amount_per_output=int(COIN * tx1a_utxo["value"]),
        )["hex"]

        # This will raise an exception
        assert_raises_rpc_error(-26, "bad-txns-spends-conflicting-tx",
                                self.nodes[0].sendrawtransaction, tx2_hex, 0)

        # Spend tx1a's output to test the indirect case.
        tx1b_utxo = self.wallet.send_self_transfer(
            from_node=self.nodes[0],
            utxo_to_spend=tx1a_utxo,
            sequence=0,
            fee=Decimal("0.1"),
        )["new_utxo"]

        tx2_hex = self.wallet.create_self_transfer_multi(
            utxos_to_spend=[utxo1, utxo2, tx1b_utxo],
            sequence=0,
            amount_per_output=int(COIN * tx1a_utxo["value"]),
        )["hex"]

        # This will raise an exception
        assert_raises_rpc_error(-26, "bad-txns-spends-conflicting-tx",
                                self.nodes[0].sendrawtransaction, tx2_hex, 0)

    def test_new_unconfirmed_inputs(self):
        """Replacements that add new unconfirmed inputs are rejected"""
        confirmed_utxo = self.make_utxo(self.nodes[0], int(1.1 * COIN))
        unconfirmed_utxo = self.make_utxo(self.nodes[0],
                                          int(0.1 * COIN),
                                          confirmed=False)

        self.wallet.send_self_transfer(
            from_node=self.nodes[0],
            utxo_to_spend=confirmed_utxo,
            sequence=0,
            fee=Decimal("0.1"),
        )

        tx2_hex = self.wallet.create_self_transfer_multi(
            utxos_to_spend=[confirmed_utxo, unconfirmed_utxo],
            sequence=0,
            amount_per_output=1 * COIN,
        )["hex"]

        # This will raise an exception
        assert_raises_rpc_error(-26, "replacement-adds-unconfirmed",
                                self.nodes[0].sendrawtransaction, tx2_hex, 0)

    def test_too_many_replacements(self):
        """Replacements that evict too many transactions are rejected"""
        # Try directly replacing more than MAX_REPLACEMENT_LIMIT
        # transactions

        # Start by creating a single transaction with many outputs
        initial_nValue = 10 * COIN
        utxo = self.make_utxo(self.nodes[0], initial_nValue)
        fee = int(0.0001 * COIN)
        split_value = int((initial_nValue - fee) / (MAX_REPLACEMENT_LIMIT + 1))

        splitting_tx_utxos = self.wallet.send_self_transfer_multi(
            from_node=self.nodes[0],
            utxos_to_spend=[utxo],
            sequence=0,
            num_outputs=MAX_REPLACEMENT_LIMIT + 1,
            amount_per_output=split_value,
        )["new_utxos"]

        # Now spend each of those outputs individually
        for utxo in splitting_tx_utxos:
            self.wallet.send_self_transfer(
                from_node=self.nodes[0],
                utxo_to_spend=utxo,
                sequence=0,
                fee=Decimal(fee) / COIN,
            )

        # Now create a doublespend of the whole lot; it should fail (rule 5).
        # Give it a fee big enough to cover all the spending transactions and a
        # higher fee rate, so the only reason for rejection is the replacement count.
        double_spend_value = (split_value -
                              100 * fee) * (MAX_REPLACEMENT_LIMIT + 1)
        double_tx = self.wallet.create_self_transfer_multi(
            utxos_to_spend=splitting_tx_utxos,
            sequence=0,
            amount_per_output=double_spend_value,
        )["tx"]
        double_tx_hex = double_tx.serialize().hex()

        # This will raise an exception
        assert_raises_rpc_error(-26, "too many potential replacements",
                                self.nodes[0].sendrawtransaction,
                                double_tx_hex, 0)

        # If we remove an input, it should pass
        double_tx.vin.pop()
        double_tx_hex = double_tx.serialize().hex()
        self.nodes[0].sendrawtransaction(double_tx_hex, 0)

    def test_too_many_replacements_with_default_mempool_params(self):
        """
        Test rule 5 of BIP125 (do not allow replacements that cause more than 100
        evictions) without having to rely on non-default mempool parameters.

        In order to do this, create a number of "root" UTXOs, and then hang
        enough transactions off of each root UTXO to exceed the MAX_REPLACEMENT_LIMIT.
        Then create a conflicting RBF replacement transaction.
        """
        normal_node = self.nodes[1]
        wallet = MiniWallet(normal_node)
        wallet.rescan_utxos()
        # Clear mempools to avoid cross-node sync failure.
        for node in self.nodes:
            self.generate(node, 1)

        # This has to be chosen so that the total number of transactions can exceed
        # MAX_REPLACEMENT_LIMIT without having any one tx graph run into the descendant
        # limit; 10 works.
        num_tx_graphs = 10

        # (Number of transactions per graph, BIP125 rule 5 failure expected)
        cases = [
            # Test the base case of evicting fewer than MAX_REPLACEMENT_LIMIT
            # transactions.
            ((MAX_REPLACEMENT_LIMIT // num_tx_graphs) - 1, False),

            # Test hitting the rule 5 eviction limit.
            (MAX_REPLACEMENT_LIMIT // num_tx_graphs, True),
        ]
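        # (Illustrative, assuming MAX_REPLACEMENT_LIMIT is 100: the first case
        # evicts 10 parents + 10*9 children = 100 transactions, which is allowed,
        # while the second evicts 10 + 10*10 = 110 and must be rejected.)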

        for (txs_per_graph, failure_expected) in cases:
            self.log.debug(
                f"txs_per_graph: {txs_per_graph}, failure: {failure_expected}")
            # "Root" utxos of each txn graph that we will attempt to double-spend with
            # an RBF replacement.
            root_utxos = []

            # For each root UTXO, create a package that contains the spend of that
            # UTXO and `txs_per_graph` children tx.
            for graph_num in range(num_tx_graphs):
                root_utxos.append(wallet.get_utxo())

                optin_parent_tx = wallet.send_self_transfer_multi(
                    from_node=normal_node,
                    sequence=BIP125_SEQUENCE_NUMBER,
                    utxos_to_spend=[root_utxos[graph_num]],
                    num_outputs=txs_per_graph,
                )
                assert_equal(
                    True,
                    normal_node.getmempoolentry(
                        optin_parent_tx['txid'])['bip125-replaceable'])
                new_utxos = optin_parent_tx['new_utxos']

                for utxo in new_utxos:
                    # Create spends for each output from the "root" of this graph.
                    child_tx = wallet.send_self_transfer(
                        from_node=normal_node,
                        utxo_to_spend=utxo,
                    )

                    assert normal_node.getmempoolentry(child_tx['txid'])

            num_txs_invalidated = len(root_utxos) + (num_tx_graphs *
                                                     txs_per_graph)

            if failure_expected:
                assert num_txs_invalidated > MAX_REPLACEMENT_LIMIT
            else:
                assert num_txs_invalidated <= MAX_REPLACEMENT_LIMIT

            # Now attempt to submit a tx that double-spends all the root tx inputs, which
            # would invalidate `num_txs_invalidated` transactions.
            tx_hex = wallet.create_self_transfer_multi(
                utxos_to_spend=root_utxos,
                fee_per_output=10_000_000,  # absurdly high feerate
            )["hex"]

            if failure_expected:
                assert_raises_rpc_error(-26, "too many potential replacements",
                                        normal_node.sendrawtransaction, tx_hex,
                                        0)
            else:
                txid = normal_node.sendrawtransaction(tx_hex, 0)
                assert normal_node.getmempoolentry(txid)

        # Clear the mempool once finished, and rescan the other nodes' wallet
        # to account for the spends we've made on `normal_node`.
        self.generate(normal_node, 1)
        self.wallet.rescan_utxos()

    def test_opt_in(self):
        """Replacing should only work if orig tx opted in"""
        tx0_outpoint = self.make_utxo(self.nodes[0], int(1.1 * COIN))

        # Create a non-opting in transaction
        tx1a_utxo = self.wallet.send_self_transfer(
            from_node=self.nodes[0],
            utxo_to_spend=tx0_outpoint,
            sequence=SEQUENCE_FINAL,
            fee=Decimal("0.1"),
        )["new_utxo"]

        # This transaction isn't shown as replaceable
        assert_equal(
            self.nodes[0].getmempoolentry(
                tx1a_utxo["txid"])['bip125-replaceable'], False)

        # Shouldn't be able to double-spend
        tx1b_hex = self.wallet.create_self_transfer(
            utxo_to_spend=tx0_outpoint,
            sequence=0,
            fee=Decimal("0.2"),
        )["hex"]

        # This will raise an exception
        assert_raises_rpc_error(-26, "txn-mempool-conflict",
                                self.nodes[0].sendrawtransaction, tx1b_hex, 0)

        tx1_outpoint = self.make_utxo(self.nodes[0], int(1.1 * COIN))

        # Create a different non-opting in transaction
        tx2a_utxo = self.wallet.send_self_transfer(
            from_node=self.nodes[0],
            utxo_to_spend=tx1_outpoint,
            sequence=0xfffffffe,
            fee=Decimal("0.1"),
        )["new_utxo"]

        # Still shouldn't be able to double-spend
        tx2b_hex = self.wallet.create_self_transfer(
            utxo_to_spend=tx1_outpoint,
            sequence=0,
            fee=Decimal("0.2"),
        )["hex"]

        # This will raise an exception
        assert_raises_rpc_error(-26, "txn-mempool-conflict",
                                self.nodes[0].sendrawtransaction, tx2b_hex, 0)

        # Now create a new transaction that spends from tx1a and tx2a, opting
        # in to RBF on only one of the inputs.
        # The transaction should then be replaceable via either input.

        tx3a_txid = self.wallet.send_self_transfer_multi(
            from_node=self.nodes[0],
            utxos_to_spend=[tx1a_utxo, tx2a_utxo],
            sequence=[SEQUENCE_FINAL, 0xfffffffd],
            fee_per_output=int(0.1 * COIN),
        )["txid"]

        # This transaction is shown as replaceable
        assert_equal(
            self.nodes[0].getmempoolentry(tx3a_txid)['bip125-replaceable'],
            True)

        self.wallet.send_self_transfer(
            from_node=self.nodes[0],
            utxo_to_spend=tx1a_utxo,
            sequence=0,
            fee=Decimal("0.4"),
        )

        # The previous replacement (spending tx1a_utxo) already evicted tx3a, so
        # this second spend (of tx2a_utxo) won't look like a replacement, but make
        # sure it is accepted anyway
        self.wallet.send_self_transfer(
            from_node=self.nodes[0],
            utxo_to_spend=tx2a_utxo,
            sequence=0,
            fee=Decimal("0.4"),
        )

    def test_prioritised_transactions(self):
        # Ensure that fee deltas used via prioritisetransaction are
        # correctly used by replacement logic

        # 1. Check that feeperkb uses modified fees
        tx0_outpoint = self.make_utxo(self.nodes[0], int(1.1 * COIN))

        tx1a_txid = self.wallet.send_self_transfer(
            from_node=self.nodes[0],
            utxo_to_spend=tx0_outpoint,
            sequence=0,
            fee=Decimal("0.1"),
        )["txid"]

        # Higher fee, but the actual fee per KB is much lower.
        tx1b_hex = self.wallet.create_self_transfer_multi(
            utxos_to_spend=[tx0_outpoint],
            sequence=0,
            num_outputs=100,
            amount_per_output=int(0.00001 * COIN),
        )["hex"]

        # Verify tx1b cannot replace tx1a.
        assert_raises_rpc_error(-26, "insufficient fee",
                                self.nodes[0].sendrawtransaction, tx1b_hex, 0)

        # Use prioritisetransaction to set tx1a's fee to 0.
        self.nodes[0].prioritisetransaction(txid=tx1a_txid,
                                            fee_delta=int(-0.1 * COIN))

        # Now tx1b should be able to replace tx1a
        tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, 0)

        assert tx1b_txid in self.nodes[0].getrawmempool()

        # 2. Check that absolute fee checks use modified fee.
        tx1_outpoint = self.make_utxo(self.nodes[0], int(1.1 * COIN))

        # tx2a
        self.wallet.send_self_transfer(
            from_node=self.nodes[0],
            utxo_to_spend=tx1_outpoint,
            sequence=0,
            fee=Decimal("0.1"),
        )

        # Lower fee, but we'll prioritise it
        tx2b = self.wallet.create_self_transfer(
            utxo_to_spend=tx1_outpoint,
            sequence=0,
            fee=Decimal("0.09"),
        )

        # Verify tx2b cannot replace tx2a.
        assert_raises_rpc_error(-26, "insufficient fee",
                                self.nodes[0].sendrawtransaction, tx2b["hex"],
                                0)

        # Now prioritise tx2b to have a higher modified fee
        self.nodes[0].prioritisetransaction(txid=tx2b["txid"],
                                            fee_delta=int(0.1 * COIN))

        # tx2b should now be accepted
        tx2b_txid = self.nodes[0].sendrawtransaction(tx2b["hex"], 0)

        assert tx2b_txid in self.nodes[0].getrawmempool()

    def test_rpc(self):
        us0 = self.wallet.get_utxo()
        ins = [us0]
        outs = {ADDRESS_BCRT1_UNSPENDABLE: Decimal(1.0000000)}
        rawtx0 = self.nodes[0].createrawtransaction(ins, outs, 0, True)
        rawtx1 = self.nodes[0].createrawtransaction(ins, outs, 0, False)
        json0 = self.nodes[0].decoderawtransaction(rawtx0)
        json1 = self.nodes[0].decoderawtransaction(rawtx1)
        assert_equal(json0["vin"][0]["sequence"], 4294967293)
        assert_equal(json1["vin"][0]["sequence"], 4294967295)

        if self.is_specified_wallet_compiled():
            self.init_wallet(node=0)
            rawtx2 = self.nodes[0].createrawtransaction([], outs)
            frawtx2a = self.nodes[0].fundrawtransaction(
                rawtx2, {"replaceable": True})
            frawtx2b = self.nodes[0].fundrawtransaction(
                rawtx2, {"replaceable": False})

            json0 = self.nodes[0].decoderawtransaction(frawtx2a['hex'])
            json1 = self.nodes[0].decoderawtransaction(frawtx2b['hex'])
            assert_equal(json0["vin"][0]["sequence"], 4294967293)
            assert_equal(json1["vin"][0]["sequence"], 4294967294)

    def test_no_inherited_signaling(self):
        confirmed_utxo = self.wallet.get_utxo()

        # Create an explicitly opt-in parent transaction
        optin_parent_tx = self.wallet.send_self_transfer(
            from_node=self.nodes[0],
            utxo_to_spend=confirmed_utxo,
            sequence=BIP125_SEQUENCE_NUMBER,
            fee_rate=Decimal('0.01'),
        )
        assert_equal(
            True, self.nodes[0].getmempoolentry(
                optin_parent_tx['txid'])['bip125-replaceable'])

        replacement_parent_tx = self.wallet.create_self_transfer(
            utxo_to_spend=confirmed_utxo,
            sequence=BIP125_SEQUENCE_NUMBER,
            fee_rate=Decimal('0.02'),
        )

        # Test if parent tx can be replaced.
        res = self.nodes[0].testmempoolaccept(
            rawtxs=[replacement_parent_tx['hex']])[0]

        # Parent can be replaced.
        assert_equal(res['allowed'], True)

        # Create an opt-out child tx spending the opt-in parent
        parent_utxo = self.wallet.get_utxo(txid=optin_parent_tx['txid'])
        optout_child_tx = self.wallet.send_self_transfer(
            from_node=self.nodes[0],
            utxo_to_spend=parent_utxo,
            sequence=SEQUENCE_FINAL,
            fee_rate=Decimal('0.01'),
        )

        # Reports true due to inheritance
        assert_equal(
            True, self.nodes[0].getmempoolentry(
                optout_child_tx['txid'])['bip125-replaceable'])

        replacement_child_tx = self.wallet.create_self_transfer(
            utxo_to_spend=parent_utxo,
            sequence=SEQUENCE_FINAL,
            fee_rate=Decimal('0.02'),
        )

        # Broadcast replacement child tx
        # BIP 125 :
        # 1. The original transactions signal replaceability explicitly or through inheritance as described in the above
        # Summary section.
        # The original transaction (`optout_child_tx`) doesn't signal RBF but its parent (`optin_parent_tx`) does.
        # The replacement transaction (`replacement_child_tx`) should be able to replace the original transaction.
        # See CVE-2021-31876 for further explanations.
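        # (In practice the node does not implement inherited signaling, which is the
        # point of CVE-2021-31876: the child set SEQUENCE_FINAL, so the replacement
        # below is rejected with txn-mempool-conflict instead of being accepted.)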
        assert_equal(
            True, self.nodes[0].getmempoolentry(
                optin_parent_tx['txid'])['bip125-replaceable'])
        assert_raises_rpc_error(-26, 'txn-mempool-conflict',
                                self.nodes[0].sendrawtransaction,
                                replacement_child_tx["hex"], 0)

        self.log.info(
            'Check that the child tx can still be replaced (via a tx that also replaces the parent)'
        )
        replacement_parent_tx = self.wallet.send_self_transfer(
            from_node=self.nodes[0],
            utxo_to_spend=confirmed_utxo,
            sequence=SEQUENCE_FINAL,
            fee_rate=Decimal('0.03'),
        )
        # Check that child is removed and update wallet utxo state
        assert_raises_rpc_error(-5, 'Transaction not in mempool',
                                self.nodes[0].getmempoolentry,
                                optout_child_tx['txid'])
        self.wallet.get_utxo(txid=optout_child_tx['txid'])

    def test_replacement_relay_fee(self):
        tx = self.wallet.send_self_transfer(from_node=self.nodes[0])['tx']

        # Higher fee, higher feerate, different txid, but the replacement does not provide a relay
        # fee conforming to node's `incrementalrelayfee` policy of 1000 sat per KB.
        assert_equal(self.nodes[0].getmempoolinfo()["incrementalrelayfee"],
                     Decimal("0.00001"))
        tx.vout[0].nValue -= 1
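        # (Illustrative: lowering the output by 1 sat raises the fee by only 1 sat,
        # while BIP125 rule 4 requires the extra fee to be at least
        # incrementalrelayfee * vsize, i.e. on the order of 100 sat for this tx.)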
        assert_raises_rpc_error(-26, "insufficient fee",
                                self.nodes[0].sendrawtransaction,
                                tx.serialize().hex())

    def test_fullrbf(self):
        txid = self.wallet.send_self_transfer(from_node=self.nodes[0])['txid']
        self.generate(self.nodes[0], 1)
        confirmed_utxo = self.wallet.get_utxo(txid=txid)

        self.restart_node(0, extra_args=["-mempoolfullrbf=1"])

        # Create an explicitly opt-out transaction
        optout_tx = self.wallet.send_self_transfer(
            from_node=self.nodes[0],
            utxo_to_spend=confirmed_utxo,
            sequence=SEQUENCE_FINAL,
            fee_rate=Decimal('0.01'),
        )
        assert_equal(
            False, self.nodes[0].getmempoolentry(
                optout_tx['txid'])['bip125-replaceable'])

        conflicting_tx = self.wallet.create_self_transfer(
            utxo_to_spend=confirmed_utxo,
            sequence=SEQUENCE_FINAL,
            fee_rate=Decimal('0.02'),
        )

        # Send the replacement transaction, conflicting with the optout_tx.
        self.nodes[0].sendrawtransaction(conflicting_tx['hex'], 0)

        # optout_tx is no longer in the mempool.
        assert optout_tx['txid'] not in self.nodes[0].getrawmempool()