class FullBlockTest(ComparisonTestFramework):

    # Can either run this test as 1 node with expected answers, or two and compare them.
    # Change the "outcome" variable from each TestInstance object to only do
    # the comparison.

    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True
        self.block_heights = {}
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"horsebattery")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        self.tip = None
        self.blocks = {}
        self.extra_args = [['-minrelaytxfee=0']]

    def add_options(self, parser):
        super().add_options(parser)
        parser.add_option("--runbarelyexpensive",
                          dest="runbarelyexpensive",
                          default=True)

    def run_test(self):
        self.test.run()

    def get_tests(self):
        # shorthand for functions
        block = self.chain.next_block
        update_block = self.chain.update_block
        save_spendable_output = self.chain.save_spendable_output
        get_spendable_output = self.chain.get_spendable_output
        accepted = self.accepted

        # shorthand for variables
        node = self.nodes[0]
        self.chain.set_genesis_hash(int(node.getbestblockhash(), 16))
        # Create a new block
        block(0)
        yield accepted()

        test, out, _ = prepare_init_chain(self.chain, 99, 33)

        yield test

        # P2SH
        # Build the redeem script, hash it, use hash to create the p2sh script
        redeem_script = CScript([self.coinbase_pubkey] +
                                [OP_2DUP, OP_CHECKSIGVERIFY] * 5 +
                                [OP_CHECKSIG])
        redeem_script_hash = hash160(redeem_script)
        p2sh_script = CScript([OP_HASH160, redeem_script_hash, OP_EQUAL])
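        # Spending an output locked with p2sh_script executes redeem_script, which
        # contains five OP_CHECKSIGVERIFY plus one OP_CHECKSIG, i.e. six signature
        # operations per spend (this is the count GetSigOpCount(True) returns below).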

        # Creates a new transaction using a p2sh transaction as input
        def spend_p2sh_tx(p2sh_tx_to_spend, output_script=CScript([OP_TRUE])):
            # Create the transaction
            spent_p2sh_tx = CTransaction()
            spent_p2sh_tx.vin.append(
                CTxIn(COutPoint(p2sh_tx_to_spend.sha256, 0), b''))
            spent_p2sh_tx.vout.append(CTxOut(1, output_script))
            # Sign the transaction using the redeem script
            sighash = SignatureHashForkId(redeem_script, spent_p2sh_tx, 0,
                                          SIGHASH_ALL | SIGHASH_FORKID,
                                          p2sh_tx_to_spend.vout[0].nValue)
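            # With SIGHASH_FORKID the digest commits to the script being executed
            # (the redeem script, not the P2SH wrapper) and to the value of the
            # output being spent, hence the nValue argument above.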
            sig = self.coinbase_key.sign(sighash) + bytes(
                bytearray([SIGHASH_ALL | SIGHASH_FORKID]))
            spent_p2sh_tx.vin[0].scriptSig = CScript([sig, redeem_script])
            spent_p2sh_tx.rehash()
            return spent_p2sh_tx

        # P2SH tests
        # Create a p2sh transaction
        p2sh_tx = create_and_sign_transaction(out[0].tx, out[0].n, 1,
                                              p2sh_script, self.coinbase_key)

        # Add the transaction to the block
        block(1)
        update_block(1, [p2sh_tx])
        yield accepted()

        # Sigops p2sh limit for the mempool test
        p2sh_sigops_limit_mempool = MAX_TX_SIGOPS_COUNT_POLICY_BEFORE_GENESIS - \
            redeem_script.GetSigOpCount(True)
        # Too many sigops in one p2sh script
        too_many_p2sh_sigops_mempool = CScript([OP_CHECKSIG] *
                                               (p2sh_sigops_limit_mempool + 1))
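        # The mempool's per-transaction sigop count presumably adds the sigops in the
        # tx's own output scripts to the accurately-counted sigops of its P2SH input,
        # so this output script yields (p2sh_sigops_limit_mempool + 1) + 6 sigops in
        # total: one over MAX_TX_SIGOPS_COUNT_POLICY_BEFORE_GENESIS.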

        # A transaction with this output script can't get into the mempool
        assert_raises_rpc_error(
            -26, RPC_TXNS_TOO_MANY_SIGOPS_ERROR, node.sendrawtransaction,
            ToHex(spend_p2sh_tx(p2sh_tx, too_many_p2sh_sigops_mempool)))

        # The transaction is rejected, so the mempool should still be empty
        assert_equal(set(node.getrawmempool()), set())

        # Max sigops in one p2sh txn
        max_p2sh_sigops_mempool = CScript([OP_CHECKSIG] *
                                          (p2sh_sigops_limit_mempool))

        # A transaction with this output script can get into the mempool
        max_p2sh_sigops_txn = spend_p2sh_tx(p2sh_tx, max_p2sh_sigops_mempool)
        max_p2sh_sigops_txn_id = node.sendrawtransaction(
            ToHex(max_p2sh_sigops_txn))
        assert_equal(set(node.getrawmempool()), {max_p2sh_sigops_txn_id})

        # Mine the transaction
        block(2, spend=out[1])
        update_block(2, [max_p2sh_sigops_txn])
        yield accepted()

        # The transaction has been mined, it's not in the mempool anymore
        assert_equal(set(node.getrawmempool()), set())
Example 2
class FullBlockTest(ComparisonTestFramework):
    # Can either run this test as 1 node with expected answers, or two and compare them.
    # Change the "outcome" variable from each TestInstance object to only do the comparison.
    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True
        self.block_heights = {}
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"horsebattery")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        self.tip = None
        self.blocks = {}

    def add_options(self, parser):
        super().add_options(parser)
        parser.add_option("--runbarelyexpensive",
                          dest="runbarelyexpensive",
                          default=True)

    def run_test(self):
        self.test.run()

    def get_tests(self):
        # shorthand for functions
        block = lambda *a, **kw: self.chain.next_block(
            *a, coinbase_key=self.coinbase_key, simple_output=True, **kw)
        create_and_sign_tx = lambda *a, **kw: create_and_sign_transaction(
            *a,
            private_key=self.coinbase_key,
            **({k: v
                for k, v in kw.items() if not k == 'private_key'}))
        update_block = self.chain.update_block
        tip = self.chain.set_tip
        accepted = self.accepted
        rejected = self.rejected

        self.chain.set_genesis_hash(int(self.nodes[0].getbestblockhash(), 16))
        save_spendable_output = self.chain.save_spendable_output
        get_spendable_output = self.chain.get_spendable_output

        # Create a new block
        block(0)
        yield accepted()

        test, out, _ = prepare_init_chain(self.chain, 99, 33)

        yield test

        # Start by building a couple of blocks on top (which output is spent is
        # in parentheses):
        #     genesis -> b1 (0) -> b2 (1)
        block(1, spend=out[0])
        save_spendable_output()
        yield accepted()

        block(2, spend=out[1])
        yield accepted()
        save_spendable_output()

        # so fork like this:
        #
        #     genesis -> b1 (0) -> b2 (1)
        #                      \-> b3 (1)
        #
        # Nothing should happen at this point. We saw b2 first so it takes
        # priority.
        tip(1)
        b3 = block(3, spend=out[1])
        txout_b3 = PreviousSpendableOutput(b3.vtx[1], 0)
        yield rejected()

        # Now we add another block to make the alternative chain longer.
        #
        #     genesis -> b1 (0) -> b2 (1)
        #                      \-> b3 (1) -> b4 (2)
        block(4, spend=out[2])
        yield accepted()

        # ... and back to the first chain.
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                      \-> b3 (1) -> b4 (2)
        tip(2)
        block(5, spend=out[2])
        save_spendable_output()
        yield rejected()

        block(6, spend=out[3])
        yield accepted()

        # Try to create a fork that double-spends
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                                          \-> b7 (2) -> b8 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        block(7, spend=out[2])
        yield rejected()

        block(8, spend=out[4])
        yield rejected()

        # Try to create a block that has too much fee
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                                                    \-> b9 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(6)
        block(9, spend=out[4], additional_coinbase_value=1)
        yield rejected(RejectResult(16, b'bad-cb-amount'))

        # Create a fork that ends in a block with too much fee (the one that causes the reorg)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b10 (3) -> b11 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        block(10, spend=out[3])
        yield rejected()

        block(11, spend=out[4], additional_coinbase_value=1)
        yield rejected(RejectResult(16, b'bad-cb-amount'))

        # Try again, but with a valid fork first
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b14 (5)
        #                                              (b12 added last)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        b12 = block(12, spend=out[3])
        save_spendable_output()
        b13 = block(13, spend=out[4])
        # Deliver the block header for b12, and the block b13.
        # b13 should be accepted but the tip won't advance until b12 is
        # delivered.
        yield TestInstance([[CBlockHeader(b12), None], [b13, False]])
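        # (In comptool, each TestInstance entry pairs an object with an expected
        # outcome: True = expect it to become the new tip, False = expect the tip
        # not to advance to it, None = no assertion; an optional third element gives
        # the hash the tip is expected to have instead, as used for b12 below.)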

        save_spendable_output()
        # b14 is invalid, but the node won't know that until it tries to connect
        # Tip still can't advance because b12 is missing
        block(14, spend=out[5], additional_coinbase_value=1)
        yield rejected()

        yield TestInstance([[b12, True, b13.sha256]])  # New tip should be b13.

        # Add a block with MAX_BLOCK_SIGOPS_PER_MB and one with one more sigop
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b16 (6)
        #                      \-> b3 (1) -> b4 (2)

        # Test that a block with a lot of checksigs is okay
        lots_of_checksigs = CScript([OP_CHECKSIG] *
                                    (MAX_BLOCK_SIGOPS_PER_MB - 1))
        tip(13)
        block(15, spend=out[5], script=lots_of_checksigs)
        yield accepted()
        save_spendable_output()

        # Test that a block with too many checksigs is rejected
        too_many_checksigs = CScript([OP_CHECKSIG] * (MAX_BLOCK_SIGOPS_PER_MB))
        block(16, spend=out[6], script=too_many_checksigs)
        yield rejected(RejectResult(16, b'bad-blk-sigops'))

        # Attempt to spend a transaction created on a different fork
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b17 (b3.vtx[1])
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        block(17, spend=txout_b3)
        yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))

        # Attempt to spend a transaction created on a different fork (on a fork this time)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5)
        #                                                                \-> b18 (b3.vtx[1]) -> b19 (6)
        #                      \-> b3 (1) -> b4 (2)
        tip(13)
        block(18, spend=txout_b3)
        yield rejected()

        block(19, spend=out[6])
        yield rejected()

        # Attempt to spend a coinbase at depth too low
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b20 (7)
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        block(20, spend=out[7])
        yield rejected(
            RejectResult(16, b'bad-txns-premature-spend-of-coinbase'))

        # Attempt to spend a coinbase at depth too low (on a fork this time)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5)
        #                                                                \-> b21 (6) -> b22 (5)
        #                      \-> b3 (1) -> b4 (2)
        tip(13)
        block(21, spend=out[6])
        yield rejected()

        block(22, spend=out[5])
        yield rejected()

        # Create a block on either side of LEGACY_MAX_BLOCK_SIZE and make sure its accepted/rejected
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6)
        #                                                                           \-> b24 (6) -> b25 (7)
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        b23 = block(23, spend=out[6])
        tx = CTransaction()
        script_length = LEGACY_MAX_BLOCK_SIZE - len(b23.serialize()) - 69
        script_output = CScript([b'\x00' * script_length])
        tx.vout.append(CTxOut(0, script_output))
        tx.vin.append(CTxIn(COutPoint(b23.vtx[1].sha256, 0)))
        b23 = update_block(23, [tx])
        # Make sure the math above worked out to produce a max-sized block
        assert_equal(len(b23.serialize()), LEGACY_MAX_BLOCK_SIZE)
        yield accepted()
        save_spendable_output()

        # Create blocks with a coinbase input script size out of range
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7)
        #                                                                           \-> ... (6) -> ... (7)
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        b26 = block(26, spend=out[6])
        b26.vtx[0].vin[0].scriptSig = b'\x00'
        b26.vtx[0].rehash()
        # update_block causes the merkle root to get updated, even with no new
        # transactions, and updates the required state.
        b26 = update_block(26, [])
        yield rejected(RejectResult(16, b'bad-cb-length'))

        # Extend the b26 chain to make sure bitcoind isn't accepting b26
        b27 = block(27, spend=out[7])
        yield rejected(False)

        # Now try a too-large-coinbase script
        tip(15)
        b28 = block(28, spend=out[6])
        b28.vtx[0].vin[0].scriptSig = b'\x00' * 101
        b28.vtx[0].rehash()
        b28 = update_block(28, [])
        yield rejected(RejectResult(16, b'bad-cb-length'))

        # Extend the b28 chain to make sure bitcoind isn't accepting b28
        b29 = block(29, spend=out[7])
        yield rejected(False)

        # b30 has a max-sized coinbase scriptSig.
        tip(23)
        b30 = block(30)
        b30.vtx[0].vin[0].scriptSig = b'\x00' * 100
        b30.vtx[0].rehash()
        b30 = update_block(30, [])
        yield accepted()
        save_spendable_output()

        # b31 - b35 - check sigops of OP_CHECKMULTISIG / OP_CHECKMULTISIGVERIFY / OP_CHECKSIGVERIFY
        #
        #     genesis -> ... -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10)
        #                                                                \-> b36 (11)
        #                                                    \-> b34 (10)
        #                                         \-> b32 (9)
        #

        # MULTISIG: each op code counts as 20 sigops.  To create the edge case,
        # pack another 19 sigops at the end.
        lots_of_multisigs = CScript([OP_CHECKMULTISIG] *
                                    ((MAX_BLOCK_SIGOPS_PER_MB - 1) // 20) +
                                    [OP_CHECKSIG] * 19)
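        # Worked example, assuming MAX_BLOCK_SIGOPS_PER_MB is 20,000: the 999
        # OP_CHECKMULTISIGs count as 999 * 20 = 19,980 legacy sigops, the 19
        # OP_CHECKSIGs bring the script to 19,999, and the coinbase's single sigop
        # makes the block total exactly the limit (see the assert below).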
        b31 = block(31, spend=out[8], script=lots_of_multisigs)
        assert_equal(get_legacy_sigopcount_block(b31), MAX_BLOCK_SIGOPS_PER_MB)
        yield accepted()
        save_spendable_output()

        # this goes over the limit because the coinbase has one sigop
        too_many_multisigs = CScript([OP_CHECKMULTISIG] *
                                     (MAX_BLOCK_SIGOPS_PER_MB // 20))
        b32 = block(32, spend=out[9], script=too_many_multisigs)
        assert_equal(get_legacy_sigopcount_block(b32),
                     MAX_BLOCK_SIGOPS_PER_MB + 1)
        yield rejected(RejectResult(16, b'bad-blk-sigops'))

        # CHECKMULTISIGVERIFY
        tip(31)
        lots_of_multisigs = CScript([OP_CHECKMULTISIGVERIFY] *
                                    ((MAX_BLOCK_SIGOPS_PER_MB - 1) // 20) +
                                    [OP_CHECKSIG] * 19)
        block(33, spend=out[9], script=lots_of_multisigs)
        yield accepted()
        save_spendable_output()

        too_many_multisigs = CScript([OP_CHECKMULTISIGVERIFY] *
                                     (MAX_BLOCK_SIGOPS_PER_MB // 20))
        block(34, spend=out[10], script=too_many_multisigs)
        yield rejected(RejectResult(16, b'bad-blk-sigops'))

        # CHECKSIGVERIFY
        tip(33)
        lots_of_checksigs = CScript([OP_CHECKSIGVERIFY] *
                                    (MAX_BLOCK_SIGOPS_PER_MB - 1))
        b35 = block(35, spend=out[10], script=lots_of_checksigs)
        yield accepted()
        save_spendable_output()

        too_many_checksigs = CScript([OP_CHECKSIGVERIFY] *
                                     (MAX_BLOCK_SIGOPS_PER_MB))
        block(36, spend=out[11], script=too_many_checksigs)
        yield rejected(RejectResult(16, b'bad-blk-sigops'))

        # Check spending of a transaction in a block which failed to connect
        #
        # b6  (3)
        # b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10)
        #                                                                                     \-> b37 (11)
        #                                                                                     \-> b38 (11/37)
        #

        # save 37's spendable output, but then double-spend out11 to invalidate
        # the block
        tip(35)
        b37 = block(37, spend=out[11])
        txout_b37 = PreviousSpendableOutput(b37.vtx[1], 0)
        tx = create_and_sign_tx(out[11].tx, out[11].n, 0)
        b37 = update_block(37, [tx])
        yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))

        # attempt to spend b37's first non-coinbase tx, at which point b37 was
        # still considered valid
        tip(35)
        block(38, spend=txout_b37)
        yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))

        # Check P2SH SigOp counting
        #
        #
        #   13 (4) -> b15 (5) -> b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b41 (12)
        #                                                                                        \-> b40 (12)
        #
        # b39 - create some P2SH outputs that will require 6 sigops to spend:
        #
        #           redeem_script = COINBASE_PUBKEY, (OP_2DUP+OP_CHECKSIGVERIFY) * 5, OP_CHECKSIG
        #           p2sh_script = OP_HASH160, ripemd160(sha256(script)), OP_EQUAL
        #
        tip(35)
        b39 = block(39)
        b39_outputs = 0
        b39_sigops_per_output = 6

        # Build the redeem script, hash it, use hash to create the p2sh script
        redeem_script = CScript([self.coinbase_pubkey] +
                                [OP_2DUP, OP_CHECKSIGVERIFY] * 5 +
                                [OP_CHECKSIG])
        redeem_script_hash = hash160(redeem_script)
        p2sh_script = CScript([OP_HASH160, redeem_script_hash, OP_EQUAL])

        # Create a transaction that spends one satoshi to the p2sh_script, the rest to OP_TRUE
        # This must be signed because it is spending a coinbase
        spend = out[11]
        tx = create_tx(spend.tx, spend.n, 1, p2sh_script)
        tx.vout.append(
            CTxOut(spend.tx.vout[spend.n].nValue - 1, CScript([OP_TRUE])))
        sign_tx(tx, spend.tx, spend.n, self.coinbase_key)
        tx.rehash()
        b39 = update_block(39, [tx])
        b39_outputs += 1

        # Until block is full, add tx's with 1 satoshi to p2sh_script, the rest
        # to OP_TRUE
        tx_new = None
        tx_last = tx
        total_size = len(b39.serialize())
        while (total_size < LEGACY_MAX_BLOCK_SIZE):
            tx_new = create_tx(tx_last, 1, 1, p2sh_script)
            tx_new.vout.append(
                CTxOut(tx_last.vout[1].nValue - 1, CScript([OP_TRUE])))
            tx_new.rehash()
            total_size += len(tx_new.serialize())
            if total_size >= LEGACY_MAX_BLOCK_SIZE:
                break
            b39.vtx.append(tx_new)  # add tx to block
            tx_last = tx_new
            b39_outputs += 1
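        # Each iteration spends output 1 (the OP_TRUE change) of the previous
        # transaction and adds one more 1-satoshi p2sh_script output, so b39 ends
        # up holding b39_outputs P2SH outputs that each take six sigops to redeem.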

        b39 = update_block(39, [])
        yield accepted()
        save_spendable_output()

        # Test sigops in P2SH redeem scripts
        #
        # b40 creates 3333 tx's spending the 6-sigop P2SH outputs from b39 for a total of 19998 sigops.
        #       The coinbase (the block's first tx) has one sigop and then at the end we add 2 more to put us just over the max.
        #
        # b41 does the same, less one, so it has the maximum sigops permitted.
        #
        tip(39)
        b40 = block(40, spend=out[12])
        sigops = get_legacy_sigopcount_block(b40)
        numTxes = (MAX_BLOCK_SIGOPS_PER_MB - sigops) // b39_sigops_per_output
        assert_equal(numTxes <= b39_outputs, True)

        lastOutpoint = COutPoint(b40.vtx[1].sha256, 0)
        lastAmount = b40.vtx[1].vout[0].nValue
        new_txs = []
        for i in range(1, numTxes + 1):
            tx = CTransaction()
            tx.vout.append(CTxOut(1, CScript([OP_TRUE])))
            tx.vin.append(CTxIn(lastOutpoint, b''))
            # second input is corresponding P2SH output from b39
            tx.vin.append(CTxIn(COutPoint(b39.vtx[i].sha256, 0), b''))
            # Note: must pass the redeem_script (not p2sh_script) to the
            # signature hash function
            sighash = SignatureHashForkId(redeem_script, tx, 1,
                                          SIGHASH_ALL | SIGHASH_FORKID,
                                          lastAmount)
            sig = self.coinbase_key.sign(sighash) + bytes(
                bytearray([SIGHASH_ALL | SIGHASH_FORKID]))
            scriptSig = CScript([sig, redeem_script])

            tx.vin[1].scriptSig = scriptSig
            tx.rehash()
            new_txs.append(tx)
            lastOutpoint = COutPoint(tx.sha256, 0)
            lastAmount = tx.vout[0].nValue

        b40_sigops_to_fill = MAX_BLOCK_SIGOPS_PER_MB - \
            (numTxes * b39_sigops_per_output + sigops) + 1
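        # The filler script pushes the block's total sigop count (sigops +
        # numTxes * 6 + b40_sigops_to_fill) to MAX_BLOCK_SIGOPS_PER_MB + 1, one over
        # the limit; b41 below reuses the same transactions with one fewer filler
        # sigop so that it lands exactly on the limit.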
        tx = CTransaction()
        tx.vin.append(CTxIn(lastOutpoint, b''))
        tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b40_sigops_to_fill)))
        tx.rehash()
        new_txs.append(tx)
        update_block(40, new_txs)
        yield rejected(RejectResult(16, b'bad-blk-sigops'))

        # same as b40, but one less sigop
        tip(39)
        b41 = block(41, spend=None)
        update_block(41, b40.vtx[1:-1])
        b41_sigops_to_fill = b40_sigops_to_fill - 1
        tx = CTransaction()
        tx.vin.append(CTxIn(lastOutpoint, b''))
        tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b41_sigops_to_fill)))
        tx.rehash()
        update_block(41, [tx])
        yield accepted()

        # Fork off of b39 to create a constant base again
        #
        # b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13)
        #                                                                  \-> b41 (12)
        #
        tip(39)
        block(42, spend=out[12])
        yield rejected()
        save_spendable_output()

        block(43, spend=out[13])
        yield accepted()
        save_spendable_output()

        # Test a number of really invalid scenarios
        #
        #  -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b44 (14)
        #                                                                                   \-> ??? (15)

        # The next few blocks are going to be created "by hand" since they'll do funky things, such as having
        # the first transaction be non-coinbase, etc.  The purpose of b44 is to
        # make sure this works.
        height = self.chain.block_heights[self.chain.tip.sha256] + 1
        coinbase = create_coinbase(height, self.coinbase_pubkey)
        b44 = CBlock()
        b44.nTime = self.chain.tip.nTime + 1
        b44.hashPrevBlock = self.chain.tip.sha256
        b44.nBits = 0x207fffff
        b44.vtx.append(coinbase)
        b44.hashMerkleRoot = b44.calc_merkle_root()
        b44.solve()
        self.chain.tip = b44
        self.chain.block_heights[b44.sha256] = height
        self.chain.blocks[44] = b44
        yield accepted()

        # A block with a non-coinbase as the first tx
        non_coinbase = create_tx(out[15].tx, out[15].n, 1)
        b45 = CBlock()
        b45.nTime = self.chain.tip.nTime + 1
        b45.hashPrevBlock = self.chain.tip.sha256
        b45.nBits = 0x207fffff
        b45.vtx.append(non_coinbase)
        b45.hashMerkleRoot = b45.calc_merkle_root()
        b45.calc_sha256()
        b45.solve()
        self.chain.block_heights[
            b45.sha256] = self.chain.block_heights[self.chain.tip.sha256] + 1
        self.chain.tip = b45
        self.chain.blocks[45] = b45
        yield rejected(RejectResult(16, b'bad-cb-missing'))

        # A block with no txns
        tip(44)
        b46 = CBlock()
        b46.nTime = b44.nTime + 1
        b46.hashPrevBlock = b44.sha256
        b46.nBits = 0x207fffff
        b46.vtx = []
        b46.hashMerkleRoot = 0
        b46.solve()
        self.chain.block_heights[
            b46.sha256] = self.chain.block_heights[b44.sha256] + 1
        self.chain.tip = b46
        assert 46 not in self.chain.blocks
        self.chain.blocks[46] = b46
        s = ser_uint256(b46.hashMerkleRoot)
        yield rejected(RejectResult(16, b'bad-cb-missing'))

        # A block with invalid work
        tip(44)
        b47 = block(47, do_solve_block=False)
        target = uint256_from_compact(b47.nBits)
        while b47.sha256 < target:  # opposite comparison to solve(): loop until the hash is NOT below the target, i.e. the PoW is invalid
            b47.nNonce += 1
            b47.rehash()
        yield rejected(RejectResult(16, b'high-hash'))

        # A block with timestamp > 2 hrs in the future
        tip(44)
        b48 = block(48, do_solve_block=False)
        b48.nTime = int(time.time()) + 60 * 60 * 3
        b48.solve()
        yield rejected(RejectResult(16, b'time-too-new'))

        # A block with an invalid merkle hash
        tip(44)
        b49 = block(49)
        b49.hashMerkleRoot += 1
        b49.solve()
        yield rejected(RejectResult(16, b'bad-txnmrklroot'))

        # A block with an incorrect POW limit
        tip(44)
        b50 = block(50)
        b50.nBits = b50.nBits - 1
        b50.solve()
        yield rejected(RejectResult(16, b'bad-diffbits'))

        # A block with two coinbase txns
        tip(44)
        b51 = block(51)
        cb2 = create_coinbase(51, self.coinbase_pubkey)
        b51 = update_block(51, [cb2])
        yield rejected(RejectResult(16, b'bad-tx-coinbase'))

        # A block w/ duplicate txns
        # Note: txns have to be in the right position in the merkle tree to
        # trigger this error
        tip(44)
        b52 = block(52, spend=out[15])
        tx = create_tx(b52.vtx[1], 0, 1)
        b52 = update_block(52, [tx, tx])
        yield rejected(RejectResult(16, b'bad-txns-duplicate'))

        # Test block timestamps
        #  -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15)
        #                                                                                   \-> b54 (15)
        #
        tip(43)
        block(53, spend=out[14])
        yield rejected()  # rejected since b44 is at same height
        save_spendable_output()

        # invalid timestamp (b35 is 5 blocks back, so its time is
        # MedianTimePast)
        b54 = block(54, spend=out[15])
        b54.nTime = b35.nTime - 1
        b54.solve()
        yield rejected(RejectResult(16, b'time-too-old'))

        # valid timestamp
        tip(53)
        b55 = block(55, spend=out[15])
        b55.nTime = b35.nTime
        update_block(55, [])
        yield accepted()
        save_spendable_output()

        # Test CVE-2012-2459
        #
        # -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57p2 (16)
        #                                                \-> b57   (16)
        #                                                \-> b56p2 (16)
        #                                                \-> b56   (16)
        #
        # Merkle tree malleability (CVE-2012-2459): repeating sequences of transactions in a block without
        #                           affecting the merkle root of a block, while still invalidating it.
        #                           See:  src/consensus/merkle.h
        #
        #  b57 has three txns:  coinbase, tx, tx1.  The merkle root computation will duplicate tx1.
        #  Result:  OK
        #
        #  b56 copies b57 but duplicates tx1 and does not recalculate the block hash.  So it has a valid merkle
        #  root but duplicate transactions.
        #  Result:  Fails
        #
        #  b57p2 has six transactions in its merkle tree:
        #       - coinbase, tx, tx1, tx2, tx3, tx4
        #  Merkle root calculation will duplicate as necessary.
        #  Result:  OK.
        #
        #  b56p2 copies b57p2 but adds both tx3 and tx4.  The purpose of the test is to make sure the code catches
        #  duplicate txns that are not next to one another with the "bad-txns-duplicate" error (which indicates
        #  that the error was caught early, avoiding a DOS vulnerability.)
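        #
        #  How the malleation works: a minimal sketch of the pairwise double-SHA256
        #  merkle rule (the helper names here are illustrative, not framework code):
        #
        #      def merkle_root(hashes):
        #          while len(hashes) > 1:
        #              if len(hashes) % 2:
        #                  hashes.append(hashes[-1])  # odd level: duplicate the last hash
        #              hashes = [dsha256(hashes[i] + hashes[i + 1])
        #                        for i in range(0, len(hashes), 2)]
        #          return hashes[0]
        #
        #  With [coinbase, tx, tx1] the odd first level duplicates tx1's hash, so a block
        #  that literally contains tx1 twice (b56) has the same merkle root and block hash,
        #  yet must still be rejected for containing duplicate transactions.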

        # b57 - a good block with 2 txs, don't submit until end
        tip(55)
        b57 = block(57)
        tx = create_and_sign_tx(out[16].tx, out[16].n, 1)
        tx1 = create_tx(tx, 0, 1)
        b57 = update_block(57, [tx, tx1])

        # b56 - copy b57, add a duplicate tx
        tip(55)
        b56 = copy.deepcopy(b57)
        self.chain.blocks[56] = b56
        assert_equal(len(b56.vtx), 3)
        b56 = update_block(56, [tx1])
        assert_equal(b56.hash, b57.hash)
        yield rejected(RejectResult(16, b'bad-txns-duplicate'))

        # b57p2 - a good block with 6 tx'es, don't submit until end
        tip(55)
        b57p2 = block("57p2")
        tx = create_and_sign_tx(out[16].tx, out[16].n, 1)
        tx1 = create_tx(tx, 0, 1)
        tx2 = create_tx(tx1, 0, 1)
        tx3 = create_tx(tx2, 0, 1)
        tx4 = create_tx(tx3, 0, 1)
        b57p2 = update_block("57p2", [tx, tx1, tx2, tx3, tx4])

        # b56p2 - copy b57p2, duplicate two non-consecutive tx's
        tip(55)
        b56p2 = copy.deepcopy(b57p2)
        self.chain.blocks["b56p2"] = b56p2
        assert_equal(b56p2.hash, b57p2.hash)
        assert_equal(len(b56p2.vtx), 6)
        b56p2 = update_block("b56p2", [tx3, tx4])
        yield rejected(RejectResult(16, b'bad-txns-duplicate'))

        tip("57p2")
        yield accepted()

        tip(57)
        yield rejected()  # rejected because 57p2 seen first
        save_spendable_output()

        # Test a few invalid tx types
        #
        # -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17)
        #                                                                                    \-> ??? (17)
        #

        # tx with prevout.n out of range
        tip(57)
        b58 = block(58, spend=out[17])
        tx = CTransaction()
        assert (len(out[17].tx.vout) < 42)
        tx.vin.append(
            CTxIn(COutPoint(out[17].tx.sha256, 42), CScript([OP_TRUE]),
                  0xffffffff))
        tx.vout.append(CTxOut(0, b""))
        tx.calc_sha256()
        b58 = update_block(58, [tx])
        yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))

        # tx with output value > input value out of range
        tip(57)
        b59 = block(59)
        tx = create_and_sign_tx(out[17].tx, out[17].n, 51 * COIN)
        b59 = update_block(59, [tx])
        yield rejected(RejectResult(16, b'bad-txns-in-belowout'))

        # reset to good chain
        tip(57)
        b60 = block(60, spend=out[17])
        yield accepted()
        save_spendable_output()

        # Test BIP30
        #
        # -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17)
        #                                                                                    \-> b61 (18)
        #
        # Blocks are not allowed to contain a transaction whose id matches that of an earlier,
        # not-fully-spent transaction in the same chain. To test, make identical coinbases;
        # the second one should be rejected.
        #
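        # Both coinbases pay the same subsidy to the same pubkey, so once b61's
        # coinbase scriptSig is copied from b60's, the two coinbase transactions
        # serialize identically (asserted below) and share a txid, which BIP30
        # forbids while the earlier output is still unspent.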
        tip(60)
        b61 = block(61, spend=out[18])
        b61.vtx[0].vin[0].scriptSig = b60.vtx[0].vin[
            0].scriptSig  # equalize the coinbases
        b61.vtx[0].rehash()
        b61 = update_block(61, [])
        assert_equal(b60.vtx[0].serialize(), b61.vtx[0].serialize())
        yield rejected(RejectResult(16, b'bad-txns-BIP30'))

        # Test tx.isFinal is properly rejected (not an exhaustive tx.isFinal test, that should be in data-driven transaction tests)
        #
        #   -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17)
        #                                                                                     \-> b62 (18)
        #
        tip(60)
        b62 = block(62)
        tx = CTransaction()
        tx.nLockTime = 0xffffffff  # this locktime is non-final
        assert (out[18].n < len(out[18].tx.vout))
        tx.vin.append(CTxIn(COutPoint(out[18].tx.sha256,
                                      out[18].n)))  # don't set nSequence
        tx.vout.append(CTxOut(0, CScript([OP_TRUE])))
        assert (tx.vin[0].nSequence < 0xffffffff)
        tx.calc_sha256()
        b62 = update_block(62, [tx])
        yield rejected(RejectResult(16, b'bad-txns-nonfinal'))

        # Test a non-final coinbase is also rejected
        #
        #   -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17)
        #                                                                                     \-> b63 (-)
        #
        tip(60)
        b63 = block(63)
        b63.vtx[0].nLockTime = 0xffffffff
        b63.vtx[0].vin[0].nSequence = 0xDEADBEEF
        b63.vtx[0].rehash()
        b63 = update_block(63, [])
        yield rejected(RejectResult(16, b'bad-txns-nonfinal'))

        #  This checks that a block with a bloated VARINT between the block_header and the array of tx such that
        #  the block is > LEGACY_MAX_BLOCK_SIZE with the bloated varint, but <= LEGACY_MAX_BLOCK_SIZE without the bloated varint,
        #  does not cause a subsequent, identical block with canonical encoding to be rejected.  The test does not
        #  care whether the bloated block is accepted or rejected; it only cares that the second block is accepted.
        #
        #  What matters is that the receiving node should not reject the bloated block, and then reject the canonical
        #  block on the basis that it's the same as an already-rejected block (which would be a consensus failure.)
        #
        #  -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18)
        #                                                                                        \
        #                                                                                         b64a (18)
        #  b64a is a bloated block (non-canonical varint)
        #  b64 is a good block (same as b64a but w/ canonical varint)
        #
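        # CBrokenBlock is presumably a helper that serializes the transaction-count
        # varint non-canonically (8 bytes longer than needed, per the size assert
        # below) while leaving the header untouched, so the bloated block and the
        # canonical b64 share the same block hash.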
        tip(60)
        regular_block = block("64a", spend=out[18])

        # make it a "broken_block," with non-canonical serialization
        b64a = CBrokenBlock(regular_block)
        b64a.initialize(regular_block)
        self.chain.blocks["64a"] = b64a
        self.chain.tip = b64a
        tx = CTransaction()

        # use canonical serialization to calculate size
        script_length = LEGACY_MAX_BLOCK_SIZE - \
            len(b64a.normal_serialize()) - 69
        script_output = CScript([b'\x00' * script_length])
        tx.vout.append(CTxOut(0, script_output))
        tx.vin.append(CTxIn(COutPoint(b64a.vtx[1].sha256, 0)))
        b64a = update_block("64a", [tx])
        assert_equal(len(b64a.serialize()), LEGACY_MAX_BLOCK_SIZE + 8)
        yield TestInstance([[self.chain.tip, None]])

        # comptool workaround: to make sure b64 is delivered, manually erase
        # b64a from blockstore
        self.test.block_store.erase(b64a.sha256)

        tip(60)
        b64 = CBlock(b64a)
        b64.vtx = copy.deepcopy(b64a.vtx)
        assert_equal(b64.hash, b64a.hash)
        assert_equal(len(b64.serialize()), LEGACY_MAX_BLOCK_SIZE)
        self.chain.blocks[64] = b64
        update_block(64, [])
        yield accepted()
        save_spendable_output()

        # Spend an output created in the block itself
        #
        # -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19)
        #
        tip(64)
        b65 = block(65)
        tx1 = create_and_sign_tx(out[19].tx, out[19].n,
                                 out[19].tx.vout[0].nValue)
        tx2 = create_and_sign_tx(tx1, 0, 0)
        update_block(65, [tx1, tx2])
        yield accepted()
        save_spendable_output()

        # Attempt to spend an output created later in the same block
        #
        # -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19)
        #                                                                                    \-> b66 (20)
        tip(65)
        b66 = block(66)
        tx1 = create_and_sign_tx(out[20].tx, out[20].n,
                                 out[20].tx.vout[0].nValue)
        tx2 = create_and_sign_tx(tx1, 0, 1)
        update_block(66, [tx2, tx1])
        yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))

        # Attempt to double-spend a transaction created in a block
        #
        # -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19)
        #                                                                                    \-> b67 (20)
        #
        #
        tip(65)
        b67 = block(67)
        tx1 = create_and_sign_tx(out[20].tx, out[20].n,
                                 out[20].tx.vout[0].nValue)
        tx2 = create_and_sign_tx(tx1, 0, 1)
        tx3 = create_and_sign_tx(tx1, 0, 2)
        update_block(67, [tx1, tx2, tx3])
        yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))

        # More tests of block subsidy
        #
        # -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20)
        #                                                                                    \-> b68 (20)
        #
        # b68 - coinbase with an extra 10 satoshis,
        #       creates a tx that has 9 satoshis from out[20] go to fees
        #       this fails because the coinbase is trying to claim 1 satoshi too much in fees
        #
        # b69 - coinbase with extra 10 satoshis, and a tx that gives a 10 satoshi fee
        #       this succeeds
        #
        tip(65)
        b68 = block(68, additional_coinbase_value=10)
        tx = create_and_sign_tx(out[20].tx, out[20].n,
                                out[20].tx.vout[0].nValue - 9)
        update_block(68, [tx])
        yield rejected(RejectResult(16, b'bad-cb-amount'))

        tip(65)
        b69 = block(69, additional_coinbase_value=10)
        tx = create_and_sign_tx(out[20].tx, out[20].n,
                                out[20].tx.vout[0].nValue - 10)
        update_block(69, [tx])
        yield accepted()
        save_spendable_output()

        # Test spending the outpoint of a non-existent transaction
        #
        # -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20)
        #                                                                                    \-> b70 (21)
        #
        tip(69)
        block(70, spend=out[21])
        bogus_tx = CTransaction()
        bogus_tx.sha256 = uint256_from_str(
            b"23c70ed7c0506e9178fc1a987f40a33946d4ad4c962b5ae3a52546da53af0c5c"
        )
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(bogus_tx.sha256, 0), b"", 0xffffffff))
        tx.vout.append(CTxOut(1, b""))
        update_block(70, [tx])
        yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))

        # Test accepting an invalid block which has the same hash as a valid one (via merkle tree tricks)
        #
        #  -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20) -> b72 (21)
        #                                                                                      \-> b71 (21)
        #
        # b72 is a good block.
        # b71 is a copy of 72, but re-adds one of its transactions.  However, it has the same hash as b72.
        #
        tip(69)
        b72 = block(72)
        tx1 = create_and_sign_tx(out[21].tx, out[21].n, 2)
        tx2 = create_and_sign_tx(tx1, 0, 1)
        b72 = update_block(72, [tx1, tx2])  # now tip is 72
        b71 = copy.deepcopy(b72)
        b71.vtx.append(tx2)  # add duplicate tx2
        self.chain.block_heights[b71.sha256] = self.chain.block_heights[
            b69.sha256] + 1  # b71 builds off b69
        self.chain.blocks[71] = b71

        assert_equal(len(b71.vtx), 4)
        assert_equal(len(b72.vtx), 3)
        assert_equal(b72.sha256, b71.sha256)

        tip(71)
        yield rejected(RejectResult(16, b'bad-txns-duplicate'))
        tip(72)
        yield accepted()
        save_spendable_output()

        # Test some invalid scripts and MAX_BLOCK_SIGOPS_PER_MB
        #
        # -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20) -> b72 (21)
        #                                                                                    \-> b** (22)
        #

        # b73 - tx with excessive sigops that are placed after an excessively large script element.
        #       The purpose of the test is to make sure those sigops are counted.
        #
        #       script is a bytearray of size 20,526
        #
        #       bytearray[0-19,998]     : OP_CHECKSIG
        #       bytearray[19,999]       : OP_PUSHDATA4
        #       bytearray[20,000-20,003]: 521  (max_script_element_size_before_genesis+1, in little-endian format)
        #       bytearray[20,004-20,524]: unread data (script_element)
        #       bytearray[20,525]       : OP_CHECKSIG (this puts us over the limit)
        #
        tip(72)
        b73 = block(73)
        size = MAX_BLOCK_SIGOPS_PER_MB - 1 + \
            MAX_SCRIPT_ELEMENT_SIZE_BEFORE_GENESIS + 1 + 5 + 1
        a = bytearray([OP_CHECKSIG] * size)
        a[MAX_BLOCK_SIGOPS_PER_MB - 1] = int("4e", 16)  # OP_PUSHDATA4

        element_size = MAX_SCRIPT_ELEMENT_SIZE_BEFORE_GENESIS + 1
        a[MAX_BLOCK_SIGOPS_PER_MB] = element_size % 256
        a[MAX_BLOCK_SIGOPS_PER_MB + 1] = element_size // 256
        a[MAX_BLOCK_SIGOPS_PER_MB + 2] = 0
        a[MAX_BLOCK_SIGOPS_PER_MB + 3] = 0
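        # 521 == 0x0209, so the four PUSHDATA4 length bytes are 0x09, 0x02, 0x00,
        # 0x00 in little-endian order, which is what the assignments above write.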

        tx = create_and_sign_tx(out[22].tx, 0, 1, CScript(a))
        b73 = update_block(73, [tx])
        assert_equal(get_legacy_sigopcount_block(b73),
                     MAX_BLOCK_SIGOPS_PER_MB + 1)
        yield rejected(RejectResult(16, b'bad-blk-sigops'))

        # b74/75 - if we push an invalid script element, all previous sigops are counted,
        #          but sigops after the element are not counted.
        #
        #       The invalid script element is that the push_data indicates that
        #       there will be a large amount of data (nearly 0xffffffff bytes), but we only
        #       provide a much smaller number.  These bytes are CHECKSIGS so they would
        #       cause b75 to fail for excessive sigops, if those bytes were counted.
        #
        #       b74 fails because we put MAX_BLOCK_SIGOPS_PER_MB+1 before the element
        #       b75 succeeds because we put MAX_BLOCK_SIGOPS_PER_MB before the element
        #
        #
        tip(72)
        b74 = block(74)
        size = MAX_BLOCK_SIGOPS_PER_MB - 1 + \
            MAX_SCRIPT_ELEMENT_SIZE_BEFORE_GENESIS + 42  # total = 20,561
        a = bytearray([OP_CHECKSIG] * size)
        a[MAX_BLOCK_SIGOPS_PER_MB] = 0x4e
        a[MAX_BLOCK_SIGOPS_PER_MB + 1] = 0xfe
        a[MAX_BLOCK_SIGOPS_PER_MB + 2] = 0xff
        a[MAX_BLOCK_SIGOPS_PER_MB + 3] = 0xff
        a[MAX_BLOCK_SIGOPS_PER_MB + 4] = 0xff
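        # The PUSHDATA4 claims far more data than actually follows, so script parsing
        # (and hence sigop counting) stops at the truncated push.  This script therefore
        # contributes MAX_BLOCK_SIGOPS_PER_MB sigops; with the coinbase's one sigop the
        # block is a single sigop over the limit.  b75 moves the PUSHDATA4 one byte
        # earlier, so the block lands exactly on the limit and is accepted.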
        tx = create_and_sign_tx(out[22].tx, 0, 1, CScript(a))
        b74 = update_block(74, [tx])
        yield rejected(RejectResult(16, b'bad-blk-sigops'))

        tip(72)
        b75 = block(75)
        size = MAX_BLOCK_SIGOPS_PER_MB - 1 + MAX_SCRIPT_ELEMENT_SIZE_BEFORE_GENESIS + 42
        a = bytearray([OP_CHECKSIG] * size)
        a[MAX_BLOCK_SIGOPS_PER_MB - 1] = 0x4e
        a[MAX_BLOCK_SIGOPS_PER_MB] = 0xff
        a[MAX_BLOCK_SIGOPS_PER_MB + 1] = 0xff
        a[MAX_BLOCK_SIGOPS_PER_MB + 2] = 0xff
        a[MAX_BLOCK_SIGOPS_PER_MB + 3] = 0xff
        tx = create_and_sign_tx(out[22].tx, 0, 1, CScript(a))
        b75 = update_block(75, [tx])
        yield accepted()
        save_spendable_output()

        # Check that if we push an element filled with CHECKSIGs, they are not
        # counted
        tip(75)
        b76 = block(76)
        size = MAX_BLOCK_SIGOPS_PER_MB - 1 + MAX_SCRIPT_ELEMENT_SIZE_BEFORE_GENESIS + 1 + 5
        a = bytearray([OP_CHECKSIG] * size)
        # PUSHDATA4, but leave the following bytes as just checksigs
        a[MAX_BLOCK_SIGOPS_PER_MB - 1] = 0x4e
        tx = create_and_sign_tx(out[23].tx, 0, 1, CScript(a))
        b76 = update_block(76, [tx])
        yield accepted()
        save_spendable_output()

        # Test transaction resurrection
        #
        # -> b77 (24) -> b78 (25) -> b79 (26)
        #            \-> b80 (25) -> b81 (26) -> b82 (27)
        #
        #    b78 creates a tx, which is spent in b79. After b82, both should be in mempool
        #
        #    The tx'es must be unsigned and pass the node's mempool policy.  They are unsigned for the
        #    rather obscure reason that the Python signature code does not distinguish between
        #    Low-S and High-S values (whereas the bitcoin code has custom code which does so);
        #    as a result of which, the odds are 50% that the python code will use the right
        #    value and the transaction will be accepted into the mempool. Until we modify the
        #    test framework to support low-S signing, we are out of luck.
        #
        #    To get around this issue, we construct transactions which are not signed and which
        #    spend to OP_TRUE.  If the standard-ness rules change, this test would need to be
        #    updated.  (Perhaps to spend to a P2SH OP_TRUE script)
        #
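        # Note that tx77 is confirmed in b77, which sits below the fork point of both
        # chains, so after the re-org to b80-b82 only tx78 and tx79 (from the
        # disconnected b78 and b79) should return to the mempool.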
        tip(76)
        block(77)
        tx77 = create_and_sign_tx(out[24].tx, out[24].n, 10 * COIN)
        update_block(77, [tx77])
        yield accepted()
        save_spendable_output()

        block(78)
        tx78 = create_tx(tx77, 0, 9 * COIN)
        update_block(78, [tx78])
        yield accepted()

        block(79)
        tx79 = create_tx(tx78, 0, 8 * COIN)
        update_block(79, [tx79])
        yield accepted()

        # mempool should be empty
        assert_equal(len(self.nodes[0].getrawmempool()), 0)

        tip(77)
        block(80, spend=out[25])
        yield rejected()
        save_spendable_output()

        block(81, spend=out[26])
        yield rejected()  # other chain is same length
        save_spendable_output()

        block(82, spend=out[27])
        yield accepted()  # now this chain is longer, triggers re-org
        save_spendable_output()

        # now check that tx78 and tx79 have been put back into the peer's
        # mempool
        mempool = self.nodes[0].getrawmempool()
        assert_equal(len(mempool), 2)
        assert (tx78.hash in mempool)
        assert (tx79.hash in mempool)

        # Test invalid opcodes in dead execution paths.
        #
        #  -> b81 (26) -> b82 (27) -> b83 (28)
        #
        b83 = block(83)
        op_codes = [OP_IF, OP_INVALIDOPCODE, OP_ELSE, OP_TRUE, OP_ENDIF]
        script = CScript(op_codes)
        tx1 = create_and_sign_tx(out[28].tx, out[28].n,
                                 out[28].tx.vout[0].nValue, script)

        tx2 = create_and_sign_tx(tx1, 0, 0, CScript([OP_TRUE]))
        tx2.vin[0].scriptSig = CScript([OP_FALSE])
        tx2.rehash()
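        # tx2's scriptSig pushes OP_FALSE (an empty, false value), so OP_IF in tx1's
        # output script takes the ELSE branch; OP_INVALIDOPCODE sits only in the
        # unexecuted branch and the script finishes with OP_TRUE on the stack, so the
        # spend is valid despite the invalid opcode.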

        update_block(83, [tx1, tx2])
        yield accepted()
        save_spendable_output()

        # Reorg on/off blocks that have OP_RETURN in them (and try to spend them)
        #
        #  -> b81 (26) -> b82 (27) -> b83 (28) -> b84 (29) -> b87 (30) -> b88 (31)
        #                                    \-> b85 (29) -> b86 (30)            \-> b89a (32)
        #
        #
        b84 = block(84)
        tx1 = create_tx(out[29].tx, out[29].n, 0, CScript([OP_RETURN]))
        tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
        tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
        tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
        tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
        tx1.calc_sha256()
        sign_tx(tx1, out[29].tx, out[29].n, self.coinbase_key)
        tx1.rehash()
        tx2 = create_tx(tx1, 1, 0, CScript([OP_RETURN]))
        tx2.vout.append(CTxOut(0, CScript([OP_RETURN])))
        tx3 = create_tx(tx1, 2, 0, CScript([OP_RETURN]))
        tx3.vout.append(CTxOut(0, CScript([OP_TRUE])))
        tx4 = create_tx(tx1, 3, 0, CScript([OP_TRUE]))
        tx4.vout.append(CTxOut(0, CScript([OP_RETURN])))
        tx5 = create_tx(tx1, 4, 0, CScript([OP_RETURN]))
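        # tx1's vout[0] is the unspendable OP_RETURN output; vout[1]-vout[4] are
        # OP_TRUE outputs spent here by tx2-tx5.  Block "89a" later tries to spend
        # vout[0] itself, which must be rejected.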

        update_block(84, [tx1, tx2, tx3, tx4, tx5])
        yield accepted()
        save_spendable_output()

        tip(83)
        block(85, spend=out[29])
        yield rejected()

        block(86, spend=out[30])
        yield accepted()

        tip(84)
        block(87, spend=out[30])
        yield rejected()
        save_spendable_output()

        block(88, spend=out[31])
        yield accepted()
        save_spendable_output()

        # trying to spend the OP_RETURN output is rejected
        block("89a", spend=out[32])
        tx = create_tx(tx1, 0, 0, CScript([OP_TRUE]))
        update_block("89a", [tx])
        yield rejected()

        #  Test re-org of a week's worth of blocks (1088 blocks)
        #  This test takes a minute or two and can be accomplished in memory
        #
        if self.options.runbarelyexpensive:
            tip(88)
            LARGE_REORG_SIZE = 1088
            test1 = TestInstance(sync_every_block=False)
            spend = out[32]
            for i in range(89, LARGE_REORG_SIZE + 89):
                b = block(i, spend)
                tx = CTransaction()
                script_length = LEGACY_MAX_BLOCK_SIZE - len(b.serialize()) - 69
                script_output = CScript([b'\x00' * script_length])
                tx.vout.append(CTxOut(0, script_output))
                tx.vin.append(CTxIn(COutPoint(b.vtx[1].sha256, 0)))
                b = update_block(i, [tx])
                assert_equal(len(b.serialize()), LEGACY_MAX_BLOCK_SIZE)
                test1.blocks_and_transactions.append([self.chain.tip, True])
                save_spendable_output()
                spend = self.chain.get_spendable_output()

            yield test1
            chain1_tip = i

            # now create alt chain of same length
            tip(88)
            test2 = TestInstance(sync_every_block=False)
            for i in range(89, LARGE_REORG_SIZE + 89):
                block("alt" + str(i))
                test2.blocks_and_transactions.append([self.chain.tip, False])
            yield test2

            # extend alt chain to trigger re-org
            block("alt" + str(chain1_tip + 1))
            yield accepted()

            # ... and re-org back to the first chain
            tip(chain1_tip)
            block(chain1_tip + 1)
            yield rejected()
            block(chain1_tip + 2)
            yield accepted()

            chain1_tip += 2
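Example 3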
class FullBlockTest(ComparisonTestFramework):

    # Can either run this test as 1 node with expected answers, or two and compare them.
    # Change the "outcome" variable from each TestInstance object to only do
    # the comparison.

    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True
        self.block_heights = {}
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"horsebattery")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        self.tip = None
        self.blocks = {}

    def setup_network(self):
        self.extra_args = [['-norelaypriority']]
        self.add_nodes(self.num_nodes, self.extra_args)
        self.start_nodes()

    def add_options(self, parser):
        super().add_options(parser)
        parser.add_argument("--runbarelyexpensive",
                            dest="runbarelyexpensive",
                            default=True)

    def run_test(self):
        self.test = TestManager(self, self.options.tmpdir)
        self.test.add_all_connections(self.nodes)
        network_thread_start()
        self.test.run()

    def add_transactions_to_block(self, block, tx_list):
        for tx in tx_list:
            tx.rehash()
        block.vtx.extend(tx_list)
        block.vtx = [block.vtx[0]] + \
            sorted(block.vtx[1:], key=lambda tx: tx.get_id())

    # this is a little handier to use than the version in blocktools.py
    def create_tx(self, spend_tx, n, value, script=CScript([OP_TRUE])):
        tx = create_transaction(spend_tx, n, b"", value, script)
        return tx

    # sign a transaction, using the key we know about
    # this signs input 0 in tx, which is assumed to be spending output n in
    # spend_tx
    def sign_tx(self, tx, spend_tx, n):
        scriptPubKey = bytearray(spend_tx.vout[n].scriptPubKey)
        if (scriptPubKey[0] == OP_TRUE):  # an anyone-can-spend
            tx.vin[0].scriptSig = CScript()
            return
        sighash = SignatureHashForkId(spend_tx.vout[n].scriptPubKey, tx, 0,
                                      SIGHASH_ALL | SIGHASH_FORKID,
                                      spend_tx.vout[n].nValue)
        tx.vin[0].scriptSig = CScript([
            self.coinbase_key.sign(sighash) +
            bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))
        ])

    def create_and_sign_transaction(self,
                                    spend_tx,
                                    n,
                                    value,
                                    script=CScript([OP_TRUE])):
        tx = self.create_tx(spend_tx, n, value, script)
        self.sign_tx(tx, spend_tx, n)
        tx.rehash()
        return tx

    def next_block(self,
                   number,
                   spend=None,
                   additional_coinbase_value=0,
                   script=CScript([OP_TRUE])):
        if self.tip is None:
            base_block_hash = self.genesis_hash
            block_time = int(time.time()) + 1
        else:
            base_block_hash = self.tip.sha256
            block_time = self.tip.nTime + 1
        # First create the coinbase
        height = self.block_heights[base_block_hash] + 1
        coinbase = create_coinbase(height, self.coinbase_pubkey)
        coinbase.vout[0].nValue += additional_coinbase_value
        coinbase.rehash()
        if spend is None:
            block = create_block(base_block_hash, coinbase, block_time)
        else:
            # all but one satoshi to fees
            coinbase.vout[0].nValue += spend.tx.vout[spend.n].nValue - 1
            coinbase.rehash()
            block = create_block(base_block_hash, coinbase, block_time)
            # spend 1 satoshi
            tx = create_transaction(spend.tx, spend.n, b"", 1, script)
            self.sign_tx(tx, spend.tx, spend.n)
            self.add_transactions_to_block(block, [tx])
            block.hashMerkleRoot = block.calc_merkle_root()
        # Do PoW, which is very inexpensive on regtest
        block.solve()
        self.tip = block
        self.block_heights[block.sha256] = height
        assert number not in self.blocks
        self.blocks[number] = block
        return block

    def get_tests(self):
        self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
        self.block_heights[self.genesis_hash] = 0
        spendable_outputs = []

        # save the current tip so it can be spent by a later block
        def save_spendable_output():
            spendable_outputs.append(self.tip)

        # get an output that we previously marked as spendable
        def get_spendable_output():
            return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)

        # returns a test case that asserts that the current tip was accepted
        def accepted():
            return TestInstance([[self.tip, True]])

        # returns a test case that asserts that the current tip was rejected
        def rejected(reject=None):
            if reject is None:
                return TestInstance([[self.tip, False]])
            else:
                return TestInstance([[self.tip, reject]])

        # move the tip back to a previous block
        def tip(number):
            self.tip = self.blocks[number]

        # adds transactions to the block and updates state
        def update_block(block_number, new_transactions):
            block = self.blocks[block_number]
            self.add_transactions_to_block(block, new_transactions)
            old_sha256 = block.sha256
            block.hashMerkleRoot = block.calc_merkle_root()
            block.solve()
            # Update the internal state just like in next_block
            self.tip = block
            if block.sha256 != old_sha256:
                self.block_heights[
                    block.sha256] = self.block_heights[old_sha256]
                del self.block_heights[old_sha256]
            self.blocks[block_number] = block
            return block

        # shorthand for functions
        block = self.next_block
        create_tx = self.create_tx

        # shorthand for variables
        node = self.nodes[0]

        # Create a new block
        block(0)
        save_spendable_output()
        yield accepted()

        # Now we need that block to mature so we can spend the coinbase.
        test = TestInstance(sync_every_block=False)
        for i in range(99):
            block(5000 + i)
            test.blocks_and_transactions.append([self.tip, True])
            save_spendable_output()
        yield test

        # Collect spendable outputs now to avoid cluttering the code later on
        out = []
        for i in range(33):
            out.append(get_spendable_output())

        # P2SH
        # Build the redeem script, hash it, use hash to create the p2sh script
        redeem_script = CScript([self.coinbase_pubkey] +
                                [OP_2DUP, OP_CHECKSIGVERIFY] * 5 +
                                [OP_CHECKSIG])
        redeem_script_hash = hash160(redeem_script)
        p2sh_script = CScript([OP_HASH160, redeem_script_hash, OP_EQUAL])

        # Creates a new transaction using a p2sh transaction as input
        def spend_p2sh_tx(p2sh_tx_to_spend, output_script=CScript([OP_TRUE])):
            # Create the transaction
            spent_p2sh_tx = CTransaction()
            spent_p2sh_tx.vin.append(
                CTxIn(COutPoint(p2sh_tx_to_spend.sha256, 0), b''))
            spent_p2sh_tx.vout.append(CTxOut(1, output_script))
            # Sign the transaction using the redeem script
            sighash = SignatureHashForkId(redeem_script, spent_p2sh_tx, 0,
                                          SIGHASH_ALL | SIGHASH_FORKID,
                                          p2sh_tx_to_spend.vout[0].nValue)
            sig = self.coinbase_key.sign(sighash) + bytes(
                bytearray([SIGHASH_ALL | SIGHASH_FORKID]))
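            # A P2SH input is redeemed by supplying the signature(s) followed by
            # the serialized redeem script in the scriptSig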
            spent_p2sh_tx.vin[0].scriptSig = CScript([sig, redeem_script])
            spent_p2sh_tx.rehash()
            return spent_p2sh_tx

        # P2SH tests
        # Create a p2sh transaction
        p2sh_tx = self.create_and_sign_transaction(out[0].tx, out[0].n, 1,
                                                   p2sh_script)

        # Add the transaction to the block
        block(1)
        update_block(1, [p2sh_tx])
        yield accepted()

        # Sigops p2sh limit for the mempool test
        p2sh_sigops_limit_mempool = MAX_STANDARD_TX_SIGOPS - \
            redeem_script.GetSigOpCount(True)
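        # GetSigOpCount(True) gives the accurate sigop count of the redeem script
        # (six here: five CHECKSIGVERIFYs plus the final CHECKSIG); those count
        # against the spending transaction's sigop limit, so the padding output
        # may only carry the remainder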
        # Too many sigops in one p2sh script
        too_many_p2sh_sigops_mempool = CScript([OP_CHECKSIG] *
                                               (p2sh_sigops_limit_mempool + 1))

        # A transaction with this output script can't get into the mempool
        assert_raises_rpc_error(
            -26, RPC_TXNS_TOO_MANY_SIGOPS_ERROR, node.sendrawtransaction,
            ToHex(spend_p2sh_tx(p2sh_tx, too_many_p2sh_sigops_mempool)))

        # The transaction is rejected, so the mempool should still be empty
        assert_equal(set(node.getrawmempool()), set())

        # Max sigops in one p2sh txn
        max_p2sh_sigops_mempool = CScript([OP_CHECKSIG] *
                                          (p2sh_sigops_limit_mempool))

        # A transaction with this output script can get into the mempool
        max_p2sh_sigops_txn = spend_p2sh_tx(p2sh_tx, max_p2sh_sigops_mempool)
        max_p2sh_sigops_txn_id = node.sendrawtransaction(
            ToHex(max_p2sh_sigops_txn))
        assert_equal(set(node.getrawmempool()), {max_p2sh_sigops_txn_id})

        # Mine the transaction
        block(2, spend=out[1])
        update_block(2, [max_p2sh_sigops_txn])
        yield accepted()

        # The transaction has been mined, it's not in the mempool anymore
        assert_equal(set(node.getrawmempool()), set())
class FullBlockTest(ComparisonTestFramework):

    ''' Can either run this test as 1 node with expected answers, or two and compare them. 
        Change the "outcome" variable from each TestInstance object to only do the comparison. '''
    def __init__(self):
        self.num_nodes = 1
        self.block_heights = {}
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"horsebattery")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        self.block_time = int(time.time())+1
        self.tip = None
        self.blocks = {}

    def run_test(self):
        test = TestManager(self, self.options.tmpdir)
        test.add_all_connections(self.nodes)
        NetworkThread().start() # Start up network handling in another thread
        test.run()

    def add_transactions_to_block(self, block, tx_list):
        [ tx.rehash() for tx in tx_list ]
        block.vtx.extend(tx_list)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        return block
    
    # Create a block on top of self.tip, and advance self.tip to point to the new block.
    # If spend is specified, then 1 satoshi will be spent from it to an anyone-can-spend
    # output, and the rest will go to fees.
    def next_block(self, number, spend=None, additional_coinbase_value=0, script=None):
        if self.tip == None:
            base_block_hash = self.genesis_hash
        else:
            base_block_hash = self.tip.sha256
        # First create the coinbase
        height = self.block_heights[base_block_hash] + 1
        coinbase = create_coinbase(height, self.coinbase_pubkey)
        coinbase.vout[0].nValue += additional_coinbase_value
        if (spend != None):
            coinbase.vout[0].nValue += spend.tx.vout[spend.n].nValue - 1 # all but one satoshi to fees
        coinbase.rehash()
        block = create_block(base_block_hash, coinbase, self.block_time)
        if (spend != None):
            tx = CTransaction()
            tx.vin.append(CTxIn(COutPoint(spend.tx.sha256, spend.n), b"", 0xffffffff))  # no signature yet
            # This copies the java comparison tool testing behavior: the first
            # txout has a garbage scriptPubKey, "to make sure we're not
            # pre-verifying too much" (?)
            tx.vout.append(CTxOut(0, CScript([random.randint(0,255), height & 255])))
            if script == None:
                tx.vout.append(CTxOut(1, CScript([OP_TRUE])))
            else:
                tx.vout.append(CTxOut(1, script))
            # Now sign it if necessary
            scriptSig = b""
            scriptPubKey = bytearray(spend.tx.vout[spend.n].scriptPubKey)
            if (scriptPubKey[0] == OP_TRUE):  # looks like an anyone-can-spend
                scriptSig = CScript([OP_TRUE])
            else:
                # We have to actually sign it
                (sighash, err) = SignatureHash(spend.tx.vout[spend.n].scriptPubKey, tx, 0, SIGHASH_ALL)
                scriptSig = CScript([self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))])
            tx.vin[0].scriptSig = scriptSig
            # Now add the transaction to the block
            block = self.add_transactions_to_block(block, [tx])
        block.solve()
        self.tip = block
        self.block_heights[block.sha256] = height
        self.block_time += 1
        assert number not in self.blocks
        self.blocks[number] = block
        return block

    def get_tests(self):
        self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
        self.block_heights[self.genesis_hash] = 0
        spendable_outputs = []

        # save the current tip so it can be spent by a later block
        def save_spendable_output():
            spendable_outputs.append(self.tip)

        # get an output that we previously marked as spendable
        def get_spendable_output():
            return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)

        # returns a test case that asserts that the current tip was accepted
        def accepted():
            return TestInstance([[self.tip, True]])

        # returns a test case that asserts that the current tip was rejected
        def rejected(reject = None):
            if reject is None:
                return TestInstance([[self.tip, False]])
            else:
                return TestInstance([[self.tip, reject]])
       
        # move the tip back to a previous block
        def tip(number):
            self.tip = self.blocks[number]

        # add transactions to a block produced by next_block
        def update_block(block_number, new_transactions):
            block = self.blocks[block_number]
            old_hash = block.sha256
            self.add_transactions_to_block(block, new_transactions)
            block.solve()
            # Update the internal state just like in next_block
            self.tip = block
            self.block_heights[block.sha256] = self.block_heights[old_hash]
            del self.block_heights[old_hash]
            self.blocks[block_number] = block
            return block

        # creates a new block and advances the tip to that block
        block = self.next_block


        # Create a new block
        block(0)
        save_spendable_output()
        yield accepted()


        # Now we need that block to mature so we can spend the coinbase.
        test = TestInstance(sync_every_block=False)
        for i in range(99):
            block(1000 + i)
            test.blocks_and_transactions.append([self.tip, True])
            save_spendable_output()
        yield test


        # Start by building a couple of blocks on top (which output is spent is
        # in parentheses):
        #     genesis -> b1 (0) -> b2 (1)
        out0 = get_spendable_output()
        block(1, spend=out0)
        save_spendable_output()
        yield accepted()

        out1 = get_spendable_output()
        b2 = block(2, spend=out1)
        yield accepted()


        # Now fork like this:
        # 
        #     genesis -> b1 (0) -> b2 (1)
        #                      \-> b3 (1)
        # 
        # Nothing should happen at this point. We saw b2 first so it takes priority.
        tip(1)
        b3 = block(3, spend=out1)
        txout_b3 = PreviousSpendableOutput(b3.vtx[1], 1)
        yield rejected()


        # Now we add another block to make the alternative chain longer.
        # 
        #     genesis -> b1 (0) -> b2 (1)
        #                      \-> b3 (1) -> b4 (2)
        out2 = get_spendable_output()
        block(4, spend=out2)
        yield accepted()


        # ... and back to the first chain.
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                      \-> b3 (1) -> b4 (2)
        tip(2)
        block(5, spend=out2)
        save_spendable_output()
        yield rejected()

        out3 = get_spendable_output()
        block(6, spend=out3)
        yield accepted()


        # Try to create a fork that double-spends
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                                          \-> b7 (2) -> b8 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        block(7, spend=out2)
        yield rejected()

        out4 = get_spendable_output()
        block(8, spend=out4)
        yield rejected()


        # Try to create a block that has too much fee
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                                                    \-> b9 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(6)
        block(9, spend=out4, additional_coinbase_value=1)
        yield rejected(RejectResult(16, b'bad-cb-amount'))

        
        # Create a fork that ends in a block with too much fee (the one that causes the reorg)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b10 (3) -> b11 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        block(10, spend=out3)
        yield rejected()

        block(11, spend=out4, additional_coinbase_value=1)
        yield rejected(RejectResult(16, b'bad-cb-amount'))


        # Try again, but with a valid fork first
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b14 (5)
        #                                              (b12 added last)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        b12 = block(12, spend=out3)
        save_spendable_output()
        #yield TestInstance([[b12, False]])
        b13 = block(13, spend=out4)
        # Deliver the block header for b12, and the block b13.
        # b13 should be accepted but the tip won't advance until b12 is delivered.
        yield TestInstance([[CBlockHeader(b12), None], [b13, False]])

        save_spendable_output()
        out5 = get_spendable_output()
        # b14 is invalid, but the node won't know that until it tries to connect
        # Tip still can't advance because b12 is missing
        block(14, spend=out5, additional_coinbase_value=1)
        yield rejected()

        yield TestInstance([[b12, True, b13.sha256]]) # New tip should be b13.

        # Add a block with MAX_BLOCK_SIGOPS and one with one more sigop
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b16 (6)
        #                      \-> b3 (1) -> b4 (2)
        
        # Test that a block with a lot of checksigs is okay
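        # Legacy consensus allows one sigop per 50 bytes of block size,
        # i.e. 1,000,000 / 50 = 20,000 sigops per block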
        lots_of_checksigs = CScript([OP_CHECKSIG] * (1000000 // 50 - 1))
        tip(13)
        block(15, spend=out5, script=lots_of_checksigs)
        yield accepted()


        # Test that a block with too many checksigs is rejected
        out6 = get_spendable_output()
        too_many_checksigs = CScript([OP_CHECKSIG] * (1000000 // 50))
        block(16, spend=out6, script=too_many_checksigs)
        yield rejected(RejectResult(16, b'bad-blk-sigops'))


        # Attempt to spend a transaction created on a different fork
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b17 (b3.vtx[1])
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        block(17, spend=txout_b3)
        yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))

        # Attempt to spend a transaction created on a different fork (on a fork this time)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5)
        #                                                                \-> b18 (b3.vtx[1]) -> b19 (6)
        #                      \-> b3 (1) -> b4 (2)
        tip(13)
        block(18, spend=txout_b3)
        yield rejected()

        block(19, spend=out6)
        yield rejected()

        # Attempt to spend a coinbase at depth too low
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b20 (7)
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        out7 = get_spendable_output()
        block(20, spend=out7)
        yield rejected(RejectResult(16, b'bad-txns-premature-spend-of-coinbase'))

        # Attempt to spend a coinbase at depth too low (on a fork this time)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5)
        #                                                                \-> b21 (6) -> b22 (5)
        #                      \-> b3 (1) -> b4 (2)
        tip(13)
        block(21, spend=out6)
        yield rejected()

        block(22, spend=out5)
        yield rejected()

        # Create a block on either side of MAX_BLOCK_SIZE and make sure it's accepted/rejected
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6)
        #                                                                           \-> b24 (6) -> b25 (7)
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        b23 = block(23, spend=out6)
        old_hash = b23.sha256
        tx = CTransaction()
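        # 69 bytes accounts for the remaining serialization overhead of this
        # padding transaction (version, input, output value, locktime and length
        # prefixes), so the data push below fills the block to exactly
        # MAX_BLOCK_SIZE (verified by the assert below)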
        script_length = MAX_BLOCK_SIZE - len(b23.serialize()) - 69
        script_output = CScript([b'\x00' * script_length])
        tx.vout.append(CTxOut(0, script_output))
        tx.vin.append(CTxIn(COutPoint(b23.vtx[1].sha256, 1)))
        b23 = update_block(23, [tx])
        # Make sure the math above worked out to produce a max-sized block
        assert_equal(len(b23.serialize()), MAX_BLOCK_SIZE)
        yield accepted()

        # Make the next block one byte bigger and check that it fails
        tip(15)
        b24 = block(24, spend=out6)
        script_length = MAX_BLOCK_SIZE - len(b24.serialize()) - 69
        script_output = CScript([b'\x00' * (script_length+1)])
        tx.vout = [CTxOut(0, script_output)]
        b24 = update_block(24, [tx])
        assert_equal(len(b24.serialize()), MAX_BLOCK_SIZE+1)
        yield rejected(RejectResult(16, b'bad-blk-length'))

        b25 = block(25, spend=out7)
        yield rejected()

        # Create blocks with a coinbase input script size out of range
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7)
        #                                                                           \-> ... (6) -> ... (7)
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        b26 = block(26, spend=out6)
        b26.vtx[0].vin[0].scriptSig = b'\x00'
        b26.vtx[0].rehash()
        # update_block causes the merkle root to get updated, even with no new
        # transactions, and updates the required state.
        b26 = update_block(26, [])
        yield rejected(RejectResult(16, b'bad-cb-length'))

        # Extend the b26 chain to make sure bitcreditd isn't accepting b26
        b27 = block(27, spend=out7)
        yield rejected()

        # Now try a too-large-coinbase script
        tip(15)
        b28 = block(28, spend=out6)
        b28.vtx[0].vin[0].scriptSig = b'\x00' * 101
        b28.vtx[0].rehash()
        b28 = update_block(28, [])
        yield rejected(RejectResult(16, b'bad-cb-length'))

        # Extend the b28 chain to make sure bitcreditd isn't accepting b28
        b29 = block(29, spend=out7)
        # TODO: Should get a reject message back with "bad-prevblk", except
        # there's a bug that prevents this from being detected.  Just note
        # failure for now, and add the reject result later.
        yield rejected()

        # b30 has a max-sized coinbase scriptSig.
        tip(23)
        b30 = block(30)
        b30.vtx[0].vin[0].scriptSig = b'\x00' * 100
        b30.vtx[0].rehash()
        b30 = update_block(30, [])
        yield accepted()
Example 5
class FullBlockTest(ComparisonTestFramework):

    # Can either run this test as 1 node with expected answers, or two and compare them.
    # Change the "outcome" variable from each TestInstance object to only do
    # the comparison.

    def __init__(self):
        super().__init__()
        self.excessive_block_size = 16 * ONE_MEGABYTE
        self.num_nodes = 1
        self.block_heights = {}
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"fatstacks")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        self.tip = None
        self.blocks = {}

    def setup_network(self):
        self.extra_args = [[
            '-debug', '-norelaypriority', '-whitelist=127.0.0.1',
            '-limitancestorcount=9999', '-limitancestorsize=9999',
            '-limitdescendantcount=9999', '-limitdescendantsize=9999',
            '-maxmempool=999',
            "-uahfstarttime=%d" % UAHF_START_TIME,
            "-excessiveblocksize=%d" % self.excessive_block_size
        ]]
        self.nodes = start_nodes(self.num_nodes,
                                 self.options.tmpdir,
                                 self.extra_args,
                                 binary=[self.options.testbinary])

    def add_options(self, parser):
        super().add_options(parser)
        parser.add_option("--runbarelyexpensive",
                          dest="runbarelyexpensive",
                          default=True)

    def run_test(self):
        self.test = TestManager(self, self.options.tmpdir)
        self.test.add_all_connections(self.nodes)
        # Start up network handling in another thread
        NetworkThread().start()
        # Set the excessive block size as the initial condition
        self.nodes[0].setexcessiveblock(self.excessive_block_size)
        self.nodes[0].setmocktime(UAHF_START_TIME)
        self.test.run()

    def add_transactions_to_block(self, block, tx_list):
        [tx.rehash() for tx in tx_list]
        block.vtx.extend(tx_list)

    # this is a little handier to use than the version in blocktools.py
    def create_tx(self, spend_tx, n, value, script=CScript([OP_TRUE])):
        tx = create_transaction(spend_tx, n, b"", value, script)
        return tx

    # sign a transaction, using the key we know about
    # this signs input 0 in tx, which is assumed to be spending output n in
    # spend_tx
    def sign_tx(self, tx, spend_tx, n):
        scriptPubKey = bytearray(spend_tx.vout[n].scriptPubKey)
        if (scriptPubKey[0] == OP_TRUE):  # an anyone-can-spend
            tx.vin[0].scriptSig = CScript()
            return
        sighash = SignatureHashForkId(spend_tx.vout[n].scriptPubKey, tx, 0,
                                      SIGHASH_ALL | SIGHASH_FORKID,
                                      spend_tx.vout[n].nValue)
        tx.vin[0].scriptSig = CScript([
            self.coinbase_key.sign(sighash) +
            bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))
        ])

    def create_and_sign_transaction(self,
                                    spend_tx,
                                    n,
                                    value,
                                    script=CScript([OP_TRUE])):
        tx = self.create_tx(spend_tx, n, value, script)
        self.sign_tx(tx, spend_tx, n)
        tx.rehash()
        return tx

    def next_block(self,
                   number,
                   spend=None,
                   additional_coinbase_value=0,
                   script=None,
                   extra_sigops=0,
                   block_size=0,
                   solve=True):
        """
        Create a block on top of self.tip, and advance self.tip to point to the new block
        if spend is specified, then 1 satoshi will be spent from that to an anyone-can-spend
        output, and rest will go to fees.
        """
        if self.tip == None:
            base_block_hash = self.genesis_hash
            block_time = int(time.time()) + 1
        else:
            base_block_hash = self.tip.sha256
            block_time = self.tip.nTime + 1
        # First create the coinbase
        height = self.block_heights[base_block_hash] + 1
        coinbase = create_coinbase(height, self.coinbase_pubkey)
        coinbase.vout[0].nValue += additional_coinbase_value
        if (spend != None):
            coinbase.vout[0].nValue += spend.tx.vout[
                spend.n].nValue - 1  # all but one satoshi to fees
        coinbase.rehash()
        block = create_block(base_block_hash, coinbase, block_time)
        spendable_output = None
        if (spend != None):
            tx = CTransaction()
            tx.vin.append(
                CTxIn(COutPoint(spend.tx.sha256, spend.n), b"",
                      0xffffffff))  # no signature yet
            # We put some random data into the first transaction of the chain
            # to randomize ids
            tx.vout.append(
                CTxOut(0, CScript([random.randint(0, 255), OP_DROP, OP_TRUE])))
            if script == None:
                tx.vout.append(CTxOut(1, CScript([OP_TRUE])))
            else:
                tx.vout.append(CTxOut(1, script))
            spendable_output = PreviousSpendableOutput(tx, 0)

            # Now sign it if necessary
            scriptSig = b""
            scriptPubKey = bytearray(spend.tx.vout[spend.n].scriptPubKey)
            if (scriptPubKey[0] == OP_TRUE):  # looks like an anyone-can-spend
                scriptSig = CScript([OP_TRUE])
            else:
                # We have to actually sign it
                sighash = SignatureHashForkId(
                    spend.tx.vout[spend.n].scriptPubKey, tx, 0,
                    SIGHASH_ALL | SIGHASH_FORKID,
                    spend.tx.vout[spend.n].nValue)
                scriptSig = CScript([
                    self.coinbase_key.sign(sighash) +
                    bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))
                ])
            tx.vin[0].scriptSig = scriptSig
            # Now add the transaction to the block
            self.add_transactions_to_block(block, [tx])
            block.hashMerkleRoot = block.calc_merkle_root()
        if spendable_output != None and block_size > 0:
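            # Pad the block with a chain of transactions carrying large data
            # scripts (plus any requested extra sigops) until it serializes to
            # exactly block_size; each padding tx spends the previous one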
            while len(block.serialize()) < block_size:
                tx = CTransaction()
                script_length = block_size - len(block.serialize()) - 79
                if script_length > 510000:
                    script_length = 500000
                tx_sigops = min(extra_sigops, script_length,
                                MAX_TX_SIGOPS_COUNT)
                extra_sigops -= tx_sigops
                script_pad_len = script_length - tx_sigops
                script_output = CScript([b'\x00' * script_pad_len] +
                                        [OP_CHECKSIG] * tx_sigops)
                tx.vout.append(CTxOut(0, CScript([OP_TRUE])))
                tx.vout.append(CTxOut(0, script_output))
                tx.vin.append(
                    CTxIn(
                        COutPoint(spendable_output.tx.sha256,
                                  spendable_output.n)))
                spendable_output = PreviousSpendableOutput(tx, 0)
                self.add_transactions_to_block(block, [tx])
            block.hashMerkleRoot = block.calc_merkle_root()
            # Make sure the math above worked out to produce the correct block size
            # (the math will fail if there are too many transactions in the block)
            assert_equal(len(block.serialize()), block_size)
            # Make sure all the requested sigops have been included
            assert_equal(extra_sigops, 0)
        if solve:
            block.solve()
        self.tip = block
        self.block_heights[block.sha256] = height
        assert number not in self.blocks
        self.blocks[number] = block
        return block

    def get_tests(self):
        self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
        self.block_heights[self.genesis_hash] = 0
        spendable_outputs = []

        # save the current tip so it can be spent by a later block
        def save_spendable_output():
            spendable_outputs.append(self.tip)

        # get an output that we previously marked as spendable
        def get_spendable_output():
            return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)

        # returns a test case that asserts that the current tip was accepted
        def accepted():
            return TestInstance([[self.tip, True]])

        # returns a test case that asserts that the current tip was rejected
        def rejected(reject=None):
            if reject is None:
                return TestInstance([[self.tip, False]])
            else:
                return TestInstance([[self.tip, reject]])

        # move the tip back to a previous block
        def tip(number):
            self.tip = self.blocks[number]

        # adds transactions to the block and updates state
        def update_block(block_number, new_transactions):
            block = self.blocks[block_number]
            self.add_transactions_to_block(block, new_transactions)
            old_sha256 = block.sha256
            block.hashMerkleRoot = block.calc_merkle_root()
            block.solve()
            # Update the internal state just like in next_block
            self.tip = block
            if block.sha256 != old_sha256:
                self.block_heights[
                    block.sha256] = self.block_heights[old_sha256]
                del self.block_heights[old_sha256]
            self.blocks[block_number] = block
            return block

        # shorthand for functions
        block = self.next_block

        # Create a new block
        block(0)
        save_spendable_output()
        yield accepted()

        # Now we need that block to mature so we can spend the coinbase.
        test = TestInstance(sync_every_block=False)
        for i in range(99):
            block(5000 + i)
            test.blocks_and_transactions.append([self.tip, True])
            save_spendable_output()
        yield test

        # In order to trigger the HF, we need one block past activation time
        bfork = block(5555)
        bfork.nTime = UAHF_START_TIME
        update_block(5555, [])
        save_spendable_output()
        yield accepted()

        # Then we pile up 5 blocks to move the median time-past (MTP) forward and trigger the HF
        for i in range(5):
            block(5100 + i)
            test.blocks_and_transactions.append([self.tip, True])
            save_spendable_output()
        yield test

        # Create a new block and activate the fork. The block needs to be
        # > 1MB. For more specific tests about the fork activation, check
        # abc-p2p-activation.py
        block(5556,
              spend=get_spendable_output(),
              block_size=LEGACY_MAX_BLOCK_SIZE + 1)
        yield accepted()

        # collect spendable outputs now to avoid cluttering the code later on
        out = []
        for i in range(100):
            out.append(get_spendable_output())

        # Let's build some blocks and test them.
        for i in range(16):
            n = i + 1
            block(n, spend=out[i], block_size=n * ONE_MEGABYTE)
            yield accepted()

        # block of maximal size
        block(17, spend=out[16], block_size=self.excessive_block_size)
        yield accepted()

        # Reject oversized blocks with bad-blk-length error
        block(18, spend=out[17], block_size=self.excessive_block_size + 1)
        yield rejected(RejectResult(16, b'bad-blk-length'))

        # Rewind bad block.
        tip(17)

        # Accept many sigops
        lots_of_checksigs = CScript([OP_CHECKSIG] *
                                    (MAX_BLOCK_SIGOPS_PER_MB - 1))
        block(19,
              spend=out[17],
              script=lots_of_checksigs,
              block_size=ONE_MEGABYTE)
        yield accepted()

        too_many_blk_checksigs = CScript([OP_CHECKSIG] *
                                         MAX_BLOCK_SIGOPS_PER_MB)
        block(20,
              spend=out[18],
              script=too_many_blk_checksigs,
              block_size=ONE_MEGABYTE)
        yield rejected(RejectResult(16, b'bad-blk-sigops'))

        # Rewind bad block
        tip(19)

        # Accept 40k sigops per block > 1MB and <= 2MB
        block(21,
              spend=out[18],
              script=lots_of_checksigs,
              extra_sigops=MAX_BLOCK_SIGOPS_PER_MB,
              block_size=ONE_MEGABYTE + 1)
        yield accepted()

        # Accept 40k sigops per block > 1MB and <= 2MB
        block(22,
              spend=out[19],
              script=lots_of_checksigs,
              extra_sigops=MAX_BLOCK_SIGOPS_PER_MB,
              block_size=2 * ONE_MEGABYTE)
        yield accepted()

        # Reject more than 40k sigops per block > 1MB and <= 2MB.
        block(23,
              spend=out[20],
              script=lots_of_checksigs,
              extra_sigops=MAX_BLOCK_SIGOPS_PER_MB + 1,
              block_size=ONE_MEGABYTE + 1)
        yield rejected(RejectResult(16, b'bad-blk-sigops'))

        # Rewind bad block
        tip(22)

        # Reject more than 40k sigops per block > 1MB and <= 2MB.
        block(24,
              spend=out[20],
              script=lots_of_checksigs,
              extra_sigops=MAX_BLOCK_SIGOPS_PER_MB + 1,
              block_size=2 * ONE_MEGABYTE)
        yield rejected(RejectResult(16, b'bad-blk-sigops'))

        # Rewind bad block
        tip(22)

        # Accept 60k sigops per block > 2MB and <= 3MB
        block(25,
              spend=out[20],
              script=lots_of_checksigs,
              extra_sigops=2 * MAX_BLOCK_SIGOPS_PER_MB,
              block_size=2 * ONE_MEGABYTE + 1)
        yield accepted()

        # Accept 60k sigops per block > 2MB and <= 3MB
        block(26,
              spend=out[21],
              script=lots_of_checksigs,
              extra_sigops=2 * MAX_BLOCK_SIGOPS_PER_MB,
              block_size=3 * ONE_MEGABYTE)
        yield accepted()

        # Reject more than 60k sigops per block > 2MB and <= 3MB.
        block(27,
              spend=out[22],
              script=lots_of_checksigs,
              extra_sigops=2 * MAX_BLOCK_SIGOPS_PER_MB + 1,
              block_size=2 * ONE_MEGABYTE + 1)
        yield rejected(RejectResult(16, b'bad-blk-sigops'))

        # Rewind bad block
        tip(26)

        # Reject more than 60k sigops per block > 2MB and <= 3MB.
        block(28,
              spend=out[22],
              script=lots_of_checksigs,
              extra_sigops=2 * MAX_BLOCK_SIGOPS_PER_MB + 1,
              block_size=3 * ONE_MEGABYTE)
        yield rejected(RejectResult(16, b'bad-blk-sigops'))

        # Rewind bad block
        tip(26)

        # Too many sigops in one txn
        too_many_tx_checksigs = CScript([OP_CHECKSIG] *
                                        (MAX_BLOCK_SIGOPS_PER_MB + 1))
        block(29,
              spend=out[22],
              script=too_many_tx_checksigs,
              block_size=ONE_MEGABYTE + 1)
        yield rejected(RejectResult(16, b'bad-txn-sigops'))

        # Rewind bad block
        tip(26)

        # P2SH
        # Build the redeem script, hash it, use hash to create the p2sh script
        redeem_script = CScript([self.coinbase_pubkey] +
                                [OP_2DUP, OP_CHECKSIGVERIFY] * 5 +
                                [OP_CHECKSIG])
        redeem_script_hash = hash160(redeem_script)
        p2sh_script = CScript([OP_HASH160, redeem_script_hash, OP_EQUAL])

        # Create a p2sh transaction
        p2sh_tx = self.create_and_sign_transaction(out[22].tx, out[22].n, 1,
                                                   p2sh_script)

        # Add the transaction to the block
        block(30)
        update_block(30, [p2sh_tx])
        yield accepted()

        # Creates a new transaction using the p2sh transaction included in the
        # last block
        def spend_p2sh_tx(output_script=CScript([OP_TRUE])):
            # Create the transaction
            spent_p2sh_tx = CTransaction()
            spent_p2sh_tx.vin.append(CTxIn(COutPoint(p2sh_tx.sha256, 0), b''))
            spent_p2sh_tx.vout.append(CTxOut(1, output_script))
            # Sign the transaction using the redeem script
            sighash = SignatureHashForkId(redeem_script, spent_p2sh_tx, 0,
                                          SIGHASH_ALL | SIGHASH_FORKID,
                                          p2sh_tx.vout[0].nValue)
            sig = self.coinbase_key.sign(sighash) + bytes(
                bytearray([SIGHASH_ALL | SIGHASH_FORKID]))
            spent_p2sh_tx.vin[0].scriptSig = CScript([sig, redeem_script])
            spent_p2sh_tx.rehash()
            return spent_p2sh_tx

        # Sigops p2sh limit
        p2sh_sigops_limit = MAX_BLOCK_SIGOPS_PER_MB - \
            redeem_script.GetSigOpCount(True)
        # Too many sigops in one p2sh txn
        too_many_p2sh_sigops = CScript([OP_CHECKSIG] * (p2sh_sigops_limit + 1))
        block(31, spend=out[23], block_size=ONE_MEGABYTE + 1)
        update_block(31, [spend_p2sh_tx(too_many_p2sh_sigops)])
        yield rejected(RejectResult(16, b'bad-txn-sigops'))

        # Rewind bad block
        tip(30)

        # Max sigops in one p2sh txn
        max_p2sh_sigops = CScript([OP_CHECKSIG] * (p2sh_sigops_limit))
        block(32, spend=out[23], block_size=ONE_MEGABYTE + 1)
        update_block(32, [spend_p2sh_tx(max_p2sh_sigops)])
        yield accepted()

        # Check that compact blocks also work for big blocks
        node = self.nodes[0]
        peer = TestNode()
        peer.add_connection(NodeConn('127.0.0.1', p2p_port(0), node, peer))

        # Start up network handling in another thread and wait for the
        # connection to be established
        NetworkThread().start()
        peer.wait_for_verack()

        # Wait for SENDCMPCT
        def received_sendcmpct():
            return (peer.last_sendcmpct != None)

        got_sendcmpct = wait_until(received_sendcmpct, timeout=30)
        assert (got_sendcmpct)

        sendcmpct = msg_sendcmpct()
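        # Request high-bandwidth relay: version 1 is the pre-segwit compact
        # block protocol and announce=True asks the peer to push new blocks
        # as cmpctblock messages (BIP 152)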
        sendcmpct.version = 1
        sendcmpct.announce = True
        peer.send_and_ping(sendcmpct)

        # Exchange headers
        def received_getheaders():
            return (peer.last_getheaders != None)

        got_getheaders = wait_until(received_getheaders, timeout=30)
        assert (got_getheaders)

        # Return the favor
        peer.send_message(peer.last_getheaders)

        # Wait for the header list
        def received_headers():
            return (peer.last_headers != None)

        got_headers = wait_until(received_headers, timeout=30)
        assert (got_headers)

        # Send the headers back so both sides know about the same headers
        peer.send_message(peer.last_headers)

        # Send a block
        b33 = block(33, spend=out[24], block_size=ONE_MEGABYTE + 1)
        yield accepted()

        # Check that the node forwards it via compact block
        def received_block():
            return (peer.last_cmpctblock != None)

        got_cmpctblock = wait_until(received_block, timeout=30)
        assert (got_cmpctblock)

        # Was it our block?
        cmpctblk_header = peer.last_cmpctblock.header_and_shortids.header
        cmpctblk_header.calc_sha256()
        assert (cmpctblk_header.sha256 == b33.sha256)

        # Send a bigger block
        peer.clear_block_data()
        b34 = block(34, spend=out[25], block_size=8 * ONE_MEGABYTE)
        yield accepted()

        # Check that the node forwards it via compact block
        got_cmpctblock = wait_until(received_block, timeout=30)
        assert (got_cmpctblock)

        # Was it our block?
        cmpctblk_header = peer.last_cmpctblock.header_and_shortids.header
        cmpctblk_header.calc_sha256()
        assert (cmpctblk_header.sha256 == b34.sha256)

        # Let's send a compact block and see if the node accepts it.
        # First, we generate the block and send all its transactions to the mempool
        b35 = block(35, spend=out[26], block_size=8 * ONE_MEGABYTE)
        for i in range(1, len(b35.vtx)):
            node.sendrawtransaction(ToHex(b35.vtx[i]), True)

        # Now we create the compact block and send it
        comp_block = HeaderAndShortIDs()
        comp_block.initialize_from_block(b35)
        peer.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))

        # Check that the compact block was accepted properly
        assert (int(node.getbestblockhash(), 16) == b35.sha256)
Example 6
class CompactBlocksTest(BitcoinTestFramework):
    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 1
        self.extra_args = [[
            "-acceptnonstdtxn=1",
        ]]
        self.utxos = []
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"horsebattery")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()

    def build_block_on_tip(self, node, segwit=False):
        height = node.getblockcount()
        tip = node.getbestblockhash()
        mtp = node.getblockheader(tip)['mediantime']
        block = create_block(
            int(tip, 16), create_coinbase(height + 1, self.coinbase_pubkey,
                                          mtp), mtp + 1)
        block.nVersion = 4
        if segwit:
            add_witness_commitment(block)
        block.solve()
        block.vchBlockSig = self.coinbase_key.sign(
            bytes.fromhex(block.hash)[::-1])
        return block

    # Create 10 more anyone-can-spend UTXOs for testing.
    def make_utxos(self):
        block = self.build_block_on_tip(self.nodes[0])
        self.segwit_node.send_and_ping(msg_no_witness_block(block))
        assert int(self.nodes[0].getbestblockhash(), 16) == block.sha256
        self.nodes[0].generatetoaddress(
            100, self.nodes[0].getnewaddress(address_type="bech32"))

        total_value = block.vtx[0].vout[0].nValue
        out_value = total_value // 10
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(block.vtx[0].sha256, 0), b''))
        for i in range(10):
            tx.vout.append(CTxOut(out_value, CScript([OP_TRUE])))
        tx.rehash()

        block2 = self.build_block_on_tip(self.nodes[0])
        block2.vtx.append(tx)
        block2.hashMerkleRoot = block2.calc_merkle_root()
        block2.solve()
        self.segwit_node.send_and_ping(msg_no_witness_block(block2))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), block2.sha256)
        self.utxos.extend([[tx.sha256, i, out_value] for i in range(10)])

    # Test "sendcmpct" (between peers preferring the same version):
    # - No compact block announcements unless sendcmpct is sent.
    # - If sendcmpct is sent with version > preferred_version, the message is ignored.
    # - If sendcmpct is sent with boolean 0, then block announcements are not
    #   made with compact blocks.
    # - If sendcmpct is then sent with boolean 1, then new block announcements
    #   are made with compact blocks.
    # If old_node is passed in, request compact blocks with version=preferred-1
    # and verify that it receives block announcements via compact block.
    def test_sendcmpct(self, test_node, old_node=None):
        preferred_version = test_node.cmpct_version
        node = self.nodes[0]

        # Make sure we get a SENDCMPCT message from our peer
        def received_sendcmpct():
            return (len(test_node.last_sendcmpct) > 0)

        wait_until(received_sendcmpct, timeout=30, lock=mininode_lock)
        with mininode_lock:
            # Check that the first version received is the preferred one
            assert_equal(test_node.last_sendcmpct[0].version,
                         preferred_version)
            # And that we receive versions down to 1.
            assert_equal(test_node.last_sendcmpct[-1].version, 1)
            test_node.last_sendcmpct = []

        tip = int(node.getbestblockhash(), 16)

        def check_announcement_of_new_block(node, peer, predicate):
            peer.clear_block_announcement()
            block_hash = int(node.generate(1)[0], 16)
            peer.wait_for_block_announcement(block_hash, timeout=30)
            assert peer.block_announced

            with mininode_lock:
                assert predicate(peer), (
                    "block_hash={!r}, cmpctblock={!r}, inv={!r}".format(
                        block_hash, peer.last_message.get("cmpctblock", None),
                        peer.last_message.get("inv", None)))

        # We shouldn't get any block announcements via cmpctblock yet.
        check_announcement_of_new_block(
            node, test_node, lambda p: "cmpctblock" not in p.last_message)

        # Try one more time, this time after requesting headers.
        test_node.request_headers_and_sync(locator=[tip])
        check_announcement_of_new_block(
            node, test_node, lambda p: "cmpctblock" not in p.last_message and
            "inv" in p.last_message)

        # Test a few ways of using sendcmpct that should NOT
        # result in compact block announcements.
        # Before each test, sync the headers chain.
        test_node.request_headers_and_sync(locator=[tip])

        # Now try a SENDCMPCT message with too-high version
        sendcmpct = msg_sendcmpct()
        sendcmpct.version = preferred_version + 1
        sendcmpct.announce = True
        test_node.send_and_ping(sendcmpct)
        check_announcement_of_new_block(
            node, test_node, lambda p: "cmpctblock" not in p.last_message)

        # Headers sync before next test.
        test_node.request_headers_and_sync(locator=[tip])

        # Now try a SENDCMPCT message with valid version, but announce=False
        sendcmpct.version = preferred_version
        sendcmpct.announce = False
        test_node.send_and_ping(sendcmpct)
        check_announcement_of_new_block(
            node, test_node, lambda p: "cmpctblock" not in p.last_message)

        # Headers sync before next test.
        test_node.request_headers_and_sync(locator=[tip])

        # Finally, try a SENDCMPCT message with announce=True
        sendcmpct.version = preferred_version
        sendcmpct.announce = True
        test_node.send_and_ping(sendcmpct)
        check_announcement_of_new_block(
            node, test_node, lambda p: "cmpctblock" in p.last_message)

        # Try one more time (no headers sync should be needed!)
        check_announcement_of_new_block(
            node, test_node, lambda p: "cmpctblock" in p.last_message)

        # Try one more time, after turning on sendheaders
        test_node.send_and_ping(msg_sendheaders())
        check_announcement_of_new_block(
            node, test_node, lambda p: "cmpctblock" in p.last_message)

        # Try one more time, after sending a version-1, announce=false message.
        sendcmpct.version = preferred_version - 1
        sendcmpct.announce = False
        test_node.send_and_ping(sendcmpct)
        check_announcement_of_new_block(
            node, test_node, lambda p: "cmpctblock" in p.last_message)

        # Now turn off announcements
        sendcmpct.version = preferred_version
        sendcmpct.announce = False
        test_node.send_and_ping(sendcmpct)
        check_announcement_of_new_block(
            node, test_node, lambda p: "cmpctblock" not in p.last_message and
            "headers" in p.last_message)

        if old_node is not None:
            # Verify that a peer using an older protocol version can receive
            # announcements from this node.
            sendcmpct.version = preferred_version - 1
            sendcmpct.announce = True
            old_node.send_and_ping(sendcmpct)
            # Header sync
            old_node.request_headers_and_sync(locator=[tip])
            check_announcement_of_new_block(
                node, old_node, lambda p: "cmpctblock" in p.last_message)

    # This test actually causes bitcoind to (reasonably!) disconnect us, so do this last.
    def test_invalid_cmpctblock_message(self):
        self.nodes[0].generate(101)
        block = self.build_block_on_tip(self.nodes[0])

        cmpct_block = P2PHeaderAndShortIDs()
        cmpct_block.header = CBlockHeader(block)
        cmpct_block.prefilled_txn_length = 1
        # This index will be too high
        prefilled_txn = PrefilledTransaction(1, block.vtx[0])
        cmpct_block.prefilled_txn = [prefilled_txn]
        self.segwit_node.send_await_disconnect(msg_cmpctblock(cmpct_block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16),
                     block.hashPrevBlock)

    # Compare the generated shortids to what we expect based on BIP 152, given
    # bitcoind's choice of nonce.
    def test_compactblock_construction(self,
                                       test_node,
                                       use_witness_address=True):
        version = test_node.cmpct_version
        node = self.nodes[0]
        # Generate a bunch of transactions.
        node.generate(101)
        num_transactions = 25
        address = node.getnewaddress()

        segwit_tx_generated = False
        for i in range(num_transactions):
            txid = node.sendtoaddress(address, 0.1)
            hex_tx = node.gettransaction(txid)["hex"]
            tx = FromHex(CTransaction(), hex_tx)
            if not tx.wit.is_null():
                segwit_tx_generated = True

        if use_witness_address:
            assert segwit_tx_generated  # check that our test is not broken

        # Wait until we've seen the block announcement for the resulting tip
        tip = int(node.getbestblockhash(), 16)
        test_node.wait_for_block_announcement(tip)

        # Make sure we will receive a fast-announce compact block
        self.request_cb_announcements(test_node)

        # Now mine a block, and look at the resulting compact block.
        test_node.clear_block_announcement()
        block_hash = int(node.generate(1)[0], 16)

        # Store the raw block in our internal format.
        block = FromHex(CBlock(), node.getblock("%064x" % block_hash, False))
        for tx in block.vtx:
            tx.calc_sha256()
        block.rehash()

        # Wait until the block was announced (via compact blocks)
        wait_until(test_node.received_block_announcement,
                   timeout=30,
                   lock=mininode_lock)

        # Now fetch and check the compact block
        header_and_shortids = None
        with mininode_lock:
            assert "cmpctblock" in test_node.last_message
            # Convert the on-the-wire representation to absolute indexes
            header_and_shortids = HeaderAndShortIDs(
                test_node.last_message["cmpctblock"].header_and_shortids)
        self.check_compactblock_construction_from_block(
            version, header_and_shortids, block_hash, block)

        # Now fetch the compact block using a normal non-announce getdata
        with mininode_lock:
            test_node.clear_block_announcement()
            inv = CInv(4, block_hash)  # 4 == "CompactBlock"
            test_node.send_message(msg_getdata([inv]))

        wait_until(test_node.received_block_announcement,
                   timeout=30,
                   lock=mininode_lock)

        # Now fetch and check the compact block
        header_and_shortids = None
        with mininode_lock:
            assert "cmpctblock" in test_node.last_message
            # Convert the on-the-wire representation to absolute indexes
            header_and_shortids = HeaderAndShortIDs(
                test_node.last_message["cmpctblock"].header_and_shortids)
        self.check_compactblock_construction_from_block(
            version, header_and_shortids, block_hash, block)

    def check_compactblock_construction_from_block(self, version,
                                                   header_and_shortids,
                                                   block_hash, block):
        # Check that we got the right block!
        header_and_shortids.header.calc_sha256()
        assert_equal(header_and_shortids.header.sha256, block_hash)

        # Make sure the prefilled_txn appears to have included the coinbase
        assert len(header_and_shortids.prefilled_txn) >= 1
        assert_equal(header_and_shortids.prefilled_txn[0].index, 0)

        # Check that all prefilled_txn entries match what's in the block.
        for entry in header_and_shortids.prefilled_txn:
            entry.tx.calc_sha256()
            # This checks the non-witness parts of the tx agree
            assert_equal(entry.tx.sha256, block.vtx[entry.index].sha256)

            # And this checks the witness
            wtxid = entry.tx.calc_sha256(True)
            if version == 2:
                assert_equal(wtxid, block.vtx[entry.index].calc_sha256(True))
            else:
                # Shouldn't have received a witness
                assert entry.tx.wit.is_null()

        # Check that the cmpctblock message announced all the transactions.
        assert_equal(
            len(header_and_shortids.prefilled_txn) +
            len(header_and_shortids.shortids), len(block.vtx))

        # And now check that all the shortids are as expected as well.
        # Determine the siphash keys to use.
        [k0, k1] = header_and_shortids.get_siphash_keys()
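        # Per BIP 152, k0/k1 are the first two little-endian uint64s of
        # SHA256(block header || nonce); each shortid is SipHash-2-4 of the
        # (w)txid truncated to its 6 least significant bytes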

        index = 0
        while index < len(block.vtx):
            if (len(header_and_shortids.prefilled_txn) > 0
                    and header_and_shortids.prefilled_txn[0].index == index):
                # Already checked prefilled transactions above
                header_and_shortids.prefilled_txn.pop(0)
            else:
                tx_hash = block.vtx[index].sha256
                if version == 2:
                    tx_hash = block.vtx[index].calc_sha256(True)
                shortid = calculate_shortid(k0, k1, tx_hash)
                assert_equal(shortid, header_and_shortids.shortids[0])
                header_and_shortids.shortids.pop(0)
            index += 1

    # Test that bitcoind requests compact blocks when we announce new blocks
    # via header or inv, and that responding to getblocktxn causes the block
    # to be successfully reconstructed.
    # Post-segwit: upgraded nodes would only make this request of cb-version-2,
    # NODE_WITNESS peers.  Unupgraded nodes would still make this request of
    # any cb-version-1-supporting peer.
    def test_compactblock_requests(self, test_node, segwit=True):
        version = test_node.cmpct_version
        node = self.nodes[0]
        # Try announcing a block with an inv or header, expect a compactblock
        # request
        for announce in ["inv", "header"]:
            block = self.build_block_on_tip(node, segwit=segwit)
            with mininode_lock:
                test_node.last_message.pop("getdata", None)

            if announce == "inv":
                test_node.send_message(msg_inv([CInv(2, block.sha256)]))
                wait_until(lambda: "getheaders" in test_node.last_message,
                           timeout=30,
                           lock=mininode_lock)
                test_node.send_header_for_blocks([block])
            else:
                test_node.send_header_for_blocks([block])
            wait_until(lambda: "getdata" in test_node.last_message,
                       timeout=30,
                       lock=mininode_lock)
            assert_equal(len(test_node.last_message["getdata"].inv), 1)
            assert_equal(test_node.last_message["getdata"].inv[0].type, 4)
            assert_equal(test_node.last_message["getdata"].inv[0].hash,
                         block.sha256)

            # Send back a compactblock message that omits the coinbase
            comp_block = HeaderAndShortIDs()
            comp_block.header = CBlockHeader(block)
            comp_block.nonce = 0
            [k0, k1] = comp_block.get_siphash_keys()
            coinbase_hash = block.vtx[0].sha256
            if version == 2:
                coinbase_hash = block.vtx[0].calc_sha256(True)
            comp_block.shortids = [calculate_shortid(k0, k1, coinbase_hash)]
            test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))
            assert_equal(int(node.getbestblockhash(), 16), block.hashPrevBlock)
            # Expect a getblocktxn message.
            with mininode_lock:
                assert "getblocktxn" in test_node.last_message
                absolute_indexes = test_node.last_message[
                    "getblocktxn"].block_txn_request.to_absolute()
            assert_equal(absolute_indexes, [0])  # should be a coinbase request

            # Send the coinbase, and verify that the tip advances.
            if version == 2:
                msg = msg_blocktxn()
            else:
                msg = msg_no_witness_blocktxn()
            msg.block_transactions.blockhash = block.sha256
            msg.block_transactions.transactions = [block.vtx[0]]
            test_node.send_and_ping(msg)
            assert_equal(int(node.getbestblockhash(), 16), block.sha256)

    # Create a chain of transactions from given utxo, and add to a new block.
    def build_block_with_transactions(self, node, utxo, num_transactions):
        block = self.build_block_on_tip(node)

        for i in range(num_transactions):
            tx = CTransaction()
            tx.vin.append(CTxIn(COutPoint(utxo[0], utxo[1]), b''))
            tx.vout.append(
                CTxOut(utxo[2] - 1000,
                       CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE])))
            tx.rehash()
            utxo = [tx.sha256, 0, tx.vout[0].nValue]
            block.vtx.append(tx)

        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()
        return block
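    # Illustrative usage of the helper above (not part of the test flow): spend
    # one tracked utxo into a short chain and submit the whole block directly.
    # The method name is only for illustration.
    def example_submit_chained_block(self, peer):
        utxo = self.utxos.pop(0)
        block = self.build_block_with_transactions(self.nodes[0], utxo, 3)
        # Track the last output so later tests can keep spending from it.
        self.utxos.append(
            [block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
        peer.send_and_ping(msg_block(block))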

    # Test that we only receive getblocktxn requests for transactions that the
    # node needs, and that responding to them causes the block to be
    # reconstructed.
    def test_getblocktxn_requests(self, test_node):
        version = test_node.cmpct_version
        node = self.nodes[0]
        with_witness = (version == 2)

        def test_getblocktxn_response(compact_block, peer, expected_result):
            msg = msg_cmpctblock(compact_block.to_p2p())
            peer.send_and_ping(msg)
            with mininode_lock:
                assert "getblocktxn" in peer.last_message
                absolute_indexes = peer.last_message[
                    "getblocktxn"].block_txn_request.to_absolute()
            assert_equal(absolute_indexes, expected_result)

        def test_tip_after_message(node, peer, msg, tip):
            peer.send_and_ping(msg)
            assert_equal(int(node.getbestblockhash(), 16), tip)

        # First try announcing compactblocks that won't reconstruct, and verify
        # that we receive getblocktxn messages back.
        utxo = self.utxos.pop(0)

        block = self.build_block_with_transactions(node, utxo, 5)
        self.utxos.append(
            [block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
        comp_block = HeaderAndShortIDs()
        comp_block.initialize_from_block(block, use_witness=with_witness)

        test_getblocktxn_response(comp_block, test_node, [1, 2, 3, 4, 5])

        msg_bt = msg_no_witness_blocktxn()
        if with_witness:
            msg_bt = msg_blocktxn()  # serialize with witnesses
        msg_bt.block_transactions = BlockTransactions(block.sha256,
                                                      block.vtx[1:])
        test_tip_after_message(node, test_node, msg_bt, block.sha256)

        utxo = self.utxos.pop(0)
        block = self.build_block_with_transactions(node, utxo, 5)
        self.utxos.append(
            [block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])

        # Now try interspersing the prefilled transactions
        comp_block.initialize_from_block(block,
                                         prefill_list=[0, 1, 5],
                                         use_witness=with_witness)
        test_getblocktxn_response(comp_block, test_node, [2, 3, 4])
        msg_bt.block_transactions = BlockTransactions(block.sha256,
                                                      block.vtx[2:5])
        test_tip_after_message(node, test_node, msg_bt, block.sha256)

        # Now try giving one transaction ahead of time.
        utxo = self.utxos.pop(0)
        block = self.build_block_with_transactions(node, utxo, 5)
        self.utxos.append(
            [block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
        test_node.send_and_ping(msg_tx(block.vtx[1]))
        assert block.vtx[1].hash in node.getrawmempool()

        # Prefill 4 out of the 6 transactions, and verify that only the one
        # that was not in the mempool is requested.
        comp_block.initialize_from_block(block,
                                         prefill_list=[0, 2, 3, 4],
                                         use_witness=with_witness)
        test_getblocktxn_response(comp_block, test_node, [5])

        msg_bt.block_transactions = BlockTransactions(block.sha256,
                                                      [block.vtx[5]])
        test_tip_after_message(node, test_node, msg_bt, block.sha256)

        # Now provide all transactions to the node before the block is
        # announced and verify reconstruction happens immediately.
        utxo = self.utxos.pop(0)
        block = self.build_block_with_transactions(node, utxo, 10)
        self.utxos.append(
            [block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
        for tx in block.vtx[1:]:
            test_node.send_message(msg_tx(tx))
        test_node.sync_with_ping()
        # Make sure all transactions were accepted.
        mempool = node.getrawmempool()
        for tx in block.vtx[1:]:
            assert tx.hash in mempool

        # Clear out last request.
        with mininode_lock:
            test_node.last_message.pop("getblocktxn", None)

        # Send compact block
        comp_block.initialize_from_block(block,
                                         prefill_list=[0],
                                         use_witness=with_witness)
        test_tip_after_message(node, test_node,
                               msg_cmpctblock(comp_block.to_p2p()),
                               block.sha256)
        with mininode_lock:
            # Shouldn't have gotten a request for any transaction
            assert "getblocktxn" not in test_node.last_message

    # Incorrectly responding to a getblocktxn shouldn't cause the block to be
    # permanently failed.
    def test_incorrect_blocktxn_response(self, test_node):
        version = test_node.cmpct_version
        node = self.nodes[0]
        utxo = self.utxos.pop(0)

        block = self.build_block_with_transactions(node, utxo, 10)
        self.utxos.append(
            [block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
        # Relay the first 5 transactions from the block in advance
        for tx in block.vtx[1:6]:
            test_node.send_message(msg_tx(tx))
        test_node.sync_with_ping()
        # Make sure all transactions were accepted.
        mempool = node.getrawmempool()
        for tx in block.vtx[1:6]:
            assert tx.hash in mempool

        # Send compact block
        comp_block = HeaderAndShortIDs()
        comp_block.initialize_from_block(block,
                                         prefill_list=[0],
                                         use_witness=(version == 2))
        test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))
        absolute_indexes = []
        with mininode_lock:
            assert "getblocktxn" in test_node.last_message
            absolute_indexes = test_node.last_message[
                "getblocktxn"].block_txn_request.to_absolute()
        assert_equal(absolute_indexes, [6, 7, 8, 9, 10])

        # Now give an incorrect response.
        # Note that it's possible for bitcoind to be smart enough to know we're
        # lying, since it could check to see if the shortid matches what we're
        # sending, and eg disconnect us for misbehavior.  If that behavior
        # change was made, we could just modify this test by having a
        # different peer provide the block further down, so that we're still
        # verifying that the block isn't marked bad permanently. This is good
        # enough for now.
        msg = msg_no_witness_blocktxn()
        if version == 2:
            msg = msg_blocktxn()
        msg.block_transactions = BlockTransactions(
            block.sha256, [block.vtx[5]] + block.vtx[7:])
        test_node.send_and_ping(msg)

        # Tip should not have updated
        assert_equal(int(node.getbestblockhash(), 16), block.hashPrevBlock)

        # We should receive a getdata request
        wait_until(lambda: "getdata" in test_node.last_message,
                   timeout=10,
                   lock=mininode_lock)
        assert_equal(len(test_node.last_message["getdata"].inv), 1)
        assert test_node.last_message["getdata"].inv[
            0].type == 2 or test_node.last_message["getdata"].inv[
                0].type == 2 | MSG_WITNESS_FLAG
        assert_equal(test_node.last_message["getdata"].inv[0].hash,
                     block.sha256)

        # Deliver the block
        if version == 2:
            test_node.send_and_ping(msg_block(block))
        else:
            test_node.send_and_ping(msg_no_witness_block(block))
        assert_equal(int(node.getbestblockhash(), 16), block.sha256)

    def test_getblocktxn_handler(self, test_node):
        version = test_node.cmpct_version
        node = self.nodes[0]
        # bitcoind will not send blocktxn responses for blocks whose height is
        # more than 10 blocks deep.
        MAX_GETBLOCKTXN_DEPTH = 10
        chain_height = node.getblockcount()
        current_height = chain_height
        while (current_height >= chain_height - MAX_GETBLOCKTXN_DEPTH):
            block_hash = node.getblockhash(current_height)
            block = FromHex(CBlock(), node.getblock(block_hash, False))

            msg = msg_getblocktxn()
            msg.block_txn_request = BlockTransactionsRequest(
                int(block_hash, 16), [])
            num_to_request = random.randint(1, len(block.vtx))
            msg.block_txn_request.from_absolute(
                sorted(random.sample(range(len(block.vtx)), num_to_request)))
            test_node.send_message(msg)
            wait_until(lambda: "blocktxn" in test_node.last_message,
                       timeout=10,
                       lock=mininode_lock)

            [tx.calc_sha256() for tx in block.vtx]
            with mininode_lock:
                assert_equal(
                    test_node.last_message["blocktxn"].block_transactions.
                    blockhash, int(block_hash, 16))
                all_indices = msg.block_txn_request.to_absolute()
                for index in all_indices:
                    tx = test_node.last_message[
                        "blocktxn"].block_transactions.transactions.pop(0)
                    tx.calc_sha256()
                    assert_equal(tx.sha256, block.vtx[index].sha256)
                    if version == 1:
                        # Witnesses should have been stripped
                        assert tx.wit.is_null()
                    else:
                        # Check that the witness matches
                        assert_equal(tx.calc_sha256(True),
                                     block.vtx[index].calc_sha256(True))
                test_node.last_message.pop("blocktxn", None)
            current_height -= 1

        # Next request should send a full block response, as we're past the
        # allowed depth for a blocktxn response.
        block_hash = node.getblockhash(current_height)
        msg.block_txn_request = BlockTransactionsRequest(
            int(block_hash, 16), [0])
        with mininode_lock:
            test_node.last_message.pop("block", None)
            test_node.last_message.pop("blocktxn", None)
        test_node.send_and_ping(msg)
        with mininode_lock:
            test_node.last_message["block"].block.calc_sha256()
            assert_equal(test_node.last_message["block"].block.sha256,
                         int(block_hash, 16))
            assert "blocktxn" not in test_node.last_message

    def test_compactblocks_not_at_tip(self, test_node):
        node = self.nodes[0]
        # Test that requesting old compactblocks doesn't work.
        MAX_CMPCTBLOCK_DEPTH = 5
        new_blocks = []
        for i in range(MAX_CMPCTBLOCK_DEPTH + 1):
            test_node.clear_block_announcement()
            new_blocks.append(node.generate(1)[0])
            wait_until(test_node.received_block_announcement,
                       timeout=30,
                       lock=mininode_lock)

        test_node.clear_block_announcement()
        test_node.send_message(msg_getdata([CInv(4, int(new_blocks[0], 16))]))
        wait_until(lambda: "cmpctblock" in test_node.last_message,
                   timeout=30,
                   lock=mininode_lock)

        test_node.clear_block_announcement()
        node.generate(1)
        wait_until(test_node.received_block_announcement,
                   timeout=30,
                   lock=mininode_lock)
        test_node.clear_block_announcement()
        with mininode_lock:
            test_node.last_message.pop("block", None)
        test_node.send_message(msg_getdata([CInv(4, int(new_blocks[0], 16))]))
        wait_until(lambda: "block" in test_node.last_message,
                   timeout=30,
                   lock=mininode_lock)
        with mininode_lock:
            test_node.last_message["block"].block.calc_sha256()
            assert_equal(test_node.last_message["block"].block.sha256,
                         int(new_blocks[0], 16))

        # Generate an old compactblock, and verify that it's not accepted.
        cur_height = node.getblockcount()
        hashPrevBlock = int(node.getblockhash(cur_height - 5), 16)
        block = self.build_block_on_tip(node)
        block.hashPrevBlock = hashPrevBlock
        block.solve()

        comp_block = HeaderAndShortIDs()
        comp_block.initialize_from_block(block)
        test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))

        tips = node.getchaintips()
        found = False
        for x in tips:
            if x["hash"] == block.hash:
                assert_equal(x["status"], "headers-only")
                found = True
                break
        assert found

        # Requesting this block via getblocktxn should silently fail
        # (to avoid fingerprinting attacks).
        msg = msg_getblocktxn()
        msg.block_txn_request = BlockTransactionsRequest(block.sha256, [0])
        with mininode_lock:
            test_node.last_message.pop("blocktxn", None)
        test_node.send_and_ping(msg)
        with mininode_lock:
            assert "blocktxn" not in test_node.last_message

    def test_end_to_end_block_relay(self, listeners):
        node = self.nodes[0]
        utxo = self.utxos.pop(0)

        block = self.build_block_with_transactions(node, utxo, 10)

        [l.clear_block_announcement() for l in listeners]

        # ToHex() won't serialize with witness, but this block has no witnesses
        # anyway. TODO: repeat this test with witness tx's to a segwit node.
        node.submitblock(ToHex(block))

        for l in listeners:
            wait_until(lambda: l.received_block_announcement(),
                       timeout=30,
                       lock=mininode_lock)
        with mininode_lock:
            for l in listeners:
                assert "cmpctblock" in l.last_message
                l.last_message[
                    "cmpctblock"].header_and_shortids.header.calc_sha256()
                assert_equal(
                    l.last_message["cmpctblock"].header_and_shortids.header.
                    sha256, block.sha256)

    # Test that we don't get disconnected if we relay a compact block with a
    # valid header but invalid transactions.
    def test_invalid_tx_in_compactblock(self, test_node, use_segwit=True):
        node = self.nodes[0]
        assert len(self.utxos)
        utxo = self.utxos[0]

        block = self.build_block_with_transactions(node, utxo, 5)
        del block.vtx[3]
        block.hashMerkleRoot = block.calc_merkle_root()
        if use_segwit:
            # If we're testing with segwit, also drop the coinbase witness,
            # but include the witness commitment.
            add_witness_commitment(block)
            block.vtx[0].wit.vtxinwit = []
        block.solve()

        # Now send the compact block with all transactions prefilled, and
        # verify that we don't get disconnected.
        comp_block = HeaderAndShortIDs()
        comp_block.initialize_from_block(block,
                                         prefill_list=[0, 1, 2, 3, 4],
                                         use_witness=use_segwit)
        msg = msg_cmpctblock(comp_block.to_p2p())
        test_node.send_and_ping(msg)

        # Check that the tip didn't advance
        assert int(node.getbestblockhash(), 16) != block.sha256
        test_node.sync_with_ping()

    # Helper for enabling cb announcements
    # Send the sendcmpct request and sync headers
    def request_cb_announcements(self, peer):
        node = self.nodes[0]
        tip = node.getbestblockhash()
        peer.get_headers(locator=[int(tip, 16)], hashstop=0)

        msg = msg_sendcmpct()
        msg.version = peer.cmpct_version
        msg.announce = True
        peer.send_and_ping(msg)

    def test_compactblock_reconstruction_multiple_peers(
            self, stalling_peer, delivery_peer):
        node = self.nodes[0]
        assert len(self.utxos)

        def announce_cmpct_block(node, peer):
            utxo = self.utxos.pop(0)
            block = self.build_block_with_transactions(node, utxo, 5)

            cmpct_block = HeaderAndShortIDs()
            cmpct_block.initialize_from_block(block)
            msg = msg_cmpctblock(cmpct_block.to_p2p())
            peer.send_and_ping(msg)
            with mininode_lock:
                assert "getblocktxn" in peer.last_message
            return block, cmpct_block

        block, cmpct_block = announce_cmpct_block(node, stalling_peer)

        for tx in block.vtx[1:]:
            delivery_peer.send_message(msg_tx(tx))
        delivery_peer.sync_with_ping()
        mempool = node.getrawmempool()
        for tx in block.vtx[1:]:
            assert tx.hash in mempool

        delivery_peer.send_and_ping(msg_cmpctblock(cmpct_block.to_p2p()))
        assert_equal(int(node.getbestblockhash(), 16), block.sha256)

        self.utxos.append(
            [block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])

        # Now test that delivering an invalid compact block won't break relay

        block, cmpct_block = announce_cmpct_block(node, stalling_peer)
        for tx in block.vtx[1:]:
            delivery_peer.send_message(msg_tx(tx))
        delivery_peer.sync_with_ping()

        cmpct_block.prefilled_txn[0].tx.wit.vtxinwit = [CTxInWitness()]
        cmpct_block.prefilled_txn[0].tx.wit.vtxinwit[0].scriptWitness.stack = [
            ser_uint256(0)
        ]

        cmpct_block.use_witness = True
        delivery_peer.send_and_ping(msg_cmpctblock(cmpct_block.to_p2p()))
        assert int(node.getbestblockhash(), 16) != block.sha256

        msg = msg_no_witness_blocktxn()
        msg.block_transactions.blockhash = block.sha256
        msg.block_transactions.transactions = block.vtx[1:]
        stalling_peer.send_and_ping(msg)
        assert_equal(int(node.getbestblockhash(), 16), block.sha256)

    def run_test(self):
        # Setup the p2p connections
        self.segwit_node = self.nodes[0].add_p2p_connection(
            TestP2PConn(cmpct_version=2))
        self.old_node = self.nodes[0].add_p2p_connection(
            TestP2PConn(cmpct_version=1), services=NODE_NETWORK)
        self.additional_segwit_node = self.nodes[0].add_p2p_connection(
            TestP2PConn(cmpct_version=2))

        # We will need UTXOs to construct transactions in later tests.
        self.make_utxos()

        assert softfork_active(self.nodes[0], "segwit")

        self.log.info("Testing SENDCMPCT p2p message... ")
        self.test_sendcmpct(self.segwit_node, old_node=self.old_node)
        self.test_sendcmpct(self.additional_segwit_node)

        self.log.info("Testing compactblock construction...")
        self.test_compactblock_construction(self.old_node)
        self.test_compactblock_construction(self.segwit_node)

        self.log.info("Testing compactblock requests (segwit node)... ")
        self.test_compactblock_requests(self.segwit_node)

        self.log.info("Testing getblocktxn requests (segwit node)...")
        self.test_getblocktxn_requests(self.segwit_node)

        self.log.info(
            "Testing getblocktxn handler (segwit node should return witnesses)..."
        )
        self.test_getblocktxn_handler(self.segwit_node)
        self.test_getblocktxn_handler(self.old_node)

        self.log.info(
            "Testing compactblock requests/announcements not at chain tip...")
        self.test_compactblocks_not_at_tip(self.segwit_node)
        self.test_compactblocks_not_at_tip(self.old_node)

        self.log.info("Testing handling of incorrect blocktxn responses...")
        self.test_incorrect_blocktxn_response(self.segwit_node)

        self.log.info(
            "Testing reconstructing compact blocks from all peers...")
        self.test_compactblock_reconstruction_multiple_peers(
            self.segwit_node, self.additional_segwit_node)

        # Test that if we submitblock to node1, we'll get a compact block
        # announcement to all peers.
        # (Post-segwit activation, blocks won't propagate from node0 to node1
        # automatically, so don't bother testing a block announced to node0.)
        self.log.info("Testing end-to-end block relay...")
        self.request_cb_announcements(self.old_node)
        self.request_cb_announcements(self.segwit_node)
        self.test_end_to_end_block_relay([self.segwit_node, self.old_node])

        self.log.info("Testing handling of invalid compact blocks...")
        self.test_invalid_tx_in_compactblock(self.segwit_node)
        self.test_invalid_tx_in_compactblock(self.old_node)

        self.log.info("Testing invalid index in cmpctblock message...")
        self.test_invalid_cmpctblock_message()
class PTVTxnChains(ComparisonTestFramework):

    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True
        self.genesisactivationheight = 600
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"horsebattery")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        self.locking_script = CScript([self.coinbase_pubkey, OP_CHECKSIG])
        self.extra_args = [['-debug', '-genesisactivationheight=%d' % self.genesisactivationheight]] * self.num_nodes

    def run_test(self):
        self.test.run()

    # Sign a transaction, using the key we know about.
    # This signs input 0 in tx, which is assumed to be spending output n in spend_tx
    def sign_tx(self, tx, spend_tx, n):
        scriptPubKey = bytearray(spend_tx.vout[n].scriptPubKey)
        sighash = SignatureHashForkId(
            spend_tx.vout[n].scriptPubKey, tx, 0, SIGHASH_ALL | SIGHASH_FORKID, spend_tx.vout[n].nValue)
        tx.vin[0].scriptSig = CScript(
            [self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))])

    def check_mempool(self, rpc, should_be_in_mempool, timeout=20):
        wait_until(lambda: set(rpc.getrawmempool()) == {t.hash for t in should_be_in_mempool}, timeout=timeout)

    # Generate transactions in order, so that each transaction's output is an input for the next transaction.
    def get_chained_transactions(self, spend, num_of_transactions, money_to_spend=5000000000):
        txns = []
        for _ in range(0, num_of_transactions):
            money_to_spend = money_to_spend - 1000  # 1000 satoshi goes to fees
            tx = create_transaction(spend.tx, spend.n, b"", money_to_spend, self.locking_script)
            self.sign_tx(tx, spend.tx, spend.n)
            tx.rehash()
            txns.append(tx)
            spend = PreviousSpendableOutput(tx, 0)
        return txns
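    # Illustrative example (not part of the test): build a single chain of three
    # transactions from one spendable output, relay it in order, then wait for
    # the chain to reach the mempool (assumes the mempool is otherwise empty,
    # since check_mempool compares the full mempool contents).
    def example_relay_single_chain(self, conn, spend):
        chain = self.get_chained_transactions(spend, 3)
        for tx in chain:
            conn.send_message(msg_tx(tx))
        self.check_mempool(conn.rpc, chain, timeout=30)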

    # Create the required number of chains of equal length.
    def get_txchains_n(self, num_of_chains, chain_length, spend):
        if num_of_chains > len(spend):
            raise Exception('Insufficient number of spendable outputs.')
        txchains = []
        for x in range(0, num_of_chains):
            txchains += self.get_chained_transactions(spend[x], chain_length)
        return txchains

    def run_scenario1(self, conn, num_of_chains, chain_length, spend, timeout):
        # Create and send tx chains.
        txchains = self.get_txchains_n(num_of_chains, chain_length, spend)
        for tx in txchains:
            conn.send_message(msg_tx(tx))
        # Check if the validation queues are empty.
        wait_until(lambda: self.nodes[0].rpc.getblockchainactivity()["transactions"] == 0, timeout=timeout)
        # Check if required transactions are accepted by the mempool.
        self.check_mempool(conn.rpc, txchains, timeout)

    def get_tests(self):
        rejected_txs = []
        def on_reject(conn, msg):
            rejected_txs.append(msg)
        # Shorthand for functions
        block = self.chain.next_block
        node = self.nodes[0]
        self.chain.set_genesis_hash(int(node.getbestblockhash(), 16))

        # Create a new block
        block(0, coinbase_pubkey=self.coinbase_pubkey)
        self.chain.save_spendable_output()
        yield self.accepted()

        # Now we need that block to mature so we can spend the coinbase.
        # Also, move block height on beyond Genesis activation.
        test = TestInstance(sync_every_block=False)
        for i in range(600):
            block(5000 + i, coinbase_pubkey=self.coinbase_pubkey)
            test.blocks_and_transactions.append([self.chain.tip, True])
            self.chain.save_spendable_output()
        yield test

        # Collect spendable outputs now to avoid cluttering the code later on.
        out = []
        for i in range(200):
            out.append(self.chain.get_spendable_output())

        self.stop_node(0)

        num_of_threads = multiprocessing.cpu_count()

        # Scenario 1.
        # This test case shows that false-positive orphans are not created while processing a set of chains, where chainlength=10.
        # Each thread from the validation thread pool should have an assigned chain of txns to process.
        args = ['-maxorphantxsize=0', '-txnvalidationasynchrunfreq=100', '-checkmempool=0', '-persistmempool=0']
        with self.run_node_with_connections('Scenario 1: {} chains of length 10. Storing orphans is disabled.'.format(num_of_threads),
                0, args, number_of_connections=1) as (conn,):
            # Run test case.
            self.run_scenario1(conn, num_of_threads, 10, out, timeout=20)

        # Scenario 2.
        # This test case shows that false-positive orphans are not created while processing a set of chains, where chainlength=20.
        # Each thread from the validation thread pool should have an assigned chain of txns to process.
        args = ['-maxorphantxsize=0', '-txnvalidationasynchrunfreq=0',
                '-limitancestorcount=20', '-limitdescendantcount=20', '-checkmempool=0', '-persistmempool=0',
                '-maxstdtxvalidationduration=100']
        with self.run_node_with_connections('Scenario 2: {} chains of length 20. Storing orphans is disabled.'.format(num_of_threads),
                0, args, number_of_connections=1) as (conn,):
            # Run test case.
            self.run_scenario1(conn, num_of_threads, 20, out, timeout=30)

        # Scenario 3.
        # This scenario triggers the 'too-long-validation-time' reject reason during PTV processing.
        # If a given task has a chain of 50 txns to process and the 10th txn is rejected with 'too-long-validation-time',
        # then all remaining txns from the chain are detected as false-positive orphans.
        # Due to the runtime environment it is not possible to estimate the number of such rejects.
        args = ['-maxorphantxsize=10', '-txnvalidationasynchrunfreq=0',
                '-limitancestorcount=50', '-limitdescendantcount=50', '-checkmempool=0', '-persistmempool=0']
        with self.run_node_with_connections("Scenario 3: 100 chains of length 50. Storing orphans is enabled.",
                0, args, number_of_connections=1) as (conn,):
            # Run test case.
            self.run_scenario1(conn, 100, 50, out, timeout=60)
class FullBlockTest(ComparisonTestFramework):

    ''' Can either run this test as 1 node with expected answers, or two and compare them. 
        Change the "outcome" variable from each TestInstance object to only do the comparison. '''
    def __init__(self):
        self.num_nodes = 1
        self.block_heights = {}
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"horsebattery")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        self.block_time = int(time.time())+1
        self.tip = None
        self.blocks = {}

    def run_test(self):
        test = TestManager(self, self.options.tmpdir)
        test.add_all_connections(self.nodes)
        NetworkThread().start() # Start up network handling in another thread
        test.run()

    def add_transactions_to_block(self, block, tx_list):
        [ tx.rehash() for tx in tx_list ]
        block.vtx.extend(tx_list)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        return block
    
    # Create a block on top of self.tip, and advance self.tip to point to the new block
    # if spend is specified, then 1 satoshi will be spent from that to an anyone-can-spend output,
    # and rest will go to fees.
    def next_block(self, number, spend=None, additional_coinbase_value=0, script=None):
        if self.tip == None:
            base_block_hash = self.genesis_hash
        else:
            base_block_hash = self.tip.sha256
        # First create the coinbase
        height = self.block_heights[base_block_hash] + 1
        coinbase = create_coinbase(height, self.coinbase_pubkey)
        coinbase.vout[0].nValue += additional_coinbase_value
        if (spend != None):
            coinbase.vout[0].nValue += spend.tx.vout[spend.n].nValue - 1 # all but one satoshi to fees
        coinbase.rehash()
        block = create_block(base_block_hash, coinbase, self.block_time)
        if (spend != None):
            tx = CTransaction()
            tx.vin.append(CTxIn(COutPoint(spend.tx.sha256, spend.n), b"", 0xffffffff))  # no signature yet
            # This copies the java comparison tool testing behavior: the first
            # txout has a garbage scriptPubKey, "to make sure we're not
            # pre-verifying too much" (?)
            tx.vout.append(CTxOut(0, CScript([random.randint(0,255), height & 255])))
            if script == None:
                tx.vout.append(CTxOut(1, CScript([OP_TRUE])))
            else:
                tx.vout.append(CTxOut(1, script))
            # Now sign it if necessary
            scriptSig = ""
            scriptPubKey = bytearray(spend.tx.vout[spend.n].scriptPubKey)
            if (scriptPubKey[0] == OP_TRUE):  # looks like an anyone-can-spend
                scriptSig = CScript([OP_TRUE])
            else:
                # We have to actually sign it
                (sighash, err) = SignatureHash(spend.tx.vout[spend.n].scriptPubKey, tx, 0, SIGHASH_ALL)
                scriptSig = CScript([self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))])
            tx.vin[0].scriptSig = scriptSig
            # Now add the transaction to the block
            block = self.add_transactions_to_block(block, [tx])
        block.solve()
        self.tip = block
        self.block_heights[block.sha256] = height
        self.block_time += 1
        assert number not in self.blocks
        self.blocks[number] = block
        return block

    def get_tests(self):
        self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
        self.block_heights[self.genesis_hash] = 0
        spendable_outputs = []

        # save the current tip so it can be spent by a later block
        def save_spendable_output():
            spendable_outputs.append(self.tip)

        # get an output that we previously marked as spendable
        def get_spendable_output():
            return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)

        # returns a test case that asserts that the current tip was accepted
        def accepted():
            return TestInstance([[self.tip, True]])

        # returns a test case that asserts that the current tip was rejected
        def rejected():
            return TestInstance([[self.tip, False]])
       
        # move the tip back to a previous block
        def tip(number):
            self.tip = self.blocks[number]

        # creates a new block and advances the tip to that block
        block = self.next_block


        # Create a new block
        block(0)
        save_spendable_output()
        yield accepted()


        # Now we need that block to mature so we can spend the coinbase.
        test = TestInstance(sync_every_block=False)
        for i in range(100):
            block(1000 + i)
            test.blocks_and_transactions.append([self.tip, True])
            save_spendable_output()
        yield test


        # Start by building a couple of blocks on top (which output is spent is in parentheses):
        #     genesis -> b1 (0) -> b2 (1)
        out0 = get_spendable_output()
        block(1, spend=out0)
        save_spendable_output()
        yield accepted()

        out1 = get_spendable_output()
        block(2, spend=out1)
        # Inv again, then deliver twice (shouldn't break anything).
        yield accepted()


        # Now fork like this:
        # 
        #     genesis -> b1 (0) -> b2 (1)
        #                      \-> b3 (1)
        # 
        # Nothing should happen at this point. We saw b2 first so it takes priority.
        tip(1)
        block(3, spend=out1)
        # Deliver twice (should still not break anything)
        yield rejected()


        # Now we add another block to make the alternative chain longer.
        # 
        #     genesis -> b1 (0) -> b2 (1)
        #                      \-> b3 (1) -> b4 (2)
        out2 = get_spendable_output()
        block(4, spend=out2)
        yield accepted()


        # ... and back to the first chain.
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                      \-> b3 (1) -> b4 (2)
        tip(2)
        block(5, spend=out2)
        save_spendable_output()
        yield rejected()

        out3 = get_spendable_output()
        block(6, spend=out3)
        yield accepted()


        # Try to create a fork that double-spends
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                                          \-> b7 (2) -> b8 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        block(7, spend=out2)
        yield rejected()

        out4 = get_spendable_output()
        block(8, spend=out4)
        yield rejected()


        # Try to create a block that has too much fee
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                                                    \-> b9 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(6)
        block(9, spend=out4, additional_coinbase_value=1)
        yield rejected()

        
        # Create a fork that ends in a block with too much fee (the one that causes the reorg)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b10 (3) -> b11 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        block(10, spend=out3)
        yield rejected()

        block(11, spend=out4, additional_coinbase_value=1)
        yield rejected()


        # Try again, but with a valid fork first
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b14 (5)
        #                                              (b12 added last)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        b12 = block(12, spend=out3)
        save_spendable_output()
        #yield TestInstance([[b12, False]])
        b13 = block(13, spend=out4)
        # Deliver the block header for b12, and the block b13.
        # b13 should be accepted but the tip won't advance until b12 is delivered.
        yield TestInstance([[CBlockHeader(b12), None], [b13, False]])

        save_spendable_output()
        out5 = get_spendable_output()
        # b14 is invalid, but the node won't know that until it tries to connect
        # Tip still can't advance because b12 is missing
        block(14, spend=out5, additional_coinbase_value=1)
        yield rejected()

        yield TestInstance([[b12, True, b13.sha256]]) # New tip should be b13.

        
        # Test that a block with a lot of checksigs is okay
        lots_of_checksigs = CScript([OP_CHECKSIG] * (1000000 // 50 - 1))
        tip(13)
        block(15, spend=out5, script=lots_of_checksigs)
        yield accepted()


        # Test that a block with too many checksigs is rejected
        out6 = get_spendable_output()
        too_many_checksigs = CScript([OP_CHECKSIG] * (1000000 // 50))
        block(16, spend=out6, script=too_many_checksigs)
        yield rejected()
class FullBlockTest(ComparisonTestFramework):

    # Can either run this test as 1 node with expected answers, or two and compare them.
    # Change the "outcome" variable from each TestInstance object to only do the comparison.
    def __init__(self):
        super().__init__()
        self.num_nodes = 1
        self.block_heights = {}
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"fatstacks")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        self.forkid_key = CECKey()
        self.forkid_key.set_secretbytes(b"forkid")
        self.forkid_pubkey = self.forkid_key.get_pubkey()
        self.tip = None
        self.blocks = {}

    def setup_network(self):
        self.extra_args = [[
            '-debug', '-norelaypriority',
            "-uahfstarttime=%d" % UAHF_START_TIME, '-whitelist=127.0.0.1',
            '-par=1'
        ]]
        self.nodes = start_nodes(self.num_nodes,
                                 self.options.tmpdir,
                                 self.extra_args,
                                 binary=[self.options.testbinary])

    def add_options(self, parser):
        super().add_options(parser)
        parser.add_option("--runbarelyexpensive",
                          dest="runbarelyexpensive",
                          default=True)

    def run_test(self):
        self.test = TestManager(self, self.options.tmpdir)
        self.test.add_all_connections(self.nodes)
        # Start up network handling in another thread
        NetworkThread().start()
        # Mock the time so that block activating the HF will be accepted
        self.nodes[0].setmocktime(UAHF_START_TIME)
        self.test.run()

    def add_transactions_to_block(self, block, tx_list):
        [tx.rehash() for tx in tx_list]
        block.vtx.extend(tx_list)

    # this is a little handier to use than the version in blocktools.py
    def create_tx(self, spend_tx, n, value, script=CScript([OP_TRUE])):
        tx = create_transaction(spend_tx, n, b"", value, script)
        return tx

    # sign a transaction, using the key we know about
    # this signs input 0 in tx, which is assumed to be spending output n in spend_tx
    def sign_tx(self, tx, spend_tx, n):
        scriptPubKey = bytearray(spend_tx.vout[n].scriptPubKey)
        if (scriptPubKey[0] == OP_TRUE):  # an anyone-can-spend
            tx.vin[0].scriptSig = CScript()
            return
        (sighash, err) = SignatureHash(spend_tx.vout[n].scriptPubKey, tx, 0,
                                       SIGHASH_ALL)
        tx.vin[0].scriptSig = CScript([
            self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))
        ])

    def create_and_sign_transaction(self,
                                    spend_tx,
                                    n,
                                    value,
                                    script=CScript([OP_TRUE])):
        tx = self.create_tx(spend_tx, n, value, script)
        self.sign_tx(tx, spend_tx, n)
        tx.rehash()
        return tx
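    # Illustrative sketch (not used by the test): signing an input with the
    # post-fork SIGHASH_FORKID flag, mirroring what get_tests() does inline
    # further below for tx_spend. The method name and argument order are only
    # for illustration.
    def example_sign_forkid(self, tx, spend_tx, n, key, script):
        sighash = SignatureHashForkId(script, tx, 0,
                                      SIGHASH_ALL | SIGHASH_FORKID,
                                      spend_tx.vout[n].nValue)
        tx.vin[0].scriptSig = CScript([
            key.sign(sighash) +
            bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))
        ])
        tx.rehash()
        return tx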

    def next_block(self,
                   number,
                   spend=None,
                   additional_coinbase_value=0,
                   script=None,
                   extra_sigops=0,
                   block_size=0,
                   solve=True):
        """
        Create a block on top of self.tip, and advance self.tip to point to the new block
        if spend is specified, then 1 satoshi will be spent from that to an anyone-can-spend
        output, and rest will go to fees.
        """
        if self.tip == None:
            base_block_hash = self.genesis_hash
            block_time = int(time.time()) + 1
        else:
            base_block_hash = self.tip.sha256
            block_time = self.tip.nTime + 1
        # First create the coinbase
        height = self.block_heights[base_block_hash] + 1
        coinbase = create_coinbase(height, self.coinbase_pubkey)
        coinbase.vout[0].nValue += additional_coinbase_value
        if (spend != None):
            coinbase.vout[0].nValue += spend.tx.vout[
                spend.n].nValue - 1  # all but one satoshi to fees
        coinbase.rehash()
        block = create_block(base_block_hash, coinbase, block_time)
        spendable_output = None
        if (spend != None):
            tx = CTransaction()
            tx.vin.append(
                CTxIn(COutPoint(spend.tx.sha256, spend.n), b"",
                      0xffffffff))  # no signature yet
            # This copies the java comparison tool testing behavior: the first
            # txout has a garbage scriptPubKey, "to make sure we're not
            # pre-verifying too much" (?)
            tx.vout.append(
                CTxOut(0, CScript([random.randint(0, 255), height & 255])))
            if script == None:
                tx.vout.append(CTxOut(1, CScript([OP_TRUE])))
            else:
                tx.vout.append(CTxOut(1, script))
            spendable_output = PreviousSpendableOutput(tx, 0)

            # Now sign it if necessary
            scriptSig = b""
            scriptPubKey = bytearray(spend.tx.vout[spend.n].scriptPubKey)
            if (scriptPubKey[0] == OP_TRUE):  # looks like an anyone-can-spend
                scriptSig = CScript([OP_TRUE])
            else:
                # We have to actually sign it
                (sighash,
                 err) = SignatureHash(spend.tx.vout[spend.n].scriptPubKey, tx,
                                      0, SIGHASH_ALL)
                scriptSig = CScript([
                    self.coinbase_key.sign(sighash) +
                    bytes(bytearray([SIGHASH_ALL]))
                ])
            tx.vin[0].scriptSig = scriptSig
            # Now add the transaction to the block
            self.add_transactions_to_block(block, [tx])
            block.hashMerkleRoot = block.calc_merkle_root()
        if spendable_output != None and block_size > 0:
            while len(block.serialize()) < block_size:
                tx = CTransaction()
                script_length = block_size - len(block.serialize()) - 79
                if script_length > 510000:
                    script_length = 500000
                tx_sigops = min(extra_sigops, script_length,
                                MAX_TX_SIGOPS_COUNT)
                extra_sigops -= tx_sigops
                script_pad_len = script_length - tx_sigops
                script_output = CScript([b'\x00' * script_pad_len] +
                                        [OP_CHECKSIG] * tx_sigops)
                tx.vout.append(CTxOut(0, CScript([OP_TRUE])))
                tx.vout.append(CTxOut(0, script_output))
                tx.vin.append(
                    CTxIn(
                        COutPoint(spendable_output.tx.sha256,
                                  spendable_output.n)))
                spendable_output = PreviousSpendableOutput(tx, 0)
                self.add_transactions_to_block(block, [tx])
            block.hashMerkleRoot = block.calc_merkle_root()
            # Make sure the math above worked out to produce the correct block size
            # (the math will fail if there are too many transactions in the block)
            assert_equal(len(block.serialize()), block_size)
            # Make sure all the requested sigops have been included
            assert_equal(extra_sigops, 0)
        if solve:
            block.solve()
        self.tip = block
        self.block_heights[block.sha256] = height
        assert number not in self.blocks
        self.blocks[number] = block
        return block
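    # Illustrative usage (not part of the test flow): next_block() can pad the
    # block with filler transactions to an exact serialized size, e.g. one byte
    # over the legacy limit, as get_tests() below does to probe the fork rules.
    # The method name is only for illustration.
    def example_oversized_block(self, number, spend):
        return self.next_block(number, spend=spend,
                               block_size=LEGACY_MAX_BLOCK_SIZE + 1)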

    def get_tests(self):
        self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
        self.block_heights[self.genesis_hash] = 0
        spendable_outputs = []

        # save the current tip so it can be spent by a later block
        def save_spendable_output():
            spendable_outputs.append(self.tip)

        # get an output that we previously marked as spendable
        def get_spendable_output():
            return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)

        # returns a test case that asserts that the current tip was accepted
        def accepted():
            return TestInstance([[self.tip, True]])

        # returns a test case that asserts that the current tip was rejected
        def rejected(reject=None):
            if reject is None:
                return TestInstance([[self.tip, False]])
            else:
                return TestInstance([[self.tip, reject]])

        # move the tip back to a previous block
        def tip(number):
            self.tip = self.blocks[number]

        # adds transactions to the block and updates state
        def update_block(block_number, new_transactions):
            block = self.blocks[block_number]
            self.add_transactions_to_block(block, new_transactions)
            old_sha256 = block.sha256
            block.hashMerkleRoot = block.calc_merkle_root()
            block.solve()
            # Update the internal state just like in next_block
            self.tip = block
            if block.sha256 != old_sha256:
                self.block_heights[
                    block.sha256] = self.block_heights[old_sha256]
                del self.block_heights[old_sha256]
            self.blocks[block_number] = block
            return block

        # shorthand for functions
        block = self.next_block
        node = self.nodes[0]

        # Create a new block
        block(0, block_size=LEGACY_MAX_BLOCK_SIZE)
        save_spendable_output()
        yield accepted()

        # Now we need that block to mature so we can spend the coinbase.
        test = TestInstance(sync_every_block=False)
        for i in range(99):
            block(5000 + i)
            test.blocks_and_transactions.append([self.tip, True])
            save_spendable_output()
        yield test

        # collect spendable outputs now to avoid cluttering the code later on
        out = []
        for i in range(100):
            out.append(get_spendable_output())

        # Blocks up to LEGACY_MAX_BLOCK_SIZE are accepted.
        block(1, spend=out[0], block_size=LEGACY_MAX_BLOCK_SIZE)
        yield accepted()

        # Bigger blocks are rejected as the fork isn't activated yet.
        block(2, spend=out[1], block_size=LEGACY_MAX_BLOCK_SIZE + 1)
        yield rejected(RejectResult(16, b'bad-blk-length'))

        # Rewind bad block
        tip(1)

        # Create a transaction that we will use to test SIGHASH_FORKID
        script_forkid = CScript([self.forkid_pubkey, OP_CHECKSIG])
        tx_forkid = self.create_and_sign_transaction(out[1].tx, out[1].n, 1,
                                                     script_forkid)

        # Create a block that would activate the HF. We also add the
        # transaction that will allow us to test SIGHASH_FORKID
        b03 = block(3)
        b03.nTime = UAHF_START_TIME
        update_block(3, [tx_forkid])
        yield accepted()

        # Pile up 4 blocks on top to get to the point just before activation.
        block(4, spend=out[2])
        yield accepted()
        block(5, spend=out[3])
        yield accepted()
        block(6, spend=out[4])
        yield accepted()
        block(7, spend=out[5])
        yield accepted()

        # Bigger blocks are still rejected as the fork isn't activated yet.
        block(8, spend=out[6], block_size=LEGACY_MAX_BLOCK_SIZE + 1)
        yield rejected(RejectResult(16, b'bad-blk-length'))

        # Rewind bad block
        tip(7)

        # build a transaction using SIGHASH_FORKID
        tx_spend = self.create_tx(tx_forkid, 0, 1, CScript([OP_TRUE]))
        sighash_spend = SignatureHashForkId(script_forkid, tx_spend, 0,
                                            SIGHASH_FORKID | SIGHASH_ALL, 1)
        sig_forkid = self.forkid_key.sign(sighash_spend)
        tx_spend.vin[0].scriptSig = CScript(
            [sig_forkid + bytes(bytearray([SIGHASH_FORKID | SIGHASH_ALL]))])
        tx_spend.rehash()

        # This transaction can't get into the mempool yet
        try:
            node.sendrawtransaction(ToHex(tx_spend))
        except JSONRPCException as exp:
            assert_equal(exp.error["message"], RPC_SIGHASH_FORKID_ERROR)
        else:
            assert (False)

        # The transaction is rejected, so the mempool should still be empty
        assert_equal(set(node.getrawmempool()), set())

        # Check that SIGHASH_FORKID transactions are still rejected
        block(9)
        update_block(9, [tx_spend])
        yield rejected(RejectResult(16, SIGHASH_INVALID_ERROR))

        # Rewind bad block
        tip(7)

        # Pile up another block, to activate. OP_RETURN anti replay
        # outputs are still considered valid.
        antireplay_script = CScript([OP_RETURN, ANTI_REPLAY_COMMITMENT])
        block(10, spend=out[6], script=antireplay_script)
        yield accepted()

        # Now that the HF is activated, replay protected txs are
        # accepted in the mempool
        tx_spend_id = node.sendrawtransaction(ToHex(tx_spend))
        assert_equal(set(node.getrawmempool()), {tx_spend_id})

        # HF is active now, we MUST create a big block.
        block(11, spend=out[7], block_size=LEGACY_MAX_BLOCK_SIZE)
        yield rejected(RejectResult(16, b'bad-blk-too-small'))

        # Rewind bad block
        tip(10)

        # HF is active, now we can create bigger blocks and use
        # SIGHASH_FORKID replay protection.
        block(12, spend=out[7], block_size=LEGACY_MAX_BLOCK_SIZE + 1)
        update_block(12, [tx_spend])
        yield accepted()

        # We save this block id to test reorg
        fork_block_id = node.getbestblockhash()

        # The transaction has been mined, it's not in the mempool anymore
        assert_equal(set(node.getrawmempool()), set())

        # Test OP_RETURN replay protection
        block(13, spend=out[8], script=antireplay_script)
        yield rejected(RejectResult(16, b'bad-txn-replay'))

        # Rewind bad block
        tip(12)

        # Check that only the first block has to be > 1MB
        block(14, spend=out[8])
        yield accepted()

        # Now we reorg just when the HF activated. The
        # SIGHASH_FORKID transaction is back in the mempool
        node.invalidateblock(fork_block_id)
        assert (tx_spend_id in set(node.getrawmempool()))

        # And now just before when the HF activated. The
        # SIGHASH_FORKID should be kicked out the mempool
        node.invalidateblock(node.getbestblockhash())
        assert (tx_spend_id not in set(node.getrawmempool()))
class PtvCpfp(BitcoinTestFramework):
    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 1

        self.genesisactivationheight = 150
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"horsebattery")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        self.locking_script = CScript([self.coinbase_pubkey, OP_CHECKSIG])
        self.locking_script2 = CScript([b"X" * 10, OP_DROP, OP_TRUE])
        self.default_args = [
            '-debug', '-maxgenesisgracefulperiod=0',
            '-genesisactivationheight=%d' % self.genesisactivationheight
        ]
        self.extra_args = [self.default_args] * self.num_nodes

    def setup_network(self):
        self.setup_nodes()

    def setup_nodes(self):
        self.add_nodes(self.num_nodes)

    def check_intersec_with_mempool(self, rpc, txs_set):
        return set(rpc.getrawmempool()).intersection(t.hash for t in txs_set)

    def check_mempool_with_subset(self, rpc, should_be_in_mempool, timeout=20):
        wait_until(lambda: {t.hash
                            for t in should_be_in_mempool}.issubset(
                                set(rpc.getrawmempool())),
                   timeout=timeout)

    def send_txs(self,
                 rpcsend,
                 conn,
                 txs,
                 exp_mempool_size,
                 timeout=300,
                 check_interval=0.1):
        conn = None if rpcsend is not None else conn
        if conn is not None:
            req_start_time = time.time()
            for tx in txs:
                conn.send_message(msg_tx(tx))
            wait_for_ptv_completion(conn,
                                    exp_mempool_size,
                                    timeout=timeout,
                                    check_interval=check_interval)
            elapsed = time.time() - req_start_time
        elif rpcsend is not None:
            elapsed = self.rpc_send_txs(rpcsend, txs)
        else:
            raise Exception("Unspecified interface!")
        return elapsed

    def rpc_send_txs(self, rpcsend, txs):
        if "sendrawtransaction" == rpcsend._service_name:
            req_start_time = time.time()
            for tx in txs:
                rpcsend(ToHex(tx))
            elapsed = time.time() - req_start_time
        elif "sendrawtransactions" == rpcsend._service_name:
            rpc_txs_bulk_input = []
            for tx in txs:
                rpc_txs_bulk_input.append({'hex': ToHex(tx)})
            req_start_time = time.time()
            rpcsend(rpc_txs_bulk_input)
            elapsed = time.time() - req_start_time
        else:
            raise Exception("Unsupported rpc method!")
        return elapsed
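    # Illustrative usage (not part of the test): submit a batch of txs through
    # the node's bulk sendrawtransactions RPC and return the elapsed time, as
    # rpc_send_txs above expects a callable carrying its RPC service name.
    # The method name is only for illustration.
    def example_bulk_submit(self, conn, txs):
        return self.rpc_send_txs(conn.rpc.sendrawtransactions, txs)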

    # Sign a transaction, using the key we know about.
    # This signs input 0 in tx, which is assumed to be spending output n in spend_tx
    def sign_tx(self, tx, spend_tx, n):
        scriptPubKey = bytearray(spend_tx.vout[n].scriptPubKey)
        sighash = SignatureHashForkId(spend_tx.vout[n].scriptPubKey, tx, 0,
                                      SIGHASH_ALL | SIGHASH_FORKID,
                                      spend_tx.vout[n].nValue)
        tx.vin[0].scriptSig = CScript([
            self.coinbase_key.sign(sighash) +
            bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))
        ])

    def create_tx(self, outpoints, noutput, feerate, locking_script):
        tx = CTransaction()
        total_input = 0
        for parent_tx, n in outpoints:
            tx.vin.append(
                CTxIn(COutPoint(parent_tx.sha256, n), b"", 0xffffffff))
            total_input += parent_tx.vout[n].nValue

        for _ in range(noutput):
            tx.vout.append(CTxOut(total_input // noutput, locking_script))

        tx.rehash()

        tx_size = len(tx.serialize())
        fee_per_output = int(tx_size * feerate // noutput)

        for output in tx.vout:
            output.nValue -= fee_per_output

        if locking_script == self.locking_script:
            for parent_tx, n in outpoints:
                self.sign_tx(tx, parent_tx, n)

        tx.rehash()
        return tx
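    # Worked example (illustrative numbers): with a single 2000-satoshi input,
    # noutput=2 and feerate=1.5 sat/byte, each output starts at 2000 // 2 = 1000;
    # if the serialized tx were 200 bytes, fee_per_output = int(200 * 1.5 // 2) = 150,
    # so each output ends up with 850 satoshi and 300 satoshi is left as fee.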

    def generate_txchain(self, fund_txn, vout_idx, chain_length, tx_fee,
                         locking_script):
        txs = []
        req_start_time = time.time()
        txs.append(
            self.create_tx([(fund_txn, vout_idx)], 1, tx_fee, locking_script))
        for idx in range(chain_length - 1):
            txs.append(
                self.create_tx([(txs[idx], 0)], 1, tx_fee, locking_script))
        self.log.info("Generate txchain[%d] of length %d, took: %.6f sec",
                      vout_idx, chain_length,
                      time.time() - req_start_time)
        return txs[chain_length - 1], txs

    def generate_txchains(self, fund_txn, chain_length, num_of_chains, tx_fee,
                          locking_script):
        txs = []
        last_descendant_from_each_txchain = []
        req_start_time = time.time()
        for chain_idx in range(num_of_chains):
            last_descendant_in_txchain, txchain = self.generate_txchain(
                fund_txn, chain_idx, chain_length, tx_fee, locking_script)
            txs.extend(txchain)
            last_descendant_from_each_txchain.append(
                last_descendant_in_txchain)
        self.log.info(
            "The total time to generate all %d txchains (of length %d): %.6f sec",
            num_of_chains, chain_length,
            time.time() - req_start_time)
        return last_descendant_from_each_txchain, txs

    def create_fund_txn(self,
                        conn,
                        noutput,
                        tx_fee,
                        locking_script,
                        pubkey=None):
        # create a new block with coinbase
        last_block_info = conn.rpc.getblock(conn.rpc.getbestblockhash())
        coinbase = create_coinbase(height=last_block_info["height"] + 1,
                                   pubkey=pubkey)
        new_block = create_block(int(last_block_info["hash"], 16),
                                 coinbase=coinbase,
                                 nTime=last_block_info["time"] + 1)
        new_block.nVersion = last_block_info["version"]
        new_block.solve()
        conn.send_message(msg_block(new_block))
        wait_until(lambda: conn.rpc.getbestblockhash() == new_block.hash,
                   check_interval=0.3)
        # mature the coinbase
        conn.rpc.generate(100)
        # create and send a funding txn
        funding_tx = self.create_tx([(coinbase, 0)], 2, 1.5, locking_script)
        conn.send_message(msg_tx(funding_tx))
        check_mempool_equals(conn.rpc, [funding_tx])
        conn.rpc.generate(1)
        # create a new txn which pays the specified tx_fee
        new_tx = self.create_tx([(funding_tx, 0)], noutput, tx_fee,
                                locking_script)
        last_block_info = conn.rpc.getblock(conn.rpc.getbestblockhash())
        new_block = create_block(
            int(last_block_info["hash"], 16),
            coinbase=create_coinbase(height=last_block_info["height"] + 1),
            nTime=last_block_info["time"] + 1)
        new_block.nVersion = last_block_info["version"]
        new_block.vtx.append(new_tx)
        new_block.hashMerkleRoot = new_block.calc_merkle_root()
        new_block.calc_sha256()
        new_block.solve()

        conn.send_message(msg_block(new_block))
        wait_until(lambda: conn.rpc.getbestblockhash() == new_block.hash,
                   check_interval=0.3)

        return new_tx

    # Submit all cpfp txs and measure overall time duration of this process.
    def run_cpfp_scenario1(self,
                           conn,
                           txchains,
                           last_descendant_from_each_txchain,
                           chain_length,
                           num_of_chains,
                           mining_fee,
                           locking_script,
                           rpcsend=None,
                           timeout=240):
        #
        # Send low fee (paying relay_fee) txs to the node.
        #
        exp_mempool_size = conn.rpc.getmempoolinfo()['size'] + len(txchains)
        elapsed1 = self.send_txs(rpcsend, conn, txchains, exp_mempool_size,
                                 timeout)
        # Check if mempool contains all low fee txs.
        check_mempool_equals(conn.rpc, txchains, timeout)

        #
        # Check getminingcandidate result: There should be no cpfp txs in the block template, due to low fees.
        #
        wait_until(lambda: conn.rpc.getminingcandidate()["num_tx"] == 1
                   )  # there should be coinbase tx only

        #
        # Create and send cpfp txs (paying mining_fee).
        #
        cpfp_txs_pay_for_ancestors = []
        for tx in last_descendant_from_each_txchain:
            cpfp_txs_pay_for_ancestors.append(
                self.create_tx([(tx, 0)], 2, (chain_length + 1) * (mining_fee),
                               locking_script))
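        # Note (added explanation): each child pays a feerate of
        # (chain_length + 1) * mining_fee so that, averaged over itself and its
        # chain_length low-fee ancestors, the package feerate is roughly
        # mining_fee, which lets the whole chain into the block template.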
        # Send cpfp txs.
        exp_mempool_size = conn.rpc.getmempoolinfo()['size'] + len(
            cpfp_txs_pay_for_ancestors)
        elapsed2 = self.send_txs(rpcsend, conn, cpfp_txs_pay_for_ancestors,
                                 exp_mempool_size, timeout)
        # Check if there is a required number of txs in the mempool.
        check_mempool_equals(conn.rpc, cpfp_txs_pay_for_ancestors + txchains,
                             timeout)

        #
        # Check getminingcandidate result: There should be all cpfp txs (+ ancestor txs) in the block template.
        #
        wait_until(lambda: conn.rpc.getminingcandidate()["num_tx"] == len(
            cpfp_txs_pay_for_ancestors + txchains) + 1)  # +1 for the coinbase tx

        #
        # Collect stats.
        #
        interface_name = "p2p" if rpcsend is None else rpcsend._service_name
        self.log.info(
            "[%s]: Submit and process %d txchains of length %d (%d relay_fee std txs) [time duration: %.6f sec]",
            interface_name, num_of_chains, chain_length,
            num_of_chains * chain_length, elapsed1)
        self.log.info(
            "[%s]: Submit and process %d cpfp std txs (each pays mining_fee) [time duration: %.6f sec]",
            interface_name, len(cpfp_txs_pay_for_ancestors), elapsed2)
        self.log.info(
            "[%s]: Total time to submit and process %d std txs took %.6f sec",
            interface_name,
            num_of_chains * chain_length + len(cpfp_txs_pay_for_ancestors),
            elapsed1 + elapsed2)

        return elapsed1 + elapsed2

    # Submit txs paying a higher fee than any other tx present in the mempool:
    # - at this stage it is expected that the mempool is full
    # - this process triggers mempool's eviction (for both the primary and the secondary mempools)
    # - 'mempoolminfee' is set to a non zero value
    def run_cpfp_scenario1_override_txs(self,
                                        conn,
                                        high_fee_tx,
                                        mining_fee,
                                        locking_script,
                                        rpcsend=None,
                                        timeout=240):
        txs = []
        rolling_fee = mining_fee + 1000
        tx0 = self.create_tx([(high_fee_tx, 0)], 1, rolling_fee,
                             locking_script)
        tx0_size = len(tx0.serialize())
        # send tx0 over rpc interface to trigger eviction.
        self.rpc_send_txs(conn.rpc.sendrawtransactions, [tx0])
        # calculate rolling fee to satisfy current 'mempoolminfee' requirements.
        rolling_fee = float(conn.rpc.getmempoolinfo()["mempoolminfee"] *
                            COIN) * float(tx0_size / 1024)
        # send the rest of txs.
        for idx in range(1, len(high_fee_tx.vout)):
            txs.append(
                self.create_tx([(high_fee_tx, idx)], 1, rolling_fee,
                               locking_script))
        exp_min_mempool_size = conn.rpc.getmempoolinfo()['size']
        self.send_txs(rpcsend, conn, txs, exp_min_mempool_size, timeout)
        txs.append(tx0)
        # All newly submitted txs should be present in the mempool.
        self.check_mempool_with_subset(conn.rpc, txs)
        return txs

    def test_case1(self, timeout=300):
        # Node's config
        args = [
            '-txnvalidationasynchrunfreq=0',
            '-limitancestorcount=1001',
            '-maxmempool=416784kB',  # the mempool size when 300300 std txs are accepted
            '-blockmintxfee=0.00001',
            '-maxorphantxsize=600MB',
            '-maxmempoolsizedisk=0',
            '-disablebip30checks=1',
            '-checkmempool=0',
            '-persistmempool=0',
            # New CPFP config params:
            '-mempoolmaxpercentcpfp=100',
            '-limitcpfpgroupmemberscount=1001',
        ]
        with self.run_node_with_connections(
                "Scenario 1: Long cpfp std tx chains, non-whitelisted peer",
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):

            mining_fee = 1.01  # in satoshi per byte
            relay_fee = float(conn.rpc.getnetworkinfo()["relayfee"] * COIN /
                              1000) + 0.15  # in satoshi per byte

            # Create a low and high fee txn.
            low_fee_std_tx = self.create_fund_txn(conn,
                                                  300,
                                                  relay_fee,
                                                  self.locking_script,
                                                  pubkey=self.coinbase_pubkey)
            high_fee_nonstd_tx = self.create_fund_txn(conn, 30000, mining_fee,
                                                      self.locking_script2)

            self.stop_node(0)
            # Prevent RPC timeout for sendrawtransactions calls that take longer to return.
            self.nodes[0].rpc_timeout = 600

            # Time duration to submit and process txs.
            p2p_td = 0  # ... through p2p interface
            rpc_td = 0  # ... through rpc interface

            # Generate low fee cpfp std txn chains:
            # - 300K txs: 300 chains of length 1000
            txchain_length = 1000
            num_of_txchains = 300
            last_descendant_from_each_txchain, txchains = self.generate_txchains(
                low_fee_std_tx, txchain_length, num_of_txchains, relay_fee,
                self.locking_script)

            #
            # Send txs through P2P interface.
            #
            TC_1_1_msg = "TC_1_1: Send {} txs (num_of_txchains= {}, txchain_length= {}) through P2P interface"
            with self.run_node_with_connections(
                    TC_1_1_msg.format(txchain_length * num_of_txchains,
                                      num_of_txchains, txchain_length),
                    0,
                    args + self.default_args,
                    number_of_connections=1) as (conn, ):
                p2p_td = self.run_cpfp_scenario1(
                    conn,
                    txchains,
                    last_descendant_from_each_txchain,
                    txchain_length,
                    num_of_txchains,
                    mining_fee,
                    self.locking_script,
                    timeout=timeout)
                # Uses high_fee_nonstd_tx to generate 30K high fee nonstandard txs
                self.run_cpfp_scenario1_override_txs(conn,
                                                     high_fee_nonstd_tx,
                                                     mining_fee,
                                                     self.locking_script2,
                                                     timeout=timeout)

            #
            # Send txs through sendrawtransactions rpc interface (a bulk submit).
            #
            TC_1_2_msg = "TC_1_2: Send {} txs (num_of_chains= {}, chain_length= {}) through RPC interface (a bulk submit)"
            with self.run_node_with_connections(
                    TC_1_2_msg.format(txchain_length * num_of_txchains,
                                      num_of_txchains, txchain_length),
                    0,
                    args + self.default_args,
                    number_of_connections=1) as (conn, ):
                rpc = conn.rpc
                rpc_td = self.run_cpfp_scenario1(
                    conn,
                    txchains,
                    last_descendant_from_each_txchain,
                    txchain_length,
                    num_of_txchains,
                    mining_fee,
                    self.locking_script,
                    rpc.sendrawtransactions,
                    timeout=timeout)
                # Uses high_fee_nonstd_tx to generate 30K high fee nonstandard txs
                self.run_cpfp_scenario1_override_txs(conn,
                                                     high_fee_nonstd_tx,
                                                     mining_fee,
                                                     self.locking_script2,
                                                     rpc.sendrawtransactions,
                                                     timeout=timeout)

            # Check that rpc interface is faster than p2p
            assert_greater_than(p2p_td, rpc_td)

    def run_test(self):
        # Test long chains of cpfp txs.
        self.test_case1(timeout=7200)
Example 11
    def run_test(self):
        node = self.nodes[0]  # convenience reference to the node

        self.bootstrap_p2p()  # Add one p2p connection to the node

        best_block = self.nodes[0].getbestblockhash()
        tip = int(best_block, 16)
        best_block_time = self.nodes[0].getblock(best_block)['time']
        block_time = best_block_time + 1

        privkey = b"aa3680d5d48a8283413f7a108367c7299ca73f553735860a87b08f39395618b7"
        key = CECKey()
        key.set_secretbytes(privkey)
        key.set_compressed(True)
        pubkey = CPubKey(key.get_pubkey())
        pubkeyhash = hash160(pubkey)
        SCRIPT_PUB_KEY = CScript([
            CScriptOp(OP_DUP),
            CScriptOp(OP_HASH160), pubkeyhash,
            CScriptOp(OP_EQUALVERIFY),
            CScriptOp(OP_CHECKSIG)
        ])

        self.log.info("Create a new block with an anyone-can-spend coinbase.")
        height = 1
        block = create_block(tip, create_coinbase(height, pubkey), block_time)
        block.solve(self.signblockprivkey)
        # Save the coinbase for later
        block1 = block
        tip = block.sha256
        node.p2p.send_blocks_and_test([block], node, success=True)

        # b'\x64' is OP_NOTIF
        # Transaction will be rejected with code 16 (REJECT_INVALID)
        self.log.info('Test a transaction that is rejected')
        tx1 = create_tx_with_script(block1.vtx[0],
                                    0,
                                    script_sig=b'\x64' * 35,
                                    amount=50 * COIN - 12000)
        node.p2p.send_txs_and_test([tx1],
                                   node,
                                   success=False,
                                   expect_disconnect=False)

        # Make two p2p connections to provide the node with orphans
        # * p2ps[0] will send valid orphan txs (one with low fee)
        # * p2ps[1] will send an invalid orphan tx (and is later disconnected for that)
        self.reconnect_p2p(num_connections=2)

        self.log.info('Test orphan transaction handling ... ')
        # Create a root transaction that we withhold until all dependent transactions
        # have been sent out and are in the orphan cache
        tx_withhold = CTransaction()
        tx_withhold.vin.append(
            CTxIn(outpoint=COutPoint(block1.vtx[0].malfixsha256, 0)))
        tx_withhold.vout.append(
            CTxOut(nValue=50 * COIN - 12000, scriptPubKey=SCRIPT_PUB_KEY))
        tx_withhold.calc_sha256()
        (sighash, err) = SignatureHash(CScript([pubkey, OP_CHECKSIG]),
                                       tx_withhold, 0, SIGHASH_ALL)
        signature = key.sign(sighash) + b'\x01'  # 0x1 is SIGHASH_ALL
        tx_withhold.vin[0].scriptSig = CScript([signature])

        # Our first orphan tx with some outputs to create further orphan txs
        tx_orphan_1 = CTransaction()
        tx_orphan_1.vin.append(
            CTxIn(outpoint=COutPoint(tx_withhold.malfixsha256, 0)))
        tx_orphan_1.vout = [
            CTxOut(nValue=10 * COIN, scriptPubKey=SCRIPT_PUB_KEY)
        ] * 3
        tx_orphan_1.calc_sha256()
        (sighash, err) = SignatureHash(SCRIPT_PUB_KEY, tx_orphan_1, 0,
                                       SIGHASH_ALL)
        signature = key.sign(sighash) + b'\x01'  # 0x1 is SIGHASH_ALL
        tx_orphan_1.vin[0].scriptSig = CScript([signature, pubkey])

        # A valid transaction with low fee
        tx_orphan_2_no_fee = CTransaction()
        tx_orphan_2_no_fee.vin.append(
            CTxIn(outpoint=COutPoint(tx_orphan_1.malfixsha256, 0)))
        tx_orphan_2_no_fee.vout.append(
            CTxOut(nValue=10 * COIN, scriptPubKey=SCRIPT_PUB_KEY))
        (sighash, err) = SignatureHash(SCRIPT_PUB_KEY, tx_orphan_2_no_fee, 0,
                                       SIGHASH_ALL)
        signature = key.sign(sighash) + b'\x01'  # 0x1 is SIGHASH_ALL
        tx_orphan_2_no_fee.vin[0].scriptSig = CScript([signature, pubkey])

        # A valid transaction with sufficient fee
        tx_orphan_2_valid = CTransaction()
        tx_orphan_2_valid.vin.append(
            CTxIn(outpoint=COutPoint(tx_orphan_1.malfixsha256, 1)))
        tx_orphan_2_valid.vout.append(
            CTxOut(nValue=10 * COIN - 12000, scriptPubKey=SCRIPT_PUB_KEY))
        tx_orphan_2_valid.calc_sha256()
        (sighash, err) = SignatureHash(SCRIPT_PUB_KEY, tx_orphan_2_valid, 0,
                                       SIGHASH_ALL)
        signature = key.sign(sighash) + b'\x01'  # 0x1 is SIGHASH_ALL
        tx_orphan_2_valid.vin[0].scriptSig = CScript([signature, pubkey])

        # An invalid transaction with negative fee
        tx_orphan_2_invalid = CTransaction()
        tx_orphan_2_invalid.vin.append(
            CTxIn(outpoint=COutPoint(tx_orphan_1.malfixsha256, 2)))
        tx_orphan_2_invalid.vout.append(
            CTxOut(nValue=11 * COIN, scriptPubKey=SCRIPT_PUB_KEY))
        (sighash, err) = SignatureHash(SCRIPT_PUB_KEY, tx_orphan_2_invalid, 0,
                                       SIGHASH_ALL)
        signature = key.sign(sighash) + b'\x01'  # 0x1 is SIGHASH_ALL
        tx_orphan_2_invalid.vin[0].scriptSig = CScript([signature, pubkey])

        self.log.info('Send the orphans ... ')
        # Send valid orphan txs from p2ps[0]
        node.p2p.send_txs_and_test(
            [tx_orphan_1, tx_orphan_2_no_fee, tx_orphan_2_valid],
            node,
            success=False)
        # Send invalid tx from p2ps[1]
        node.p2ps[1].send_txs_and_test([tx_orphan_2_invalid],
                                       node,
                                       success=False)

        assert_equal(0,
                     node.getmempoolinfo()['size'])  # Mempool should be empty
        assert_equal(2, len(node.getpeerinfo()))  # p2ps[1] is still connected

        self.log.info('Send the withhold tx ... ')
        node.p2p.send_txs_and_test([tx_withhold], node, success=True)

        # Transactions that should end up in the mempool
        expected_mempool = {
            t.hashMalFix
            for t in [
                tx_withhold,  # The transaction that is the root for all orphans
                tx_orphan_1,  # The orphan transaction that splits the coins
                tx_orphan_2_valid,  # The valid transaction (with sufficient fee)
            ]
        }
        # Transactions that do not end up in the mempool
        # tx_orphan_2_no_fee, because its fee is too low (p2ps[0] is not disconnected for relaying that tx)
        # tx_orphan_2_invalid, because it has a negative fee (p2ps[1] is disconnected for relaying that tx)

        wait_until(lambda: 1 == len(node.getpeerinfo()),
                   timeout=12)  # p2ps[1] is no longer connected
        assert_equal(expected_mempool, set(node.getrawmempool()))

        # restart node with sending BIP61 messages disabled, check that it disconnects without sending the reject message
        self.log.info(
            'Test a transaction that is rejected, with BIP61 disabled')
        self.restart_node(0, ['-enablebip61=0', '-persistmempool=0'])
        self.reconnect_p2p(num_connections=1)
        node.p2p.send_txs_and_test([tx1],
                                   node,
                                   success=False,
                                   expect_disconnect=False)
        # send_txs_and_test will have waited for disconnect, so we can safely check that no reject has been received
        assert_equal(node.p2p.reject_code_received, None)
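Example 12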
    def run_test(self):
        self.nodes[0].add_p2p_connection(P2PDataStore())
        self.nodeaddress = self.nodes[0].getnewaddress()
        self.pubkey = self.nodes[0].getaddressinfo(self.nodeaddress)["pubkey"]
        self.log.info("Mining %d blocks", CHAIN_HEIGHT)
        self.coinbase_txids = [
            self.nodes[0].getblock(b)['tx'][0] for b in self.nodes[0].generate(
                CHAIN_HEIGHT, self.signblockprivkeys)
        ]

        ##  P2PKH transaction
        ########################
        self.log.info("Test using a P2PKH transaction")
        spendtx = create_transaction(self.nodes[0],
                                     self.coinbase_txids[0],
                                     self.nodeaddress,
                                     amount=10)
        spendtx.rehash()
        copy_spendTx = CTransaction(spendtx)

        #cache hashes
        hash = spendtx.hash
        hashMalFix = spendtx.hashMalFix

        #malleate
        unDERify(spendtx)
        spendtx.rehash()

        # verify that hashMalFix remains the same even when signature is malleated and hash changes
        assert_not_equal(hash, spendtx.hash)
        assert_equal(hashMalFix, spendtx.hashMalFix)

        # verify that hash is spendtx.serialize()
        hash = encode(hash256(spendtx.serialize())[::-1],
                      'hex_codec').decode('ascii')
        assert_equal(hash, spendtx.hash)

        # verify that hashMalFix is spendtx.serialize(with_scriptsig=False)
        hashMalFix = encode(
            hash256(spendtx.serialize(with_scriptsig=False))[::-1],
            'hex_codec').decode('ascii')
        assert_equal(hashMalFix, spendtx.hashMalFix)

        assert_not_equal(hash, hashMalFix)
        #as this transaction does not have witness data the following is true
        assert_equal(spendtx.serialize(),
                     spendtx.serialize(with_witness=True, with_scriptsig=True))
        assert_equal(spendtx.serialize(with_witness=False),
                     spendtx.serialize(with_witness=True, with_scriptsig=True))
        assert_not_equal(
            spendtx.serialize(with_witness=False),
            spendtx.serialize(with_witness=True, with_scriptsig=False))
        assert_equal(spendtx.serialize(with_witness=False),
                     spendtx.serialize_without_witness(with_scriptsig=True))
        assert_equal(spendtx.serialize_with_witness(with_scriptsig=True),
                     spendtx.serialize_without_witness(with_scriptsig=True))
        assert_equal(spendtx.serialize_with_witness(with_scriptsig=False),
                     spendtx.serialize_without_witness(with_scriptsig=False))
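        # Minimal helper sketch (an assumption, not part of this test): the
        # repeated txid / malleability-fixed-txid checks above could be written
        # with a small helper built from the same hash256/encode primitives:
        #
        #   def txid_hex(tx, with_scriptsig=True):
        #       raw = tx.serialize(with_scriptsig=with_scriptsig)
        #       return encode(hash256(raw)[::-1], 'hex_codec').decode('ascii')
        #
        #   assert_equal(txid_hex(spendtx), spendtx.hash)
        #   assert_equal(txid_hex(spendtx, with_scriptsig=False), spendtx.hashMalFix)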

        #Create block with only non-DER signature P2PKH transaction
        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(CHAIN_HEIGHT + 1),
                             block_time)
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.hashImMerkleRoot = block.calc_immutable_merkle_root()
        block.rehash()
        block.solve(self.signblockprivkeys)

        # serialize with and without witness block remains the same
        assert_equal(block.serialize(with_witness=True), block.serialize())
        assert_equal(block.serialize(with_witness=True),
                     block.serialize(with_witness=False))
        assert_equal(block.serialize(with_witness=True),
                     block.serialize(with_witness=False, with_scriptsig=True))

        self.log.info("Reject block with non-DER signature")
        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(self.nodes[0].getbestblockhash(), tip)

        wait_until(lambda: "reject" in self.nodes[0].p2p.last_message.keys(),
                   lock=mininode_lock)
        with mininode_lock:
            assert_equal(self.nodes[0].p2p.last_message["reject"].code,
                         REJECT_INVALID)
            assert_equal(self.nodes[0].p2p.last_message["reject"].data,
                         block.sha256)
            assert_equal(self.nodes[0].p2p.last_message["reject"].reason,
                         b'block-validation-failed')

        self.log.info("Accept block with DER signature")
        #recreate block with DER sig transaction
        block = create_block(int(tip, 16), create_coinbase(CHAIN_HEIGHT + 1),
                             block_time)
        block.vtx.append(copy_spendTx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.hashImMerkleRoot = block.calc_immutable_merkle_root()
        block.rehash()
        block.solve(self.signblockprivkeys)

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)

        ##  P2SH transaction
        ########################
        self.log.info("Test using P2SH transaction ")

        REDEEM_SCRIPT_1 = CScript([OP_1, OP_DROP])
        P2SH_1 = CScript([OP_HASH160, hash160(REDEEM_SCRIPT_1), OP_EQUAL])

        tx = CTransaction()
        tx.vin.append(
            CTxIn(COutPoint(int(self.coinbase_txids[1], 16), 0), b"",
                  0xffffffff))
        tx.vout.append(CTxOut(10, P2SH_1))
        tx.rehash()

        spendtx_raw = self.nodes[0].signrawtransactionwithwallet(
            ToHex(tx), [], "ALL", self.options.scheme)["hex"]
        spendtx = FromHex(spendtx, spendtx_raw)
        spendtx.rehash()
        copy_spendTx = CTransaction(spendtx)

        #cache hashes
        hash = spendtx.hash
        hashMalFix = spendtx.hashMalFix

        #malleate
        spendtxcopy = spendtx
        unDERify(spendtxcopy)
        spendtxcopy.rehash()

        # verify that hashMalFix remains the same even when signature is malleated and hash changes
        assert_not_equal(hash, spendtxcopy.hash)
        assert_equal(hashMalFix, spendtxcopy.hashMalFix)

        # verify that hash is spendtx.serialize()
        hash = encode(
            hash256(spendtx.serialize(with_witness=False))[::-1],
            'hex_codec').decode('ascii')
        assert_equal(hash, spendtx.hash)

        # verify that hashMalFix is spendtx.serialize(with_scriptsig=False)
        hashMalFix = encode(
            hash256(spendtx.serialize(with_witness=False,
                                      with_scriptsig=False))[::-1],
            'hex_codec').decode('ascii')
        assert_equal(hashMalFix, spendtx.hashMalFix)

        assert_not_equal(hash, hashMalFix)
        #as this transaction does not have witness data the following is true
        assert_equal(spendtx.serialize(),
                     spendtx.serialize(with_witness=True, with_scriptsig=True))
        assert_equal(spendtx.serialize(with_witness=False),
                     spendtx.serialize(with_witness=True, with_scriptsig=True))
        assert_not_equal(
            spendtx.serialize(with_witness=False),
            spendtx.serialize(with_witness=True, with_scriptsig=False))
        assert_equal(spendtx.serialize(with_witness=False),
                     spendtx.serialize_without_witness(with_scriptsig=True))
        assert_equal(spendtx.serialize_with_witness(with_scriptsig=True),
                     spendtx.serialize_without_witness(with_scriptsig=True))
        assert_equal(spendtx.serialize_with_witness(with_scriptsig=False),
                     spendtx.serialize_without_witness(with_scriptsig=False))

        #Create block with only non-DER signature P2SH transaction
        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(CHAIN_HEIGHT + 2),
                             block_time)
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.hashImMerkleRoot = block.calc_immutable_merkle_root()
        block.rehash()
        block.solve(self.signblockprivkeys)

        # serialize with and without witness block remains the same
        assert_equal(block.serialize(with_witness=True), block.serialize())
        assert_equal(block.serialize(with_witness=True),
                     block.serialize(with_witness=False))
        assert_equal(block.serialize(with_witness=True),
                     block.serialize(with_witness=True, with_scriptsig=True))

        self.log.info("Reject block with non-DER signature")
        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(self.nodes[0].getbestblockhash(), tip)

        wait_until(lambda: "reject" in self.nodes[0].p2p.last_message.keys(),
                   lock=mininode_lock)
        with mininode_lock:
            assert_equal(self.nodes[0].p2p.last_message["reject"].code,
                         REJECT_INVALID)
            assert_equal(self.nodes[0].p2p.last_message["reject"].data,
                         block.sha256)
            assert_equal(self.nodes[0].p2p.last_message["reject"].reason,
                         b'block-validation-failed')

        self.log.info("Accept block with DER signature")
        #recreate block with DER sig transaction
        block = create_block(int(tip, 16), create_coinbase(CHAIN_HEIGHT + 2),
                             block_time)
        block.vtx.append(copy_spendTx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.hashImMerkleRoot = block.calc_immutable_merkle_root()
        block.rehash()
        block.solve(self.signblockprivkeys)

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)

        ## redeem previous P2SH
        #########################
        self.log.info("Test using P2SH redeem transaction ")

        tx = CTransaction()
        tx.vout.append(CTxOut(1, CScript([OP_TRUE])))
        tx.vin.append(CTxIn(COutPoint(block.vtx[1].malfixsha256, 0), b''))

        (sighash, err) = SignatureHash(REDEEM_SCRIPT_1, tx, 1, SIGHASH_ALL)
        signKey = CECKey()
        signKey.set_secretbytes(b"horsebattery")
        sig = signKey.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))
        scriptSig = CScript([sig, REDEEM_SCRIPT_1])

        tx.vin[0].scriptSig = scriptSig
        tx.rehash()

        spendtx_raw = self.nodes[0].signrawtransactionwithwallet(
            ToHex(tx), [], "ALL", self.options.scheme)["hex"]
        spendtx = FromHex(spendtx, spendtx_raw)
        spendtx.rehash()

        #cache hashes
        hash = spendtx.hash
        hashMalFix = spendtx.hashMalFix

        #malleate
        spendtxcopy = spendtx
        unDERify(spendtxcopy)
        spendtxcopy.rehash()

        # verify that hashMalFix remains the same even when signature is malleated and hash changes
        assert_not_equal(hash, spendtxcopy.hash)
        assert_equal(hashMalFix, spendtxcopy.hashMalFix)

        # verify that hash is spendtx.serialize()
        hash = encode(
            hash256(spendtx.serialize(with_witness=False))[::-1],
            'hex_codec').decode('ascii')
        assert_equal(hash, spendtx.hash)

        # verify that hashMalFix is spendtx.serialize(with_scriptsig=False)
        hashMalFix = encode(
            hash256(spendtx.serialize(with_witness=False,
                                      with_scriptsig=False))[::-1],
            'hex_codec').decode('ascii')
        assert_equal(hashMalFix, spendtx.hashMalFix)

        assert_not_equal(hash, hashMalFix)
        #as this transaction does not have witness data the following is true
        assert_equal(spendtx.serialize(),
                     spendtx.serialize(with_witness=True, with_scriptsig=True))
        assert_equal(spendtx.serialize(with_witness=False),
                     spendtx.serialize(with_witness=True, with_scriptsig=True))
        assert_not_equal(
            spendtx.serialize(with_witness=False),
            spendtx.serialize(with_witness=True, with_scriptsig=False))
        assert_equal(spendtx.serialize(with_witness=False),
                     spendtx.serialize_without_witness(with_scriptsig=True))
        assert_equal(spendtx.serialize_with_witness(with_scriptsig=True),
                     spendtx.serialize_without_witness(with_scriptsig=True))
        assert_equal(spendtx.serialize_with_witness(with_scriptsig=False),
                     spendtx.serialize_without_witness(with_scriptsig=False))

        #Create block with only non-DER signature P2SH redeem transaction
        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(CHAIN_HEIGHT + 3),
                             block_time)
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.hashImMerkleRoot = block.calc_immutable_merkle_root()
        block.rehash()
        block.solve(self.signblockprivkeys)

        # serialize with and without witness block remains the same
        assert_equal(block.serialize(with_witness=True), block.serialize())
        assert_equal(block.serialize(with_witness=True),
                     block.serialize(with_witness=False))
        assert_equal(block.serialize(with_witness=True),
                     block.serialize(with_witness=True, with_scriptsig=True))

        self.log.info("Accept block with P2SH redeem transaction")
        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)

        ##  p2sh_p2wpkh transaction
        ##############################
        self.log.info("Test using p2sh_p2wpkh transaction ")
        spendtxStr = create_witness_tx(self.nodes[0],
                                       True,
                                       getInput(self.coinbase_txids[4]),
                                       self.pubkey,
                                       amount=1.0)

        # get CTransaction object from the above hex
        spendtx = CTransaction()
        spendtx.deserialize(BytesIO(hex_str_to_bytes(spendtxStr)))
        spendtx.rehash()

        #cache hashes
        spendtx.rehash()
        hash = spendtx.hash
        hashMalFix = spendtx.hashMalFix
        withash = spendtx.calc_sha256(True)

        # malleate
        unDERify(spendtx)
        spendtx.rehash()
        withash2 = spendtx.calc_sha256(True)

        # verify that hash, hashMalFix and the witness hash all remain unchanged after the malleation attempt
        assert_equal(withash, withash2)
        assert_equal(hash, spendtx.hash)
        assert_equal(hashMalFix, spendtx.hashMalFix)

        # verify that hash is spendtx.serialize()
        hash = encode(hash256(spendtx.serialize())[::-1],
                      'hex_codec').decode('ascii')
        assert_equal(hash, spendtx.hash)

        # verify that hashMalFix is spendtx.serialize(with_scriptsig=False)
        hashMalFix = encode(
            hash256(spendtx.serialize(with_scriptsig=False))[::-1],
            'hex_codec').decode('ascii')
        assert_equal(hashMalFix, spendtx.hashMalFix)

        assert_not_equal(hash, hashMalFix)
        #as this transaction does not have witness data the following is true
        assert_equal(spendtx.serialize(),
                     spendtx.serialize(with_witness=True, with_scriptsig=True))
        assert_equal(spendtx.serialize(with_witness=False),
                     spendtx.serialize(with_witness=True, with_scriptsig=True))
        assert_not_equal(
            spendtx.serialize(with_witness=False),
            spendtx.serialize(with_witness=True, with_scriptsig=False))
        assert_equal(spendtx.serialize(with_witness=False),
                     spendtx.serialize_without_witness(with_scriptsig=True))
        assert_equal(spendtx.serialize_with_witness(with_scriptsig=True),
                     spendtx.serialize_without_witness(with_scriptsig=True))
        assert_equal(spendtx.serialize_with_witness(with_scriptsig=False),
                     spendtx.serialize_without_witness(with_scriptsig=False))

        #Create block with only non-DER signature p2sh_p2wpkh transaction
        spendtxStr = self.nodes[0].signrawtransactionwithwallet(
            spendtxStr, [], "ALL", self.options.scheme)
        assert ("errors" not in spendtxStr or len(["errors"]) == 0)
        spendtxStr = spendtxStr["hex"]
        spendtx = CTransaction()
        spendtx.deserialize(BytesIO(hex_str_to_bytes(spendtxStr)))
        spendtx.rehash()

        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(CHAIN_HEIGHT + 4),
                             block_time)
        block.vtx.append(spendtx)
        add_witness_commitment(block)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.hashImMerkleRoot = block.calc_immutable_merkle_root()
        block.rehash()
        block.solve(self.signblockprivkeys)

        # serialize with and without witness
        assert_equal(block.serialize(with_witness=False), block.serialize())
        assert_not_equal(block.serialize(with_witness=True),
                         block.serialize(with_witness=False))
        assert_not_equal(
            block.serialize(with_witness=True),
            block.serialize(with_witness=False, with_scriptsig=True))

        self.log.info(
            "Reject block with p2sh_p2wpkh transaction and witness commitment")
        assert_raises_rpc_error(
            -22, "Block does not start with a coinbase",
            self.nodes[0].submitblock,
            bytes_to_hex_str(block.serialize(with_witness=True)))
        assert_equal(self.nodes[0].getbestblockhash(), tip)

        block = create_block(int(tip, 16), create_coinbase(CHAIN_HEIGHT + 4),
                             block_time)
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.hashImMerkleRoot = block.calc_immutable_merkle_root()
        block.rehash()
        block.solve(self.signblockprivkeys)

        self.log.info("Accept block with p2sh_p2wpkh transaction")
        self.nodes[0].submitblock(
            bytes_to_hex_str(block.serialize(with_witness=True)))
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)

        ##  p2sh_p2wsh transaction
        ##############################
        self.log.info("Test using p2sh_p2wsh transaction")
        spendtxStr = create_witness_tx(self.nodes[0],
                                       True,
                                       getInput(self.coinbase_txids[5]),
                                       self.pubkey,
                                       amount=1.0)

        # get CTransaction object from the above hex
        spendtx = CTransaction()
        spendtx.deserialize(BytesIO(hex_str_to_bytes(spendtxStr)))
        spendtx.rehash()

        #cache hashes
        spendtx.rehash()
        hash = spendtx.hash
        hashMalFix = spendtx.hashMalFix
        withash = spendtx.calc_sha256(True)

        # malleate
        unDERify(spendtx)
        spendtx.rehash()
        withash2 = spendtx.calc_sha256(True)

        # verify that hash, hashMalFix and the witness hash all remain unchanged after the malleation attempt
        assert_equal(withash, withash2)
        assert_equal(hash, spendtx.hash)
        assert_equal(hashMalFix, spendtx.hashMalFix)

        # verify that hash is spendtx.serialize()
        hash = encode(hash256(spendtx.serialize())[::-1],
                      'hex_codec').decode('ascii')
        assert_equal(hash, spendtx.hash)

        # verify that hashMalFix is spendtx.serialize(with_scriptsig=False)
        hashMalFix = encode(
            hash256(spendtx.serialize(with_scriptsig=False))[::-1],
            'hex_codec').decode('ascii')
        assert_equal(hashMalFix, spendtx.hashMalFix)

        assert_not_equal(hash, hashMalFix)
        #as this transaction does not have witness data the following is true
        assert_equal(spendtx.serialize(),
                     spendtx.serialize(with_witness=True, with_scriptsig=True))
        assert_equal(spendtx.serialize(with_witness=False),
                     spendtx.serialize(with_witness=True, with_scriptsig=True))
        assert_not_equal(
            spendtx.serialize(with_witness=False),
            spendtx.serialize(with_witness=True, with_scriptsig=False))
        assert_equal(spendtx.serialize(with_witness=False),
                     spendtx.serialize_without_witness(with_scriptsig=True))
        assert_equal(spendtx.serialize_with_witness(with_scriptsig=True),
                     spendtx.serialize_without_witness(with_scriptsig=True))
        assert_equal(spendtx.serialize_with_witness(with_scriptsig=False),
                     spendtx.serialize_without_witness(with_scriptsig=False))

        #Create block with only non-DER signature p2sh_p2wsh transaction
        spendtxStr = self.nodes[0].signrawtransactionwithwallet(
            spendtxStr, [], "ALL", self.options.scheme)
        assert ("errors" not in spendtxStr or len(["errors"]) == 0)
        spendtxStr = spendtxStr["hex"]
        spendtx = CTransaction()
        spendtx.deserialize(BytesIO(hex_str_to_bytes(spendtxStr)))
        spendtx.rehash()

        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(CHAIN_HEIGHT + 5),
                             block_time)
        block.vtx.append(spendtx)
        add_witness_commitment(block)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.hashImMerkleRoot = block.calc_immutable_merkle_root()
        block.rehash()
        block.solve(self.signblockprivkeys)

        # serialize with and without witness
        assert_equal(block.serialize(with_witness=False), block.serialize())
        assert_not_equal(block.serialize(with_witness=True),
                         block.serialize(with_witness=False))
        assert_not_equal(
            block.serialize(with_witness=True),
            block.serialize(with_witness=False, with_scriptsig=True))

        self.log.info(
            "Reject block with p2sh_p2wsh transaction and witness commitment")
        assert_raises_rpc_error(
            -22, "Block does not start with a coinbase",
            self.nodes[0].submitblock,
            bytes_to_hex_str(block.serialize(with_witness=True)))
        assert_equal(self.nodes[0].getbestblockhash(), tip)

        block = create_block(int(tip, 16), create_coinbase(CHAIN_HEIGHT + 5),
                             block_time)
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.hashImMerkleRoot = block.calc_immutable_merkle_root()
        block.rehash()
        block.solve(self.signblockprivkeys)

        self.log.info("Accept block with p2sh_p2wsh transaction")
        self.nodes[0].submitblock(
            bytes_to_hex_str(block.serialize(with_witness=True)))
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)
Example 13
    def make_transactions(self,
                          txtype,
                          num_txns,
                          stxn_vin_size,
                          create_double_spends=False):
        key = CECKey()
        key.set_secretbytes(b"horsebattery")
        key.set_compressed(True)
        # Each coin being spent will always result in at least 14 expensive ECDSA checks.
        # 0x7f03 33 OP_NUM2BIN creates a valid non-zero compressed pubkey.
        redeem_script = CScript([
            OP_1,
            key.get_pubkey(), 0x7f03, 33, OP_NUM2BIN, OP_DUP, OP_2DUP, OP_2DUP,
            OP_2DUP, OP_3DUP, OP_3DUP, OP_15, OP_CHECKMULTISIG
        ])

        # Calculate how many fund txns are needed to create the required number of spend txns (num_txns)
        # - a fund txn is of type 1 - N (1 input, N outputs, N=vouts_size_per_fund_txn)
        # - a spend txn is of type M - 1 (M inputs & 1 output)
        def estimate_fund_txns_number(num_txns, vouts_size_per_fund_txn):
            fund_txns_num = 1
            if num_txns >= vouts_size_per_fund_txn:
                if num_txns % vouts_size_per_fund_txn == 0:
                    fund_txns_num = num_txns // vouts_size_per_fund_txn
                else:
                    fund_txns_num = num_txns // vouts_size_per_fund_txn + 1
            return fund_txns_num * vouts_size_per_fund_txn
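        # Worked examples (illustrative): estimate_fund_txns_number(10, 4) == 12
        # (10 // 4 + 1 = 3 fund txns, times 4 outputs each), while
        # estimate_fund_txns_number(3, 4) == 4 (a single fund txn still covers it).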

        # Create funding transactions that will provide funds for other transactions
        def make_fund_txn(node, out_value, num_vout_txns):
            # Create fund txn
            ftx = CTransaction()
            for i in range(num_vout_txns):
                ftx.vout.append(
                    CTxOut(
                        out_value,
                        CScript([OP_HASH160,
                                 hash160(redeem_script), OP_EQUAL])))
            # fund the transaction:
            ftxHex = node.fundrawtransaction(
                ToHex(ftx), {'changePosition': len(ftx.vout)})['hex']
            ftxHex = node.signrawtransaction(ftxHex)['hex']
            ftx = FromHex(CTransaction(), ftxHex)
            ftx.rehash()
            return ftx, ftxHex

        # Create a spend txn
        def make_spend_txn(txtype, fund_txn_hash, fund_txn_num_vouts,
                           out_value):
            # Create txn
            spend_tx = CTransaction()
            for idx in range(fund_txn_num_vouts):
                spend_tx.vin.append(CTxIn(COutPoint(fund_txn_hash, idx), b''))
                sighash = SignatureHashForkId(
                    redeem_script, spend_tx, idx,
                    SIGHASH_ANYONECANPAY | SIGHASH_FORKID | SIGHASH_NONE,
                    out_value)
                sig = key.sign(sighash) + bytes(
                    bytearray([
                        SIGHASH_ANYONECANPAY | SIGHASH_FORKID | SIGHASH_NONE
                    ]))
                spend_tx.vin[idx].scriptSig = CScript(
                    [OP_0, sig, redeem_script])
                # Standard transaction
                if TxType.standard == txtype:
                    spend_tx.vout.append(
                        CTxOut(out_value - 1000, CScript([OP_RETURN])))
                # Non-standard transaction
                elif TxType.nonstandard == txtype:
                    spend_tx.vout.append(
                        CTxOut(out_value - 1000, CScript([OP_TRUE])))
                spend_tx.rehash()
            return spend_tx

        #
        # Generate some blocks to have enough spendable coins
        #
        node = self.nodes[0]
        node.generate(101)

        #
        # Estimate a number of required fund txns
        #
        out_value = 2000
        # Number of outputs in each fund txn
        fund_txn_num_vouts = stxn_vin_size
        fund_txns_num = estimate_fund_txns_number(num_txns, fund_txn_num_vouts)

        #
        # Create and send fund txns to the mempool
        #
        fund_txns = []
        for i in range(fund_txns_num):
            ftx, ftxHex = make_fund_txn(node, out_value, fund_txn_num_vouts)
            node.sendrawtransaction(ftxHex)
            fund_txns.append(ftx)
        # Ensure that mempool is empty to avoid 'too-long-mempool-chain' errors in next test
        node.generate(1)

        #
        # Create spend transactions.
        #
        txtype_to_create = txtype
        spend_txs = []
        for i in range(len(fund_txns)):
            # If standard and non-standard txns are required then create equal (in size) sets.
            if TxType.std_and_nonstd == txtype:
                if i % 2:
                    txtype_to_create = TxType.standard
                else:
                    txtype_to_create = TxType.nonstandard
            # Create a spend money txn with fund_txn_num_vouts number of inputs.
            spend_tx = make_spend_txn(txtype_to_create, fund_txns[i].sha256,
                                      fund_txn_num_vouts, out_value)
            # Create double spend txns if required
            if create_double_spends and len(spend_txs) < num_txns // 2:
                # The first half of the array are double spend txns
                spend_tx.vin.append(
                    CTxIn(
                        COutPoint(fund_txns[len(fund_txns) - i - 1].sha256, 0),
                        b''))
                sighash = SignatureHashForkId(
                    redeem_script, spend_tx, stxn_vin_size,
                    SIGHASH_ANYONECANPAY | SIGHASH_FORKID | SIGHASH_NONE,
                    out_value)
                sig = key.sign(sighash) + bytes(
                    bytearray([
                        SIGHASH_ANYONECANPAY | SIGHASH_FORKID | SIGHASH_NONE
                    ]))
                spend_tx.vin[stxn_vin_size].scriptSig = CScript(
                    [OP_0, sig, redeem_script])
                spend_tx.rehash()
            spend_txs.append(spend_tx)
        return spend_txs
Example 14
class PTVRPCTests(ComparisonTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True
        self.genesisactivationheight = 600
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"horsebattery")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        self.locking_script = CScript([self.coinbase_pubkey, OP_CHECKSIG])
        self.default_args = [
            '-debug', '-maxgenesisgracefulperiod=0',
            '-genesisactivationheight=%d' % self.genesisactivationheight
        ]
        self.extra_args = [self.default_args] * self.num_nodes

    def run_test(self):
        self.test.run()

    # Sign a transaction, using the key we know about.
    # This signs input 0 in tx, which is assumed to be spending output n in spend_tx
    def sign_tx(self, tx, spend_tx, n):
        scriptPubKey = bytearray(spend_tx.vout[n].scriptPubKey)
        sighash = SignatureHashForkId(spend_tx.vout[n].scriptPubKey, tx, 0,
                                      SIGHASH_ALL | SIGHASH_FORKID,
                                      spend_tx.vout[n].nValue)
        tx.vin[0].scriptSig = CScript([
            self.coinbase_key.sign(sighash) +
            bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))
        ])

    def check_mempool(self, rpc, should_be_in_mempool, timeout=20):
        wait_until(lambda: set(rpc.getrawmempool()) ==
                   {t.hash
                    for t in should_be_in_mempool},
                   timeout=timeout)

    # Generate transactions in order, so that each transaction's output becomes an input of the next one
    def get_chained_transactions(self,
                                 spend,
                                 num_of_transactions,
                                 money_to_spend=5000000000):
        txns = []
        for _ in range(0, num_of_transactions):
            money_to_spend = money_to_spend - 1000  # 1000 satoshi fee per tx
            tx = create_transaction(spend.tx, spend.n, b"", money_to_spend,
                                    self.locking_script)
            self.sign_tx(tx, spend.tx, spend.n)
            tx.rehash()
            txns.append(tx)
            spend = PreviousSpendableOutput(tx, 0)
        return txns

    # Create a required number of chains with equal length.
    def get_txchains_n(self, num_of_chains, chain_length, spend):
        if num_of_chains > len(spend):
            raise Exception('Insufficient number of spendable outputs.')
        txchains = []
        for x in range(0, num_of_chains):
            txchains += self.get_chained_transactions(spend[x], chain_length)
        return txchains
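    # Illustrative usage (values taken from the scenarios below):
    # get_txchains_n(10, 100, out) consumes out[0]..out[9] and returns a flat
    # list of 10 * 100 = 1000 chained transactions.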

    # Test an attempt to resubmit transactions (via rpc interface) which are already known
    # - received earlier via p2p interface and not processed yet
    # - use sendrawtransaction rpc interface (a single txn submit) to submit duplicates
    def run_scenario1(self,
                      conn,
                      num_of_chains,
                      chain_length,
                      spend,
                      allowhighfees=False,
                      dontcheckfee=False,
                      timeout=30):
        # Create tx chains.
        txchains = self.get_txchains_n(num_of_chains, chain_length, spend)
        # Send txns, one by one, through p2p interface.
        for tx in range(len(txchains)):
            conn.send_message(msg_tx(txchains[tx]))
        # Check if there is an expected number of transactions in the validation queues
        # - this scenario relies on ptv delayed processing
        # - ptv is required to be paused
        wait_until(lambda: conn.rpc.getblockchainactivity()["transactions"] ==
                   num_of_chains * chain_length,
                   timeout=timeout)
        # No transactions should be in the mempool.
        assert_equal(conn.rpc.getmempoolinfo()['size'], 0)
        # Resubmit txns through rpc interface
        # - there should be num_of_chains*chain_length txns detected as known transactions
        #   - due to the fact that all were already received via p2p interface
        for tx in range(len(txchains)):
            assert_raises_rpc_error(-26, "txn-already-known",
                                    conn.rpc.sendrawtransaction,
                                    ToHex(txchains[tx]), allowhighfees,
                                    dontcheckfee)
        # No transactions should be in the mempool.
        assert_equal(conn.rpc.getmempoolinfo()['size'], 0)

        return txchains

    # An extension to the scenario1.
    # - submit txns through p2p interface
    # - resubmit transactions (via rpc interface) which are already known
    # - create a new block
    # - use invalidateblock to re-org back
    # - create a new block
    # - check if txns are present in the new block
    def run_scenario2(self,
                      conn,
                      num_of_chains,
                      chain_length,
                      spend,
                      allowhighfees=False,
                      dontcheckfee=False,
                      timeout=60):
        # Create tx chains.
        txchains = self.run_scenario1(conn, num_of_chains, chain_length, spend,
                                      allowhighfees, dontcheckfee, timeout)
        wait_for_ptv_completion(conn, len(txchains), timeout=timeout)
        # Check if txchains txns are in the mempool.
        self.check_mempool(conn.rpc, set(txchains), timeout=60)
        # Check if there is only num_of_chains * chain_length txns in the mempool.
        assert_equal(conn.rpc.getmempoolinfo()['size'], len(txchains))
        # Generate a single block.
        mined_block1 = conn.rpc.generate(1)
        # Mempool should be empty, all txns in the block.
        assert_equal(conn.rpc.getmempoolinfo()['size'], 0)
        # Use invalidateblock to re-org back; all transactions should
        # end up unconfirmed and back in the mempool.
        conn.rpc.invalidateblock(mined_block1[0])
        # There should be exactly num_of_chains * chain_length txns in the mempool.
        assert_equal(conn.rpc.getmempoolinfo()['size'], len(txchains))
        self.check_mempool(conn.rpc, set(txchains))
        # Generate another block, they should all get mined.
        mined_block2 = conn.rpc.generate(1)
        # Mempool should be empty, all txns confirmed.
        assert_equal(conn.rpc.getmempoolinfo()['size'], 0)
        # Check if txchains txns are included in the block.
        mined_block2_details = conn.rpc.getblock(mined_block2[0])
        assert_equal(mined_block2_details['num_tx'],
                     len(txchains) + 1)  # +1 for coinbase txn.
        assert_equal(
            len(
                set(mined_block2_details['tx']).intersection(
                    t.hash for t in txchains)), len(txchains))

    def get_tests(self):
        rejected_txs = []

        def on_reject(conn, msg):
            rejected_txs.append(msg)

        # Shorthand for functions
        block = self.chain.next_block
        node = self.nodes[0]
        self.chain.set_genesis_hash(int(node.getbestblockhash(), 16))

        # Create a new block
        block(0, coinbase_pubkey=self.coinbase_pubkey)
        self.chain.save_spendable_output()
        yield self.accepted()

        # Now we need that block to mature so we can spend the coinbase.
        # Also, move block height on beyond Genesis activation.
        test = TestInstance(sync_every_block=False)
        for i in range(600):
            block(5000 + i, coinbase_pubkey=self.coinbase_pubkey)
            test.blocks_and_transactions.append([self.chain.tip, True])
            self.chain.save_spendable_output()
        yield test

        # Collect spendable outputs now to avoid cluttering the code later on.
        out = []
        for i in range(200):
            out.append(self.chain.get_spendable_output())

        self.stop_node(0)

        # Scenario 1 (TS1).
        # This test case checks that resubmitted transactions (through the sendrawtransaction interface) are rejected
        # at an early stage of processing (before txn validation is executed).
        # - 1K txs used
        # - 1K txns are sent first through the p2p interface (and not processed as ptv is paused)
        # - allowhighfees=False (default)
        # - dontcheckfee=False (default)
        #
        # Test case config
        num_of_chains = 10
        chain_length = 100
        # Node's config
        args = [
            '-txnvalidationasynchrunfreq=10000', '-limitancestorcount=100',
            '-checkmempool=0', '-persistmempool=0'
        ]
        with self.run_node_with_connections(
                'TS1: {} chains of length {}. Test duplicates resubmitted via rpc.'
                .format(num_of_chains, chain_length),
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):
            # Run test case.
            self.run_scenario1(conn, num_of_chains, chain_length, out)

        # Scenario 2 (TS2).
        # It's an extension to TS1. Resubmit duplicates, then create a new block and check if it is a valid block.
        # - 100 txs used
        # - allowhighfees=False (default)
        # - dontcheckfee=False (default)
        #
        # Test case config
        num_of_chains = 10
        chain_length = 10
        # Node's config
        args = [
            '-txnvalidationasynchrunfreq=2000',
            '-blockcandidatevaliditytest=1',  # on regtest it's enabled by default but for clarity let's add it explicitly.
            '-checkmempool=0',
            '-persistmempool=0'
        ]
        with self.run_node_with_connections(
                'TS2: {} chains of length {}. Test duplicates and generate a new block.'
                .format(num_of_chains, chain_length),
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):
            # Run test case.
            self.run_scenario2(conn, num_of_chains, chain_length, out)
class FullBlockTest(ComparisonTestFramework):

    # Can either run this test as 1 node with expected answers, or two and compare them.
    # Change the "outcome" variable from each TestInstance object to only do
    # the comparison.

    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True
        self.block_heights = {}
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"horsebattery")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        self.tip = None
        self.blocks = {}

    def setup_network(self):
        self.extra_args = [['-norelaypriority']]
        self.add_nodes(self.num_nodes, self.extra_args)
        self.start_nodes()

    def add_options(self, parser):
        super().add_options(parser)
        parser.add_option(
            "--runbarelyexpensive", dest="runbarelyexpensive", default=True)

    def run_test(self):
        self.test = TestManager(self, self.options.tmpdir)
        self.test.add_all_connections(self.nodes)
        # Start up network handling in another thread
        NetworkThread().start()
        self.test.run()

    def add_transactions_to_block(self, block, tx_list):
        [tx.rehash() for tx in tx_list]
        block.vtx.extend(tx_list)

    # this is a little handier to use than the version in blocktools.py
    def create_tx(self, spend_tx, n, value, script=CScript([OP_TRUE])):
        tx = create_transaction(spend_tx, n, b"", value, script)
        return tx

    # sign a transaction, using the key we know about
    # this signs input 0 in tx, which is assumed to be spending output n in
    # spend_tx
    def sign_tx(self, tx, spend_tx, n):
        scriptPubKey = bytearray(spend_tx.vout[n].scriptPubKey)
        if (scriptPubKey[0] == OP_TRUE):  # an anyone-can-spend
            tx.vin[0].scriptSig = CScript()
            return
        sighash = SignatureHashForkId(
            spend_tx.vout[n].scriptPubKey, tx, 0, SIGHASH_ALL | SIGHASH_FORKID, spend_tx.vout[n].nValue)
        tx.vin[0].scriptSig = CScript(
            [self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))])

    def create_and_sign_transaction(self, spend_tx, n, value, script=CScript([OP_TRUE])):
        tx = self.create_tx(spend_tx, n, value, script)
        self.sign_tx(tx, spend_tx, n)
        tx.rehash()
        return tx

    def next_block(self, number, spend=None, additional_coinbase_value=0, script=CScript([OP_TRUE])):
        if self.tip is None:
            base_block_hash = self.genesis_hash
            block_time = int(time.time()) + 1
        else:
            base_block_hash = self.tip.sha256
            block_time = self.tip.nTime + 1
        # First create the coinbase
        height = self.block_heights[base_block_hash] + 1
        coinbase = create_coinbase(height, self.coinbase_pubkey)
        coinbase.vout[0].nValue += additional_coinbase_value
        coinbase.rehash()
        if spend is None:
            block = create_block(base_block_hash, coinbase, block_time)
        else:
            # all but one satoshi to fees
            coinbase.vout[0].nValue += spend.tx.vout[
                spend.n].nValue - 1
            coinbase.rehash()
            block = create_block(base_block_hash, coinbase, block_time)
            # spend 1 satoshi
            tx = create_transaction(spend.tx, spend.n, b"", 1, script)
            self.sign_tx(tx, spend.tx, spend.n)
            self.add_transactions_to_block(block, [tx])
            block.hashMerkleRoot = block.calc_merkle_root()
        # Do PoW, which is very inexpensive on regtest
        block.solve()
        self.tip = block
        self.block_heights[block.sha256] = height
        assert number not in self.blocks
        self.blocks[number] = block
        return block

    def get_tests(self):
        self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
        self.block_heights[self.genesis_hash] = 0
        spendable_outputs = []

        # save the current tip so it can be spent by a later block
        def save_spendable_output():
            spendable_outputs.append(self.tip)

        # get an output that we previously marked as spendable
        def get_spendable_output():
            return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)

        # returns a test case that asserts that the current tip was accepted
        def accepted():
            return TestInstance([[self.tip, True]])

        # returns a test case that asserts that the current tip was rejected
        def rejected(reject=None):
            if reject is None:
                return TestInstance([[self.tip, False]])
            else:
                return TestInstance([[self.tip, reject]])

        # move the tip back to a previous block
        def tip(number):
            self.tip = self.blocks[number]

        # adds transactions to the block and updates state
        def update_block(block_number, new_transactions):
            block = self.blocks[block_number]
            self.add_transactions_to_block(block, new_transactions)
            old_sha256 = block.sha256
            block.hashMerkleRoot = block.calc_merkle_root()
            block.solve()
            # Update the internal state just like in next_block
            self.tip = block
            if block.sha256 != old_sha256:
                self.block_heights[
                    block.sha256] = self.block_heights[old_sha256]
                del self.block_heights[old_sha256]
            self.blocks[block_number] = block
            return block

        # shorthand for functions
        block = self.next_block
        create_tx = self.create_tx

        # shorthand for variables
        node = self.nodes[0]

        # Create a new block
        block(0)
        save_spendable_output()
        yield accepted()

        # Now we need that block to mature so we can spend the coinbase.
        test = TestInstance(sync_every_block=False)
        for i in range(99):
            block(5000 + i)
            test.blocks_and_transactions.append([self.tip, True])
            save_spendable_output()
        yield test

        # Collect spendable outputs now to avoid cluttering the code later on
        out = []
        for i in range(33):
            out.append(get_spendable_output())

        # P2SH
        # Build the redeem script, hash it, use hash to create the p2sh script
        redeem_script = CScript([self.coinbase_pubkey] + [
                                OP_2DUP, OP_CHECKSIGVERIFY] * 5 + [OP_CHECKSIG])
        redeem_script_hash = hash160(redeem_script)
        p2sh_script = CScript([OP_HASH160, redeem_script_hash, OP_EQUAL])

        # Creates a new transaction using a p2sh transaction as input
        def spend_p2sh_tx(p2sh_tx_to_spend, output_script=CScript([OP_TRUE])):
            # Create the transaction
            spent_p2sh_tx = CTransaction()
            spent_p2sh_tx.vin.append(
                CTxIn(COutPoint(p2sh_tx_to_spend.sha256, 0), b''))
            spent_p2sh_tx.vout.append(CTxOut(1, output_script))
            # Sign the transaction using the redeem script
            sighash = SignatureHashForkId(
                redeem_script, spent_p2sh_tx, 0, SIGHASH_ALL | SIGHASH_FORKID, p2sh_tx_to_spend.vout[0].nValue)
            sig = self.coinbase_key.sign(
                sighash) + bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))
            spent_p2sh_tx.vin[0].scriptSig = CScript([sig, redeem_script])
            spent_p2sh_tx.rehash()
            return spent_p2sh_tx

        # P2SH tests
        # Create a p2sh transaction
        p2sh_tx = self.create_and_sign_transaction(
            out[0].tx, out[0].n, 1, p2sh_script)

        # Add the transaction to the block
        block(1)
        update_block(1, [p2sh_tx])
        yield accepted()

        # Sigops p2sh limit for the mempool test
        p2sh_sigops_limit_mempool = MAX_STANDARD_TX_SIGOPS - \
            redeem_script.GetSigOpCount(True)
        # Too many sigops in one p2sh script
        too_many_p2sh_sigops_mempool = CScript(
            [OP_CHECKSIG] * (p2sh_sigops_limit_mempool + 1))
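        # Illustrative note (added commentary, not from the original test): the redeem script
        # above contains 5 OP_CHECKSIGVERIFY plus 1 OP_CHECKSIG, so GetSigOpCount(True)
        # presumably returns 6 and the limit becomes MAX_STANDARD_TX_SIGOPS - 6; the script
        # above then exceeds that limit by exactly one sigop.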

        # A transaction with this output script can't get into the mempool
        assert_raises_rpc_error(-26, RPC_TXNS_TOO_MANY_SIGOPS_ERROR, node.sendrawtransaction,
                                ToHex(spend_p2sh_tx(p2sh_tx, too_many_p2sh_sigops_mempool)))

        # The transaction is rejected, so the mempool should still be empty
        assert_equal(set(node.getrawmempool()), set())

        # Max sigops in one p2sh txn
        max_p2sh_sigops_mempool = CScript(
            [OP_CHECKSIG] * (p2sh_sigops_limit_mempool))

        # A transaction with this output script can get into the mempool
        max_p2sh_sigops_txn = spend_p2sh_tx(p2sh_tx, max_p2sh_sigops_mempool)
        max_p2sh_sigops_txn_id = node.sendrawtransaction(
            ToHex(max_p2sh_sigops_txn))
        assert_equal(set(node.getrawmempool()), {max_p2sh_sigops_txn_id})

        # Mine the transaction
        block(2, spend=out[1])
        update_block(2, [max_p2sh_sigops_txn])
        yield accepted()

        # The transaction has been mined, it's not in the mempool anymore
        assert_equal(set(node.getrawmempool()), set())
class MemepoolAcceptingTransactionsDuringReorg(BitcoinTestFramework):
    def __init__(self, *a, **kw):
        super(MemepoolAcceptingTransactionsDuringReorg,
              self).__init__(*a, **kw)
        self.private_key = CECKey()
        self.private_key.set_secretbytes(b"fatstacks")
        self.public_key = self.private_key.get_pubkey()

    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 1

    def setup_network(self):
        self.setup_nodes()

    def setup_nodes(self):
        self.add_nodes(self.num_nodes)

    long_eval_script = [
        bytearray(b"x" * 300000),
        bytearray(b"y" * 290000), OP_MUL, OP_DROP
    ]
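    # Note (added commentary, not part of the original test): the script above pushes two very
    # large byte strings, multiplies them with OP_MUL and drops the result, so evaluating it
    # takes a long time while leaving the rest of the locking script unaffected.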

    def create_tx(self,
                  outpoints,
                  noutput,
                  feerate,
                  make_long_eval_script=False):
        """creates p2pk transaction always using the same key (created in constructor), if make_long_eval_script is set
        we are prepending long evaluating script to the locking script
        """
        pre_script = MemepoolAcceptingTransactionsDuringReorg.long_eval_script if make_long_eval_script else []

        tx = CTransaction()
        total_input = 0
        for parent_tx, n in outpoints:
            tx.vin.append(
                CTxIn(COutPoint(parent_tx.sha256, n), CScript([b"0" * 72]),
                      0xffffffff))
            total_input += parent_tx.vout[n].nValue

        for _ in range(noutput):
            tx.vout.append(
                CTxOut(total_input // noutput,
                       CScript(pre_script + [self.public_key, OP_CHECKSIG])))

        tx.rehash()

        tx_size = len(tx.serialize())
        fee_per_output = int(tx_size * feerate // noutput)

        for output in tx.vout:
            output.nValue -= fee_per_output
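        # Worked example (illustrative numbers, not taken from the test): a 300-byte
        # transaction with feerate 1.1 and 2 outputs deducts int(300 * 1.1 // 2) = 165
        # satoshi from each output, i.e. the transaction as a whole pays roughly
        # tx_size * feerate in fees.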

        for input, (parent_tx, n) in zip(tx.vin, outpoints):
            sighash = SignatureHashForkId(parent_tx.vout[n].scriptPubKey, tx,
                                          0, SIGHASH_ALL | SIGHASH_FORKID,
                                          parent_tx.vout[n].nValue)
            input.scriptSig = CScript([
                self.private_key.sign(sighash) +
                bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))
            ])

        tx.rehash()
        return tx

    def make_block(self, txs, parent_hash, parent_height, parent_time):
        """ creates a block with given transactions"""
        block = create_block(int(parent_hash, 16),
                             coinbase=create_coinbase(pubkey=self.public_key,
                                                      height=parent_height +
                                                      1),
                             nTime=parent_time + 1)
        block.vtx.extend(txs)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.calc_sha256()
        block.solve()

        return block

    def run_test(self):
        with self.run_node_with_connections(
                "Preparation",
                0, [
                    "-blockmintxfee=0.00001",
                    "-relayfee=0.000005",
                    "-checkmempool=0",
                ],
                number_of_connections=1) as (conn, ):
            mining_fee = 1.1

            # create block with coinbase
            coinbase = create_coinbase(pubkey=self.public_key, height=1)
            first_block = create_block(int(conn.rpc.getbestblockhash(), 16),
                                       coinbase=coinbase)
            first_block.solve()
            conn.send_message(msg_block(first_block))
            wait_until(lambda: conn.rpc.getbestblockhash() == first_block.hash,
                       check_interval=0.3)

            # mature the coinbase
            conn.rpc.generate(150)

            funding_tx = self.create_tx([(coinbase, 0)], 2006, mining_fee)
            conn.send_message(msg_tx(funding_tx))
            check_mempool_equals(conn.rpc, [funding_tx])

            # generates a root block with our funding transaction
            conn.rpc.generate(1)

            # create 2000 standard p2pk transactions
            a1_txs = []
            for m in range(2000):
                a1_txs.append(self.create_tx([(funding_tx, m)], 1, mining_fee))

            a1_spends = []
            for a1_tx in a1_txs:
                a1_spends.append(self.create_tx([(a1_tx, 0)], 1, mining_fee))

            # create 2000 standard p2pk transactions which are spending the same outputs as a1_txs
            double_spend_txs = []
            for m in range(2000):
                double_spend_txs.append(
                    self.create_tx([(funding_tx, m)], 1, mining_fee))

            TX_COUNT = 8
            # create chains of long-evaluating transactions for blocks b1, b2, c1, c2, d1, and d2
            long_eval_txs = []
            for m in range(2000, 2006):
                long_eval_txs.append(
                    self.create_tx([(funding_tx, m)],
                                   1,
                                   0.0001,
                                   make_long_eval_script=True))
                for _ in range(TX_COUNT - 1):
                    long_eval_txs.append(
                        self.create_tx([(long_eval_txs[-1], 0)],
                                       1,
                                       0.0001,
                                       make_long_eval_script=True))

            root_block_info = conn.rpc.getblock(conn.rpc.getbestblockhash())
            root_hash = root_block_info["hash"]
            root_height = root_block_info["height"]
            root_time = root_block_info["time"]

            # create all blocks needed for this test
            block_a1 = self.make_block(a1_txs, root_hash, root_height,
                                       root_time)
            block_b1 = self.make_block(
                long_eval_txs[0 * TX_COUNT:1 * TX_COUNT], root_hash,
                root_height, root_time)
            block_b2 = self.make_block(
                long_eval_txs[1 * TX_COUNT:2 * TX_COUNT], block_b1.hash,
                root_height + 1, root_time + 100)
            block_c1 = self.make_block(
                long_eval_txs[2 * TX_COUNT:3 * TX_COUNT], root_hash,
                root_height, root_time)
            block_c2 = self.make_block(
                long_eval_txs[3 * TX_COUNT:4 * TX_COUNT], block_c1.hash,
                root_height + 1, root_time + 101)
            block_d1 = self.make_block(
                long_eval_txs[4 * TX_COUNT:5 * TX_COUNT], root_hash,
                root_height, root_time)
            block_d2 = self.make_block(
                long_eval_txs[5 * TX_COUNT:6 * TX_COUNT], block_d1.hash,
                root_height + 1, root_time + 102)

            conn.send_message(msg_block(block_a1))
            wait_until(lambda: conn.rpc.getbestblockhash() == block_a1.hash,
                       check_interval=0.3)

        with self.run_node_with_connections(
                "1. Try sending the same transaction that are in the disconnected block during the reorg",
                0, [
                    "-blockmintxfee=0.00001",
                    "-relayfee=0.000005",
                    "-maxtxsizepolicy=0",
                    "-maxstdtxnsperthreadratio=1",
                    "-maxnonstdtxnsperthreadratio=1",
                    '-maxnonstdtxvalidationduration=100000',
                    '-maxtxnvalidatorasynctasksrunduration=100001',
                    '-genesisactivationheight=1',
                    '-maxstackmemoryusageconsensus=2GB',
                    "-maxscriptsizepolicy=2GB",
                    "-acceptnonstdoutputs=1",
                ],
                number_of_connections=1) as (conn, ):

            # send all transactions from block a1 at once and flood the PTV
            for tx in a1_txs:
                conn.send_message(msg_tx(tx))

            # announce blocks b1 and b2 and send them, triggering the reorg
            headers = msg_headers()
            headers.headers.append(block_b1)
            headers.headers.append(block_b2)
            conn.send_message(headers)

            conn.send_message(msg_block(block_b1))
            conn.send_message(msg_block(block_b2))

            # at this point the PTV (parallel transaction validation) and the PBV (parallel
            # block validation) are working at the same time, filling the mempool while block a1 is disconnected

            # check if everything is as expected
            wait_until(lambda: conn.rpc.getbestblockhash() == block_b2.hash,
                       timeout=60,
                       check_interval=1)
            check_mempool_equals(conn.rpc, a1_txs)

            # now prepare for next scenario
            conn.rpc.invalidateblock(block_b1.hash)
            wait_until(lambda: conn.rpc.getbestblockhash() == block_a1.hash,
                       check_interval=1)

            # transactions from the disconnected blocks b1 and b2 will not be added to the mempool
            # because of their insufficient priority (zero fee)
            check_mempool_equals(conn.rpc, [], timeout=60, check_interval=1)

        with self.run_node_with_connections(
                "2. Try sending transaction that are spending same inputs as transactions in the disconnected block during the reorg",
                0, [
                    "-blockmintxfee=0.00001",
                    "-relayfee=0.000005",
                    "-maxtxsizepolicy=0",
                    "-maxstdtxnsperthreadratio=1",
                    "-maxnonstdtxnsperthreadratio=1",
                    '-maxnonstdtxvalidationduration=100000',
                    '-maxtxnvalidatorasynctasksrunduration=100001',
                    '-genesisactivationheight=1',
                    '-maxstackmemoryusageconsensus=2GB',
                    "-maxscriptsizepolicy=2GB",
                    "-acceptnonstdoutputs=1",
                ],
                number_of_connections=1) as (conn, ):

            # see if everything is still as expected
            wait_until(lambda: conn.rpc.getbestblockhash() == block_a1.hash,
                       check_interval=1)
            check_mempool_equals(conn.rpc, [], timeout=60, check_interval=1)

            # send all transactions that are double-spends of txs from block a1
            for double_spend_tx in double_spend_txs:
                conn.send_message(msg_tx(double_spend_tx))

            # announce and send blocks c1 and c2
            headers = msg_headers()
            headers.headers.append(block_c1)
            headers.headers.append(block_c2)
            conn.send_message(headers)

            conn.send_message(msg_block(block_c1))
            conn.send_message(msg_block(block_c2))

            # at this point the PTV and the PBV are working at the same time, filling the mempool
            # with double-spends while block a1 is disconnected

            # see if everything is as expected
            wait_until(lambda: conn.rpc.getbestblockhash() == block_c2.hash,
                       timeout=60,
                       check_interval=1)
            # the mempool should contain all transactions from block a1,
            # while no double_spend_txs should be present
            check_mempool_equals(conn.rpc,
                                 a1_txs,
                                 timeout=60,
                                 check_interval=1)

            # now prepare for next scenario
            conn.rpc.invalidateblock(block_c1.hash)
            wait_until(lambda: conn.rpc.getbestblockhash() == block_a1.hash,
                       check_interval=1)

            # transactions from the disconnected blocks c1 and c2 will not be added to the mempool
            # because of their insufficient priority (zero fee)
            check_mempool_equals(conn.rpc, [], timeout=60, check_interval=1)

        with self.run_node_with_connections(
                "3. Submit transactions that are spending ouputs from disconnecting block and try to mine a block during the reorg",
                0, [
                    "-blockmintxfee=0.00001",
                    "-relayfee=0.000005",
                    "-maxtxsizepolicy=0",
                    '-maxnonstdtxvalidationduration=100000',
                    '-maxtxnvalidatorasynctasksrunduration=100001',
                    '-genesisactivationheight=1',
                    '-maxstackmemoryusageconsensus=2GB',
                    "-maxscriptsizepolicy=2GB",
                    "-acceptnonstdoutputs=1",
                ],
                number_of_connections=1) as (conn, ):

            # see if everything is still as expected
            wait_until(lambda: conn.rpc.getbestblockhash() == block_a1.hash,
                       check_interval=1)
            check_mempool_equals(conn.rpc, [], timeout=60, check_interval=1)

            # send transactions that spend outputs from the soon-to-be-disconnected block (a1)
            for tx in a1_spends:
                conn.send_message(msg_tx(tx))

            # they should all reach the mempool
            check_mempool_equals(conn.rpc, a1_spends, timeout=100)

            # announce blocks d1 and d2 and send them, triggering the reorg
            headers = msg_headers()
            headers.headers.append(block_d1)
            headers.headers.append(block_d2)
            conn.send_message(headers)

            conn.send_message(msg_block(block_d1))
            conn.send_message(msg_block(block_d2))

            # let's give the reorg a chance to start
            sleep(0.5)

            # we are in the middle of the reorg; let's try to mine a block
            # if the node were in an inconsistent state, this call would fail
            conn.rpc.generate(1)
class ColoredCoinTest(BitcoinTestFramework):
    def set_test_params(self):
        self.pubkeys = [
            "025700236c2890233592fcef262f4520d22af9160e3d9705855140eb2aa06c35d3",
            "03831a69b8009833ab5b0326012eaf489bfea35a7321b1ca15b11d88131423fafc"
        ]

        privkeystr = [
            "67ae3f5bfb3464b9704d7bd3a134401cc80c3a172240ebfca9f1e40f51bb6d37",
            "dbb9d19637018267268dfc2cc7aec07e7217c1a2d6733e1184a0909273bf078b"
        ]

        self.privkeys = []
        for key in privkeystr:
            ckey = CECKey()
            ckey.set_secretbytes(bytes.fromhex(key))
            self.privkeys.append(ckey)

        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(
            bytes.fromhex(
                "12b004fff7f4b69ef8650e767f18f11ede158148b425660723b9f9a66e61f747"
            ))
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()

        self.schnorr_key = Schnorr()
        self.schnorr_key.set_secretbytes(
            bytes.fromhex(
                "12b004fff7f4b69ef8650e767f18f11ede158148b425660723b9f9a66e61f747"
            ))

        self.num_nodes = 1
        self.setup_clean_chain = True

    def run_test(self):
        node = self.nodes[0]  # convenience reference to the node
        self.address = node.getnewaddress()
        node.add_p2p_connection(P2PDataStore())
        node.p2p.wait_for_getheaders(timeout=5)
        self.address = self.nodes[0].getnewaddress()

        self.log.info("Test starting...")

        # generate 9 blocks for coinbase outputs
        coinbase_txs = []
        for i in range(1, 10):
            height = node.getblockcount() + 1
            coinbase_tx = create_coinbase(height, self.coinbase_pubkey)
            coinbase_txs.append(coinbase_tx)
            tip = node.getbestblockhash()
            block_time = node.getblockheader(tip)["mediantime"] + 1
            block = create_block(int(tip, 16), coinbase_tx, block_time)
            block.solve(self.signblockprivkey)
            tip = block.hash

            node.p2p.send_and_ping(msg_block(block))
            assert_equal(node.getbestblockhash(), tip)

        change_script = CScript([self.coinbase_pubkey, OP_CHECKSIG])
        burn_script = CScript([hex_str_to_bytes(self.pubkeys[1]), OP_CHECKSIG])
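        # Note (added commentary, hedged): as the helper calls below suggest, the reissuable
        # color id is derived from the scriptPubKey being spent (colorIdReissuable), while the
        # non-reissuable and NFT color ids are derived from a specific outpoint
        # (colorIdNonReissuable / colorIdNFT).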

        #TxSuccess1 - coinbaseTx1 - issue 100 REISSUABLE  + 30     (UTXO-1,2)
        colorId_reissuable = colorIdReissuable(
            coinbase_txs[0].vout[0].scriptPubKey)
        script_reissuable = CP2PHK_script(colorId=colorId_reissuable,
                                          pubkey=self.pubkeys[0])
        script_transfer_reissuable = CP2PHK_script(colorId=colorId_reissuable,
                                                   pubkey=self.pubkeys[1])

        txSuccess1 = CTransaction()
        txSuccess1.vin.append(
            CTxIn(COutPoint(coinbase_txs[0].malfixsha256, 0), b""))
        txSuccess1.vout.append(CTxOut(100, script_reissuable))
        txSuccess1.vout.append(
            CTxOut(30 * COIN, CScript([self.coinbase_pubkey, OP_CHECKSIG])))
        sig_hash, err = SignatureHash(coinbase_txs[0].vout[0].scriptPubKey,
                                      txSuccess1, 0, SIGHASH_ALL)
        signature = self.coinbase_key.sign(
            sig_hash) + b'\x01'  # 0x1 is SIGHASH_ALL
        txSuccess1.vin[0].scriptSig = CScript([signature])
        txSuccess1.rehash()

        test_transaction_acceptance(node, txSuccess1, accepted=True)

        #TxSuccess2 - (UTXO-2)    - issue 100 NON-REISSUABLE       (UTXO-3)
        colorId_nonreissuable = colorIdNonReissuable(
            COutPoint(txSuccess1.malfixsha256, 1).serialize())
        script_nonreissuable = CP2PHK_script(colorId=colorId_nonreissuable,
                                             pubkey=self.pubkeys[0])
        script_transfer_nonreissuable = CP2PHK_script(
            colorId=colorId_nonreissuable, pubkey=self.pubkeys[1])

        txSuccess2 = CTransaction()
        txSuccess2.vin.append(CTxIn(COutPoint(txSuccess1.malfixsha256, 1),
                                    b""))
        txSuccess2.vout.append(CTxOut(100, script_nonreissuable))
        sig_hash, err = SignatureHash(txSuccess1.vout[1].scriptPubKey,
                                      txSuccess2, 0, SIGHASH_ALL)
        signature = self.coinbase_key.sign(sig_hash) + b'\x01'
        txSuccess2.vin[0].scriptSig = CScript([signature])
        txSuccess2.rehash()

        test_transaction_acceptance(node, txSuccess2, accepted=True)

        #TxSuccess3 - coinbaseTx2 - issue 1 NFT                    (UTXO-4)
        colorId_nft = colorIdNFT(
            COutPoint(coinbase_txs[1].malfixsha256, 0).serialize())
        script_nft = CP2PHK_script(colorId=colorId_nft, pubkey=self.pubkeys[0])
        script_transfer_nft = CP2PHK_script(colorId=colorId_nft,
                                            pubkey=self.pubkeys[0])

        txSuccess3 = CTransaction()
        txSuccess3.vin.append(
            CTxIn(COutPoint(coinbase_txs[1].malfixsha256, 0), b""))
        txSuccess3.vout.append(CTxOut(1, script_nft))
        sig_hash, err = SignatureHash(coinbase_txs[1].vout[0].scriptPubKey,
                                      txSuccess3, 0, SIGHASH_ALL)
        signature = self.coinbase_key.sign(sig_hash) + b'\x01'
        txSuccess3.vin[0].scriptSig = CScript([signature])
        txSuccess3.rehash()

        test_transaction_acceptance(node, txSuccess3, accepted=True)

        #TxFailure4 - (UTXO-1)    - split REISSUABLE - 25 + 75     (UTXO-5,6)
        #           - (UTXO-3)    - split NON-REISSUABLE - 40 + 60 (UTXO-7,8)
        #           - coinbaseTx3 - issue 100 REISSUABLE           (UTXO-9)
        TxFailure4 = CTransaction()
        TxFailure4.vin.append(CTxIn(COutPoint(txSuccess1.malfixsha256, 0),
                                    b""))
        TxFailure4.vin.append(CTxIn(COutPoint(txSuccess2.malfixsha256, 0),
                                    b""))
        TxFailure4.vin.append(
            CTxIn(COutPoint(coinbase_txs[2].malfixsha256, 0), b""))
        TxFailure4.vout.append(CTxOut(25, script_reissuable))
        TxFailure4.vout.append(CTxOut(75, script_reissuable))
        TxFailure4.vout.append(CTxOut(40, script_nonreissuable))
        TxFailure4.vout.append(CTxOut(60, script_nonreissuable))
        TxFailure4.vout.append(CTxOut(100, script_reissuable))
        sig_hash, err = SignatureHash(txSuccess1.vout[0].scriptPubKey,
                                      TxFailure4, 0, SIGHASH_ALL)
        signature = self.privkeys[0].sign(sig_hash) + b'\x01'
        TxFailure4.vin[0].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[0])])
        sig_hash, err = SignatureHash(txSuccess2.vout[0].scriptPubKey,
                                      TxFailure4, 1, SIGHASH_ALL)
        signature = self.privkeys[0].sign(sig_hash) + b'\x01'
        TxFailure4.vin[1].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[0])])
        sig_hash, err = SignatureHash(coinbase_txs[2].vout[0].scriptPubKey,
                                      TxFailure4, 2, SIGHASH_ALL)
        signature = self.coinbase_key.sign(sig_hash) + b'\x01'
        TxFailure4.vin[2].scriptSig = CScript([signature])
        TxFailure4.rehash()

        test_transaction_acceptance(node,
                                    TxFailure4,
                                    accepted=False,
                                    reason=b"bad-txns-token-balance")

        #TxSuccess4 - (UTXO-1)    - split REISSUABLE - 25 + 75     (UTXO-5,6)
        #           - (UTXO-3)    - split NON-REISSUABLE - 40 + 60 (UTXO-7,8)
        txSuccess4 = CTransaction()
        txSuccess4.vin.append(CTxIn(COutPoint(txSuccess1.malfixsha256, 0),
                                    b""))
        txSuccess4.vin.append(CTxIn(COutPoint(txSuccess2.malfixsha256, 0),
                                    b""))
        txSuccess4.vin.append(
            CTxIn(COutPoint(coinbase_txs[2].malfixsha256, 0), b""))
        txSuccess4.vout.append(CTxOut(25, script_reissuable))
        txSuccess4.vout.append(CTxOut(75, script_reissuable))
        txSuccess4.vout.append(CTxOut(40, script_nonreissuable))
        txSuccess4.vout.append(CTxOut(60, script_nonreissuable))
        sig_hash, err = SignatureHash(txSuccess1.vout[0].scriptPubKey,
                                      txSuccess4, 0, SIGHASH_ALL)
        signature = self.privkeys[0].sign(sig_hash) + b'\x01'
        txSuccess4.vin[0].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[0])])
        sig_hash, err = SignatureHash(txSuccess2.vout[0].scriptPubKey,
                                      txSuccess4, 1, SIGHASH_ALL)
        signature = self.privkeys[0].sign(sig_hash) + b'\x01'
        txSuccess4.vin[1].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[0])])
        sig_hash, err = SignatureHash(coinbase_txs[2].vout[0].scriptPubKey,
                                      txSuccess4, 2, SIGHASH_ALL)
        signature = self.coinbase_key.sign(sig_hash) + b'\x01'
        txSuccess4.vin[2].scriptSig = CScript([signature])
        txSuccess4.rehash()

        test_transaction_acceptance(node, txSuccess4, accepted=True)

        #TxFailure5 - (UTXO-6)    - split REISSUABLE(75)           (UTXO-10,11)
        #           - (UTXO-7)    - split NON-REISSUABLE(40)       (UTXO-12)
        #           - (UTXO-4)    - split NFT                      (UTXO-13)
        #           - coinbaseTx4
        TxFailure5 = CTransaction()
        TxFailure5.vin.append(CTxIn(COutPoint(txSuccess4.malfixsha256, 1),
                                    b""))
        TxFailure5.vin.append(CTxIn(COutPoint(txSuccess4.malfixsha256, 2),
                                    b""))
        TxFailure5.vin.append(CTxIn(COutPoint(txSuccess3.malfixsha256, 0),
                                    b""))
        TxFailure5.vin.append(
            CTxIn(COutPoint(coinbase_txs[3].malfixsha256, 0), b""))
        TxFailure5.vout.append(CTxOut(35, script_reissuable))
        TxFailure5.vout.append(CTxOut(40, script_reissuable))
        TxFailure5.vout.append(CTxOut(20, script_nonreissuable))
        TxFailure5.vout.append(CTxOut(20, script_nonreissuable))
        TxFailure5.vout.append(CTxOut(1, script_nft))
        TxFailure5.vout.append(CTxOut(1, script_nft))
        sig_hash, err = SignatureHash(txSuccess4.vout[1].scriptPubKey,
                                      TxFailure5, 0, SIGHASH_ALL)
        signature = self.privkeys[0].sign(sig_hash) + b'\x01'
        TxFailure5.vin[0].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[0])])
        sig_hash, err = SignatureHash(txSuccess4.vout[2].scriptPubKey,
                                      TxFailure5, 1, SIGHASH_ALL)
        signature = self.privkeys[0].sign(sig_hash) + b'\x01'
        TxFailure5.vin[1].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[0])])
        sig_hash, err = SignatureHash(txSuccess3.vout[0].scriptPubKey,
                                      TxFailure5, 2, SIGHASH_ALL)
        signature = self.privkeys[0].sign(sig_hash) + b'\x01'
        TxFailure5.vin[2].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[0])])
        sig_hash, err = SignatureHash(coinbase_txs[3].vout[0].scriptPubKey,
                                      TxFailure5, 3, SIGHASH_ALL)
        signature = self.coinbase_key.sign(sig_hash) + b'\x01'
        TxFailure5.vin[3].scriptSig = CScript([signature])
        TxFailure5.rehash()

        test_transaction_acceptance(node,
                                    TxFailure5,
                                    accepted=False,
                                    reason=b"bad-txns-token-balance")

        #txSuccess5 - (UTXO-6)    - split REISSUABLE(75)           (UTXO-10,11)
        #           - (UTXO-7)    - split NON-REISSUABLE(40)       (UTXO-12)
        #           - (UTXO-4)    - transfer NFT                      (UTXO-13)
        #           - coinbaseTx4
        txSuccess5 = CTransaction()
        txSuccess5.vin.append(CTxIn(COutPoint(txSuccess4.malfixsha256, 1),
                                    b""))
        txSuccess5.vin.append(CTxIn(COutPoint(txSuccess4.malfixsha256, 2),
                                    b""))
        txSuccess5.vin.append(CTxIn(COutPoint(txSuccess3.malfixsha256, 0),
                                    b""))
        txSuccess5.vin.append(
            CTxIn(COutPoint(coinbase_txs[3].malfixsha256, 0), b""))
        txSuccess5.vout.append(CTxOut(35, script_reissuable))
        txSuccess5.vout.append(CTxOut(40, script_reissuable))
        txSuccess5.vout.append(CTxOut(20, script_nonreissuable))
        txSuccess5.vout.append(CTxOut(20, script_nonreissuable))
        txSuccess5.vout.append(CTxOut(1, script_nft))
        sig_hash, err = SignatureHash(txSuccess4.vout[1].scriptPubKey,
                                      txSuccess5, 0, SIGHASH_ALL)
        signature = self.privkeys[0].sign(sig_hash) + b'\x01'
        txSuccess5.vin[0].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[0])])
        sig_hash, err = SignatureHash(txSuccess4.vout[2].scriptPubKey,
                                      txSuccess5, 1, SIGHASH_ALL)
        signature = self.privkeys[0].sign(sig_hash) + b'\x01'
        txSuccess5.vin[1].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[0])])
        sig_hash, err = SignatureHash(txSuccess3.vout[0].scriptPubKey,
                                      txSuccess5, 2, SIGHASH_ALL)
        signature = self.privkeys[0].sign(sig_hash) + b'\x01'
        txSuccess5.vin[2].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[0])])
        sig_hash, err = SignatureHash(coinbase_txs[3].vout[0].scriptPubKey,
                                      txSuccess5, 3, SIGHASH_ALL)
        signature = self.coinbase_key.sign(sig_hash) + b'\x01'
        txSuccess5.vin[3].scriptSig = CScript([signature])
        txSuccess5.rehash()

        test_transaction_acceptance(node, txSuccess5, accepted=True)

        #TxFailure6 - (UTXO-11)   - transfer REISSUABLE(40)        (UTXO-14)
        #           - (UTXO-8)    - burn NON-REISSUABLE(60)        (UTXO-15)*
        #           - (UTXO-13)   - transfer NFT                   (UTXO-16)
        #           - coinbaseTx5 - issue 1000 REISSUABLE1, change (UTXO-17)
        colorId_reissuable1 = colorIdReissuable(
            coinbase_txs[6].vout[0].scriptPubKey)
        script_reissuable1 = CP2PHK_script(colorId=colorId_reissuable,
                                           pubkey=self.pubkeys[0])

        TxFailure6 = CTransaction()
        TxFailure6.vin.append(CTxIn(COutPoint(txSuccess5.malfixsha256, 1),
                                    b""))
        TxFailure6.vin.append(CTxIn(COutPoint(txSuccess4.malfixsha256, 3),
                                    b""))
        TxFailure6.vin.append(CTxIn(COutPoint(txSuccess5.malfixsha256, 4),
                                    b""))
        TxFailure6.vin.append(
            CTxIn(COutPoint(coinbase_txs[4].malfixsha256, 0), b""))
        TxFailure6.vout.append(CTxOut(40, script_transfer_reissuable))
        TxFailure6.vout.append(CTxOut(30, script_transfer_nonreissuable))
        TxFailure6.vout.append(CTxOut(1, script_transfer_nft))
        TxFailure6.vout.append(CTxOut(1000, script_reissuable1))
        TxFailure6.vout.append(CTxOut(1 * COIN, change_script))
        sig_hash, err = SignatureHash(txSuccess5.vout[1].scriptPubKey,
                                      TxFailure6, 0, SIGHASH_ALL)
        signature = self.privkeys[0].sign(sig_hash) + b'\x01'
        TxFailure6.vin[0].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[0])])
        sig_hash, err = SignatureHash(txSuccess4.vout[3].scriptPubKey,
                                      TxFailure6, 1, SIGHASH_ALL)
        signature = self.privkeys[0].sign(sig_hash) + b'\x01'
        TxFailure6.vin[1].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[0])])
        sig_hash, err = SignatureHash(txSuccess5.vout[4].scriptPubKey,
                                      TxFailure6, 2, SIGHASH_ALL)
        signature = self.privkeys[0].sign(sig_hash) + b'\x01'
        TxFailure6.vin[2].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[0])])
        sig_hash, err = SignatureHash(coinbase_txs[4].vout[0].scriptPubKey,
                                      TxFailure6, 3, SIGHASH_ALL)
        signature = self.coinbase_key.sign(sig_hash) + b'\x01'
        TxFailure6.vin[3].scriptSig = CScript([signature])
        TxFailure6.rehash()

        test_transaction_acceptance(node,
                                    TxFailure6,
                                    accepted=False,
                                    reason=b"bad-txns-token-balance")

        #TxSuccess6 - (UTXO-11)   - transfer REISSUABLE(40)        (UTXO-14)
        #           - (UTXO-8)    - burn NON-REISSUABLE(60)        (UTXO-15)*
        #           - (UTXO-13)   - transfer NFT                   (UTXO-16)
        #           - coinbaseTx5 - change
        txSuccess6 = CTransaction()
        txSuccess6.vin.append(CTxIn(COutPoint(txSuccess5.malfixsha256, 1),
                                    b""))
        txSuccess6.vin.append(CTxIn(COutPoint(txSuccess4.malfixsha256, 3),
                                    b""))
        txSuccess6.vin.append(CTxIn(COutPoint(txSuccess5.malfixsha256, 4),
                                    b""))
        txSuccess6.vin.append(
            CTxIn(COutPoint(coinbase_txs[4].malfixsha256, 0), b""))
        txSuccess6.vout.append(CTxOut(40, script_transfer_reissuable))
        txSuccess6.vout.append(CTxOut(30, script_transfer_nonreissuable))
        txSuccess6.vout.append(CTxOut(1, script_transfer_nft))
        txSuccess6.vout.append(CTxOut(1 * COIN, change_script))
        sig_hash, err = SignatureHash(txSuccess5.vout[1].scriptPubKey,
                                      txSuccess6, 0, SIGHASH_ALL)
        signature = self.privkeys[0].sign(sig_hash) + b'\x01'
        txSuccess6.vin[0].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[0])])
        sig_hash, err = SignatureHash(txSuccess4.vout[3].scriptPubKey,
                                      txSuccess6, 1, SIGHASH_ALL)
        signature = self.privkeys[0].sign(sig_hash) + b'\x01'
        txSuccess6.vin[1].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[0])])
        sig_hash, err = SignatureHash(txSuccess5.vout[4].scriptPubKey,
                                      txSuccess6, 2, SIGHASH_ALL)
        signature = self.privkeys[0].sign(sig_hash) + b'\x01'
        txSuccess6.vin[2].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[0])])
        sig_hash, err = SignatureHash(coinbase_txs[4].vout[0].scriptPubKey,
                                      txSuccess6, 3, SIGHASH_ALL)
        signature = self.coinbase_key.sign(sig_hash) + b'\x01'
        txSuccess6.vin[3].scriptSig = CScript([signature])
        txSuccess6.rehash()

        test_transaction_acceptance(node, txSuccess6, accepted=True)

        #TxSuccess7 - coinbaseTx5 - issue 1000 REISSUABLE1, change (UTXO-17)
        txSuccess7 = CTransaction()
        txSuccess7.vin.append(
            CTxIn(COutPoint(coinbase_txs[5].malfixsha256, 0), b""))
        txSuccess7.vout.append(CTxOut(1000, script_reissuable1))
        sig_hash, err = SignatureHash(coinbase_txs[5].vout[0].scriptPubKey,
                                      txSuccess7, 0, SIGHASH_ALL)
        signature = self.coinbase_key.sign(sig_hash) + b'\x01'
        txSuccess7.vin[0].scriptSig = CScript([signature])
        txSuccess7.rehash()

        test_transaction_acceptance(node, txSuccess7, accepted=True)

        #TxFailure7 - (UTXO-9,14) - aggregate REISSUABLE(25 + 40) x
        #           - (UTXO-12)   - burn NON-REISSUABLE(20)        *
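        # Presumed reason for rejection (added commentary, not from the original test): the
        # token amounts balance, but there is no uncolored input to cover a fee, so the node
        # rejects the transaction with 'min relay fee not met'; txSuccess8 below adds
        # coinbase_txs[6] as an extra input to pay the fee.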
        TxFailure7 = CTransaction()
        TxFailure7.vin.append(CTxIn(COutPoint(txSuccess4.malfixsha256, 0),
                                    b""))
        TxFailure7.vin.append(CTxIn(COutPoint(txSuccess6.malfixsha256, 0),
                                    b""))
        TxFailure7.vin.append(CTxIn(COutPoint(txSuccess5.malfixsha256, 2),
                                    b""))
        TxFailure7.vout.append(CTxOut(65, script_transfer_reissuable))
        sig_hash, err = SignatureHash(txSuccess4.vout[0].scriptPubKey,
                                      TxFailure7, 0, SIGHASH_ALL)
        signature = self.privkeys[0].sign(sig_hash) + b'\x01'
        TxFailure7.vin[0].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[0])])
        sig_hash, err = SignatureHash(txSuccess6.vout[0].scriptPubKey,
                                      TxFailure7, 1, SIGHASH_ALL)
        signature = self.privkeys[0].sign(sig_hash) + b'\x01'
        TxFailure7.vin[1].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[0])])
        sig_hash, err = SignatureHash(txSuccess5.vout[2].scriptPubKey,
                                      TxFailure7, 2, SIGHASH_ALL)
        signature = self.privkeys[0].sign(sig_hash) + b'\x01'
        TxFailure7.vin[2].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[0])])
        TxFailure7.rehash()

        test_transaction_acceptance(node,
                                    TxFailure7,
                                    accepted=False,
                                    reason=b'min relay fee not met')

        #txSuccess8 - (UTXO-9,14) - aggregate REISSUABLE(25 + 40) x
        #           - (UTXO-12)   - burn NON-REISSUABLE(20)        *
        #           - coinbase[6]
        txSuccess8 = CTransaction()
        txSuccess8.vin.append(CTxIn(COutPoint(txSuccess4.malfixsha256, 0),
                                    b""))
        txSuccess8.vin.append(CTxIn(COutPoint(txSuccess6.malfixsha256, 0),
                                    b""))
        txSuccess8.vin.append(CTxIn(COutPoint(txSuccess5.malfixsha256, 2),
                                    b""))
        txSuccess8.vin.append(
            CTxIn(COutPoint(coinbase_txs[6].malfixsha256, 0), b""))
        txSuccess8.vout.append(CTxOut(65, script_transfer_reissuable))
        sig_hash, err = SignatureHash(txSuccess4.vout[0].scriptPubKey,
                                      txSuccess8, 0, SIGHASH_ALL)
        signature = self.privkeys[0].sign(sig_hash) + b'\x01'
        txSuccess8.vin[0].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[0])])
        sig_hash, err = SignatureHash(txSuccess6.vout[0].scriptPubKey,
                                      txSuccess8, 1, SIGHASH_ALL)
        signature = self.privkeys[1].sign(sig_hash) + b'\x01'
        txSuccess8.vin[1].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[1])])
        sig_hash, err = SignatureHash(txSuccess5.vout[2].scriptPubKey,
                                      txSuccess8, 2, SIGHASH_ALL)
        signature = self.privkeys[0].sign(sig_hash) + b'\x01'
        txSuccess8.vin[2].scriptSig = CScript(
            [signature, hex_str_to_bytes(self.pubkeys[0])])
        sig_hash, err = SignatureHash(coinbase_txs[6].vout[0].scriptPubKey,
                                      txSuccess8, 3, SIGHASH_ALL)
        signature = self.coinbase_key.sign(sig_hash) + b'\x01'
        txSuccess8.vin[3].scriptSig = CScript([signature])
        txSuccess8.rehash()

        test_transaction_acceptance(node, txSuccess8, accepted=True)

        #TxFailure8 - (UTXO-17)   - convert REISSUABLE to NON-REISSUABLE
        TxFailure8 = CTransaction()
        TxFailure8.vin.append(CTxIn(COutPoint(txSuccess7.malfixsha256, 0),
                                    b""))
        TxFailure8.vout.append(CTxOut(60, script_transfer_nonreissuable))
        sig_hash, err = SignatureHash(txSuccess7.vout[0].scriptPubKey,
                                      TxFailure8, 0, SIGHASH_ALL)
        signature = self.coinbase_key.sign(sig_hash) + b'\x01'
        TxFailure8.vin[0].scriptSig = CScript([signature])
        TxFailure8.rehash()

        test_transaction_acceptance(node,
                                    TxFailure8,
                                    accepted=False,
                                    reason=b'invalid-colorid')
class RPCSendRawTransactions(ComparisonTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True
        self.genesisactivationheight = 600
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"horsebattery")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        self.locking_script = CScript([self.coinbase_pubkey, OP_CHECKSIG])
        self.default_args = [
            '-debug', '-maxgenesisgracefulperiod=0',
            '-genesisactivationheight=%d' % self.genesisactivationheight
        ]
        self.extra_args = [self.default_args] * self.num_nodes

    def run_test(self):
        self.test.run()

    # Sign a transaction, using the key we know about.
    # This signs input 0 in tx, which is assumed to be spending output n in spend_tx
    def sign_tx(self, tx, spend_tx, n):
        scriptPubKey = bytearray(spend_tx.vout[n].scriptPubKey)
        sighash = SignatureHashForkId(spend_tx.vout[n].scriptPubKey, tx, 0,
                                      SIGHASH_ALL | SIGHASH_FORKID,
                                      spend_tx.vout[n].nValue)
        tx.vin[0].scriptSig = CScript([
            self.coinbase_key.sign(sighash) +
            bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))
        ])

    def check_mempool(self, rpc, should_be_in_mempool, timeout=20):
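        # Wait (up to `timeout` seconds) until the mempool contains exactly the hashes of
        # the expected transactions.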
        wait_until(lambda: set(rpc.getrawmempool()) ==
                   {t.hash
                    for t in should_be_in_mempool},
                   timeout=timeout)

    # Generate transactions in order, so that the first transaction's output is an input for the second transaction.
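    # Worked example (illustrative, not from the original): each link spends the previous
    # transaction's output 0 and pays a flat 1000-satoshi fee, so a chain of length 3
    # starting from 5,000,000,000 satoshi ends with an output of 4,999,997,000 satoshi.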
    def get_chained_transactions(self,
                                 spend,
                                 num_of_transactions,
                                 money_to_spend=5000000000):
        txns = []
        for _ in range(0, num_of_transactions):
            money_to_spend = money_to_spend - 1000  # 1000 satoshi to fee
            tx = create_transaction(spend.tx, spend.n, b"", money_to_spend,
                                    self.locking_script)
            self.sign_tx(tx, spend.tx, spend.n)
            tx.rehash()
            txns.append(tx)
            spend = PreviousSpendableOutput(tx, 0)
        return txns

    # Create the required number of chains, each of equal length.
    def get_txchains_n(self, num_of_chains, chain_length, spend):
        if num_of_chains > len(spend):
            raise Exception('Insufficient number of spendable outputs.')
        txchains = []
        for x in range(0, num_of_chains):
            txchains += self.get_chained_transactions(spend[x], chain_length)
        return txchains

    # Test expected valid results, depending on the node's configuration.
    def run_scenario1(self,
                      conn,
                      num_of_chains,
                      chain_length,
                      spend,
                      allowhighfees=False,
                      dontcheckfee=False,
                      useRpcWithDefaults=False,
                      shuffle_txs=False,
                      timeout=30):
        # Create and send tx chains.
        txchains = self.get_txchains_n(num_of_chains, chain_length, spend)
        # Shuffle txs if it is required
        if shuffle_txs:
            random.shuffle(txchains)
        # Prepare inputs for sendrawtransactions
        rpc_txs_bulk_input = []
        for tx in range(len(txchains)):
            # Collect txn input data for bulk submit through rpc interface.
            if useRpcWithDefaults:
                rpc_txs_bulk_input.append({'hex': ToHex(txchains[tx])})
            else:
                rpc_txs_bulk_input.append({
                    'hex': ToHex(txchains[tx]),
                    'allowhighfees': allowhighfees,
                    'dontcheckfee': dontcheckfee
                })
        # Submit transactions in bulk.
        rejected_txns = conn.rpc.sendrawtransactions(rpc_txs_bulk_input)
        # There should be no rejected transactions.
        assert_equal(len(rejected_txns), 0)
        # Check if required transactions are accepted by the mempool.
        self.check_mempool(conn.rpc, txchains, timeout)

    # Test expected invalid results and invalid input data conditions.
    def run_scenario2(self, conn, timeout=30):
        #
        # sendrawtransactions with missing input #
        #
        inputs = [{
            'txid':
            "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000",
            'vout': 1
        }]
        # the input txid above does not exist
        outputs = {conn.rpc.getnewaddress(): 4.998}
        rawtx = conn.rpc.createrawtransaction(inputs, outputs)
        rawtx = conn.rpc.signrawtransaction(rawtx)

        rejected_txns = conn.rpc.sendrawtransactions([{'hex': rawtx['hex']}])

        assert_equal(len(rejected_txns['invalid']), 1)
        # Reject invalid
        assert_equal(rejected_txns['invalid'][0]['reject_code'], 16)
        assert_equal(rejected_txns['invalid'][0]['reject_reason'],
                     "missing-inputs")
        # No transactions should be in the mempool.
        assert_equal(conn.rpc.getmempoolinfo()['size'], 0)

        #
        # An empty json array of objects.
        #
        assert_raises_rpc_error(
            -8, "Invalid parameter: An empty json array of objects",
            conn.rpc.sendrawtransactions, [])

        #
        # An empty json object.
        #
        assert_raises_rpc_error(-8, "Invalid parameter: An empty json object",
                                conn.rpc.sendrawtransactions, [{}])

        #
        # Missing the hex string of the raw transaction.
        #
        assert_raises_rpc_error(
            -8,
            "Invalid parameter: Missing the hex string of the raw transaction",
            conn.rpc.sendrawtransactions, [{
                'dummy_str': 'dummy_value'
            }])
        assert_raises_rpc_error(
            -8,
            "Invalid parameter: Missing the hex string of the raw transaction",
            conn.rpc.sendrawtransactions, [{
                'hex': -1
            }])

        #
        # TX decode failed.
        #
        assert_raises_rpc_error(-22, "TX decode failed",
                                conn.rpc.sendrawtransactions,
                                [{
                                    'hex': '050000000100000000a0ce6e35'
                                }])

        #
        # allowhighfees: Invalid value
        #
        assert_raises_rpc_error(-8, "allowhighfees: Invalid value",
                                conn.rpc.sendrawtransactions,
                                [{
                                    'hex': rawtx['hex'],
                                    'allowhighfees': -1
                                }])
        assert_raises_rpc_error(-8, "allowhighfees: Invalid value",
                                conn.rpc.sendrawtransactions,
                                [{
                                    'hex': rawtx['hex'],
                                    'allowhighfees': 'dummy_value'
                                }])

        #
        # dontcheckfee: Invalid value
        #
        assert_raises_rpc_error(-8, "dontcheckfee: Invalid value",
                                conn.rpc.sendrawtransactions,
                                [{
                                    'hex': rawtx['hex'],
                                    'dontcheckfee': -1
                                }])
        assert_raises_rpc_error(-8, "dontcheckfee: Invalid value",
                                conn.rpc.sendrawtransactions,
                                [{
                                    'hex': rawtx['hex'],
                                    'dontcheckfee': 'dummy_value'
                                }])
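
        # (-8 corresponds to RPC_INVALID_PARAMETER and -22 to RPC_DESERIALIZATION_ERROR
        # in the Bitcoin RPC error-code scheme; these are the two codes exercised above.)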

    # Test an attempt to submit transactions (via rpc interface) which are already known
    #   - received earlier through the p2p interface and not processed yet
    def run_scenario3(self,
                      conn,
                      num_of_chains,
                      chain_length,
                      spend,
                      allowhighfees=False,
                      dontcheckfee=False,
                      timeout=30):
        # Create and send tx chains.
        txchains = self.get_txchains_n(num_of_chains, chain_length, spend)
        # Prepare inputs for sendrawtransactions
        rpc_txs_bulk_input = []
        for tx in range(len(txchains)):
            # Collect txn input data for bulk submit through rpc interface.
            rpc_txs_bulk_input.append({
                'hex': ToHex(txchains[tx]),
                'allowhighfees': allowhighfees,
                'dontcheckfee': dontcheckfee
            })
            # Send a txn, one by one, through p2p interface.
            conn.send_message(msg_tx(txchains[tx]))
        # Check if there is an expected number of transactions in the validation queues
        # - this scenario relies on ptv delayed processing
        wait_until(lambda: conn.rpc.getblockchainactivity()["transactions"] ==
                   num_of_chains * chain_length,
                   timeout=timeout)
        # Submit a batch of txns through rpc interface.
        rejected_txns = conn.rpc.sendrawtransactions(rpc_txs_bulk_input)
        # There should be num_of_chains * chain_length rejected transactions.
        # - there are num_of_chains*chain_length known transactions
        #   - due to the fact that all were received through the p2p interface
        #   - all are waiting in the ptv queues
        assert_equal(len(rejected_txns['known']), num_of_chains * chain_length)
        # No transactions should be in the mempool.
        assert_equal(conn.rpc.getmempoolinfo()['size'], 0)

    # Test duplicated input data set submitted through the rpc interface.
    # - input data are shuffled
    def run_scenario4(self,
                      conn,
                      num_of_chains,
                      chain_length,
                      spend,
                      allowhighfees=False,
                      dontcheckfee=False,
                      timeout=30):
        # Create and send tx chains.
        txchains = self.get_txchains_n(num_of_chains, chain_length, spend)
        # Prepare duplicated inputs for sendrawtransactions
        rpc_txs_bulk_input = []
        for tx in range(len(txchains)):
            rpc_txs_bulk_input.append({
                'hex': ToHex(txchains[tx]),
                'allowhighfees': allowhighfees,
                'dontcheckfee': dontcheckfee
            })
            rpc_txs_bulk_input.append({
                'hex': ToHex(txchains[tx]),
                'allowhighfees': allowhighfees,
                'dontcheckfee': dontcheckfee
            })
        # Shuffle inputs.
        random.shuffle(rpc_txs_bulk_input)
        # Submit bulk input.
        rejected_txns = conn.rpc.sendrawtransactions(rpc_txs_bulk_input)
        # The result should contain a single rejection group: the 'known' transactions.
        assert_equal(len(rejected_txns), 1)
        assert_equal(len(rejected_txns['known']), num_of_chains * chain_length)
        assert (set(rejected_txns['known']) == {t.hash for t in txchains})
        # Check if required transactions are accepted by the mempool.
        self.check_mempool(conn.rpc, txchains, timeout)

    def get_tests(self):
        rejected_txs = []

        def on_reject(conn, msg):
            rejected_txs.append(msg)

        # Shorthand for functions
        block = self.chain.next_block
        node = self.nodes[0]
        self.chain.set_genesis_hash(int(node.getbestblockhash(), 16))

        # Create a new block
        block(0, coinbase_pubkey=self.coinbase_pubkey)
        self.chain.save_spendable_output()
        yield self.accepted()

        # Now we need that block to mature so we can spend the coinbase.
        # Also, move block height on beyond Genesis activation.
        test = TestInstance(sync_every_block=False)
        for i in range(600):
            block(5000 + i, coinbase_pubkey=self.coinbase_pubkey)
            test.blocks_and_transactions.append([self.chain.tip, True])
            self.chain.save_spendable_output()
        yield test

        # Collect spendable outputs now to avoid cluttering the code later on.
        out = []
        for i in range(200):
            out.append(self.chain.get_spendable_output())

        self.stop_node(0)

        #====================================================================
        # Valid test cases.
        # - a bulk submit of txns through sendrawtransactions rpc interface
        # - all transactions are valid and accepted by the mempool
        #====================================================================

        # Scenario 1 (TS1).
        # This test case checks a bulk submit of txs through the sendrawtransactions rpc interface, with default params.
        # - 1K txs used
        # - allowhighfees=False (default)
        # - dontcheckfee=False (default)
        # - txn chains are in ordered sequence (no orphans should be detected during processing)
        #
        # Test case config
        num_of_chains = 10
        chain_length = 100
        # Node's config
        args = [
            '-txnvalidationasynchrunfreq=100', '-maxorphantxsize=0',
            '-limitancestorcount=100', '-checkmempool=0', '-persistmempool=0'
        ]
        with self.run_node_with_connections(
                'TS1: {} chains of length {}. Default params for rpc call.'.
                format(num_of_chains, chain_length),
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):
            # Run test case.
            self.run_scenario1(conn,
                               num_of_chains,
                               chain_length,
                               out,
                               useRpcWithDefaults=True,
                               timeout=20)

        # Scenario 2 (TS2).
        # This test case checks a bulk submit of txs through the sendrawtransactions rpc interface, with default params.
        # - 1K txs used
        # - allowhighfees=False (default)
        # - dontcheckfee=False (default)
        # - txn chains are shuffled (orphans should be detected during processing)
        #
        # Test case config
        num_of_chains = 10
        chain_length = 100
        # Node's config
        args = [
            '-txnvalidationasynchrunfreq=0', '-limitancestorcount=100',
            '-checkmempool=0', '-persistmempool=0'
        ]
        with self.run_node_with_connections(
                'TS2: {} chains of length {}. Shuffled txs. Default params for rpc call.'
                .format(num_of_chains, chain_length),
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):
            # Run test case.
            self.run_scenario1(conn,
                               num_of_chains,
                               chain_length,
                               out,
                               useRpcWithDefaults=True,
                               shuffle_txs=True,
                               timeout=20)

        # Scenario 3 (TS3).
        # This test case checks a bulk submit of txs through the sendrawtransactions rpc interface, with default params.
        # - 10K txs used
        # - allowhighfees=False (default)
        # - dontcheckfee=False (default)
        # - txn chains are in ordered sequence (no orphans should be detected during processing)
        #
        # Test case config
        num_of_chains = 100
        chain_length = 100
        # Node's config
        args = [
            '-txnvalidationasynchrunfreq=0', '-maxorphantxsize=0',
            '-limitancestorcount=100', '-checkmempool=0', '-persistmempool=0'
        ]
        with self.run_node_with_connections(
                'TS3: {} chains of length {}. Default params for rpc call.'.
                format(num_of_chains, chain_length),
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):
            # Run test case.
            self.run_scenario1(conn,
                               num_of_chains,
                               chain_length,
                               out,
                               useRpcWithDefaults=True,
                               timeout=30)

        # Scenario 4 (TS4).
        # This test case checks a bulk submit of txs through the sendrawtransactions rpc interface, with explicitly declared default params.
        # - 1K txs used
        # - allowhighfees=False (an explicit default value)
        # - dontcheckfee=False (an explicit default value)
        # - txn chains are in ordered sequence (no orphans should be detected during processing)
        #
        # Test case config
        num_of_chains = 10
        chain_length = 100
        allowhighfees = False
        dontcheckfee = False
        # Node's config
        args = [
            '-txnvalidationasynchrunfreq=100', '-maxorphantxsize=0',
            '-limitancestorcount=100', '-checkmempool=0', '-persistmempool=0'
        ]
        with self.run_node_with_connections(
                'TS4: {} chains of length {}. allowhighfees={}, dontcheckfee={}.'
                .format(num_of_chains, chain_length, str(allowhighfees),
                        str(dontcheckfee)),
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):
            # Run test case.
            self.run_scenario1(conn,
                               num_of_chains,
                               chain_length,
                               out,
                               allowhighfees,
                               dontcheckfee,
                               timeout=20)

        # Scenario 5 (TS5).
        # This test case checks a bulk submit of txs through the sendrawtransactions rpc interface, with non-default params.
        # - 1K txs used
        # - allowhighfees=True
        # - dontcheckfee=True
        # - txn chains are in ordered sequence (no orphans should be detected during processing)
        #
        # Test case config
        num_of_chains = 10
        chain_length = 100
        allowhighfees = True
        dontcheckfee = True
        # Node's config
        args = [
            '-txnvalidationasynchrunfreq=100', '-maxorphantxsize=0',
            '-limitancestorcount=100', '-checkmempool=0', '-persistmempool=0'
        ]
        with self.run_node_with_connections(
                'TS5: {} chains of length {}. allowhighfees={}, dontcheckfee={}.'
                .format(num_of_chains, chain_length, str(allowhighfees),
                        str(dontcheckfee)),
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):
            # Run test case.
            self.run_scenario1(conn,
                               num_of_chains,
                               chain_length,
                               out,
                               allowhighfees,
                               dontcheckfee,
                               timeout=20)

        #====================================================================
        # Invalid test cases and non-empty rejects
        # - test invalid data
        # - test rejected transactions
        # - test duplicates
        #====================================================================

        # Scenario 6 (TS6).
        # This test case checks invalid input data conditions and rejected transactions
        # submitted through the sendrawtransactions rpc interface.
        #
        # Node's config
        args = [
            '-txnvalidationasynchrunfreq=100', '-maxorphantxsize=0',
            '-limitancestorcount=100', '-checkmempool=0', '-persistmempool=0'
        ]
        with self.run_node_with_connections(
                'TS6: Invalid conditions',
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):
            # Run test case.
            self.run_scenario2(conn, timeout=20)

        # Scenario 7 (TS7).
        # This test case checks that txs already received through the p2p interface
        # (and still waiting in the ptv queues) are reported as 'known' when
        # resubmitted through the sendrawtransactions rpc interface.
        #
        # Test case config
        num_of_chains = 10
        chain_length = 10
        # Node's config
        args = [
            '-txnvalidationasynchrunfreq=10000',
            '-maxstdtxnsperthreadratio=0',  # Do not take any std txs for processing (from the ptv queues).
            '-maxnonstdtxnsperthreadratio=0',  # Do not take any non-std txs for processing (from the ptv queues).
            '-checkmempool=0',
            '-persistmempool=0'
        ]
        with self.run_node_with_connections(
                'TS7: {} chains of length {}. Reject known transactions'.
                format(num_of_chains, chain_length),
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):
            # Run test case.
            self.run_scenario3(conn,
                               num_of_chains,
                               chain_length,
                               out,
                               timeout=30)

        # Scenario 8 (TS8).
        # This test case checks a bulk submit of duplicated txs through the sendrawtransactions rpc interface.
        # - 2K txs used (1K are detected as duplicates - known transactions in the result set)
        # - rpc input data set is shuffled
        #
        # Test case config
        num_of_chains = 10
        chain_length = 100
        # Node's config
        args = [
            '-txnvalidationasynchrunfreq=0', '-limitancestorcount=100',
            '-checkmempool=0', '-persistmempool=0'
        ]
        with self.run_node_with_connections(
                'TS8: {} chains of length {}. Test duplicated inputs.'.format(
                    num_of_chains, chain_length),
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):
            # Run test case.
            self.run_scenario4(conn,
                               num_of_chains,
                               chain_length,
                               out,
                               timeout=20)


class FullBlockTest(ComparisonTestFramework):

    ''' Can either run this test as 1 node with expected answers, or two and compare them. 
        Change the "outcome" variable from each TestInstance object to only do the comparison. '''
    def __init__(self):
        super().__init__()
        self.num_nodes = 1
        self.block_heights = {}
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"horsebattery")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        self.block_time = int(time.time())+1
        self.tip = None
        self.blocks = {}

    def run_test(self):
        test = TestManager(self, self.options.tmpdir)
        test.add_all_connections(self.nodes)
        NetworkThread().start() # Start up network handling in another thread
        test.run()

    def add_transactions_to_block(self, block, tx_list):
        for tx in tx_list:
            tx.rehash()
        block.vtx.extend(tx_list)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        return block
    
    # Create a block on top of self.tip, and advance self.tip to point to the new block
    # if spend is specified, then 1 satoshi will be spent from that to an anyone-can-spend output,
    # and rest will go to fees.
    def next_block(self, number, spend=None, additional_coinbase_value=0, script=None):
        if self.tip is None:
            base_block_hash = self.genesis_hash
        else:
            base_block_hash = self.tip.sha256
        # First create the coinbase
        height = self.block_heights[base_block_hash] + 1
        coinbase = create_coinbase(height, self.coinbase_pubkey)
        coinbase.vout[0].nValue += additional_coinbase_value
        if spend is not None:
            coinbase.vout[0].nValue += spend.tx.vout[spend.n].nValue - 1 # all but one satoshi to fees
        coinbase.rehash()
        block = create_block(base_block_hash, coinbase, self.block_time)
        if spend is not None:
            tx = CTransaction()
            tx.vin.append(CTxIn(COutPoint(spend.tx.sha256, spend.n), b"", 0xffffffff))  # no signature yet
            # This copies the java comparison tool testing behavior: the first
            # txout has a garbage scriptPubKey, "to make sure we're not
            # pre-verifying too much" (?)
            tx.vout.append(CTxOut(0, CScript([random.randint(0,255), height & 255])))
            if script is None:
                tx.vout.append(CTxOut(1, CScript([OP_TRUE])))
            else:
                tx.vout.append(CTxOut(1, script))
            # Now sign it if necessary
            scriptSig = b""
            scriptPubKey = bytearray(spend.tx.vout[spend.n].scriptPubKey)
            if (scriptPubKey[0] == OP_TRUE):  # looks like an anyone-can-spend
                scriptSig = CScript([OP_TRUE])
            else:
                # We have to actually sign it
                (sighash, err) = SignatureHash(
                    spend.tx.vout[spend.n].scriptPubKey,
                    tx,
                    0,
                    SIGHASH_ALL,
                    spend.tx.vout[spend.n].nValue,
                    SAPLING_BRANCH_ID,
                )
                scriptSig = CScript([self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))])
            tx.vin[0].scriptSig = scriptSig
            # Now add the transaction to the block
            block = self.add_transactions_to_block(block, [tx])
        block.solve()
        self.tip = block
        self.block_heights[block.sha256] = height
        self.block_time += 1
        assert number not in self.blocks
        self.blocks[number] = block
        return block

    def get_tests(self):
        self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
        self.block_heights[self.genesis_hash] = 0
        spendable_outputs = []

        # save the current tip so it can be spent by a later block
        def save_spendable_output():
            spendable_outputs.append(self.tip)

        # get an output that we previously marked as spendable
        def get_spendable_output():
            return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)

        # returns a test case that asserts that the current tip was accepted
        def accepted():
            return TestInstance([[self.tip, True]])

        # returns a test case that asserts that the current tip was rejected
        def rejected():
            return TestInstance([[self.tip, False]])
       
        # move the tip back to a previous block
        def tip(number):
            self.tip = self.blocks[number]

        # creates a new block and advances the tip to that block
        block = self.next_block


        # Create a new block
        block(0)
        save_spendable_output()
        yield accepted()


        # Now we need that block to mature so we can spend the coinbase.
        test = TestInstance(sync_every_block=False)
        for i in range(100):
            block(1000 + i)
            test.blocks_and_transactions.append([self.tip, True])
            save_spendable_output()
        yield test


        # Start by building a couple of blocks on top (which output is spent is in parentheses):
        #     genesis -> b1 (0) -> b2 (1)
        out0 = get_spendable_output()
        block(1, spend=out0)
        save_spendable_output()
        yield accepted()

        out1 = get_spendable_output()
        block(2, spend=out1)
        # Inv again, then deliver twice (shouldn't break anything).
        yield accepted()


        # Now fork like this:
        # 
        #     genesis -> b1 (0) -> b2 (1)
        #                      \-> b3 (1)
        # 
        # Nothing should happen at this point. We saw b2 first so it takes priority.
        tip(1)
        block(3, spend=out1)
        # Deliver twice (should still not break anything)
        yield rejected()


        # Now we add another block to make the alternative chain longer.
        # 
        #     genesis -> b1 (0) -> b2 (1)
        #                      \-> b3 (1) -> b4 (2)
        out2 = get_spendable_output()
        block(4, spend=out2)
        yield accepted()


        # ... and back to the first chain.
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                      \-> b3 (1) -> b4 (2)
        tip(2)
        block(5, spend=out2)
        save_spendable_output()
        yield rejected()

        out3 = get_spendable_output()
        block(6, spend=out3)
        yield accepted()


        # Try to create a fork that double-spends
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                                          \-> b7 (2) -> b8 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        block(7, spend=out2)
        yield rejected()

        out4 = get_spendable_output()
        block(8, spend=out4)
        yield rejected()


        # Try to create a block that has too much fee
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                                                    \-> b9 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(6)
        block(9, spend=out4, additional_coinbase_value=1)
        yield rejected()

        
        # Create a fork that ends in a block with too much fee (the one that causes the reorg)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b10 (3) -> b11 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        block(10, spend=out3)
        yield rejected()

        block(11, spend=out4, additional_coinbase_value=1)
        yield rejected()


        # Try again, but with a valid fork first
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b14 (5)
        #                                              (b12 added last)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        b12 = block(12, spend=out3)
        save_spendable_output()
        #yield TestInstance([[b12, False]])
        b13 = block(13, spend=out4)
        # Deliver the block header for b12, and the block b13.
        # b13 should be accepted but the tip won't advance until b12 is delivered.
        yield TestInstance([[CBlockHeader(b12), None], [b13, False]])

        save_spendable_output()
        out5 = get_spendable_output()
        # b14 is invalid, but the node won't know that until it tries to connect
        # Tip still can't advance because b12 is missing
        block(14, spend=out5, additional_coinbase_value=1)
        yield rejected()

        yield TestInstance([[b12, True, b13.sha256]]) # New tip should be b13.

        
        # Test that a block with a lot of checksigs is okay
        lots_of_checksigs = CScript([OP_CHECKSIG] * (1000000 // 50 - 1))
        tip(13)
        block(15, spend=out5, script=lots_of_checksigs)
        yield accepted()


        # Test that a block with too many checksigs is rejected
        out6 = get_spendable_output()
        too_many_checksigs = CScript([OP_CHECKSIG] * (1000000 // 50))
        block(16, spend=out6, script=too_many_checksigs)
        yield rejected()
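

# A minimal sketch (an assumption about the surrounding harness, not shown in the
# example above) of how a ComparisonTestFramework-based test such as FullBlockTest
# is typically launched:
#
# if __name__ == '__main__':
#     FullBlockTest().main()

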
class BSV_RPC_verifyscript(BitcoinTestFramework):
    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 1
        # genesis height is specified so that we can check if script verification flags are properly set
        self.genesisactivationheight = 103
        self.extra_args = [[
            "-genesisactivationheight=%d" % self.genesisactivationheight
        ]]

        # Private key used in scripts with CHECKSIG
        self.prvkey = CECKey()
        self.prvkey.set_secretbytes(b"horsebattery")
        self.pubkey = self.prvkey.get_pubkey()

    # Return a transaction that spends the given anyone-can-spend coinbase and provides
    # several outputs that can be used in the test to verify scripts
    def create_test_tx(self, coinbase_tx):
        assert_equal(coinbase_tx.vout[0].nValue,
                     50 * COIN)  # we expect 50 coins

        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(coinbase_tx.sha256, 0), b"", 0xffffffff))

        # Simple anyone-can-spend output
        tx.vout.append(CTxOut(int(1 * COIN), CScript([OP_TRUE])))

        # Output using standard P2PKH script
        tx.vout.append(
            CTxOut(
                int(1 * COIN),
                CScript([
                    OP_DUP, OP_HASH160,
                    hash160(self.pubkey), OP_EQUALVERIFY, OP_CHECKSIG
                ])))

        # Another simple anyone-can-spend output
        tx.vout.append(CTxOut(int(1 * COIN), CScript([OP_TRUE])))

        # Final output provides remaining coins and is not needed by test
        tx.vout.append(CTxOut(int(47 * COIN), CScript([OP_FALSE])))

        tx.rehash()
        return tx

    # Sign input 0 in tx spending n-th output from spend_tx using self.prvkey
    def sign_tx(self, tx, spend_tx, n):
        sighash = SignatureHashForkId(spend_tx.vout[n].scriptPubKey, tx, 0,
                                      SIGHASH_ALL | SIGHASH_FORKID,
                                      spend_tx.vout[n].nValue)
        tx.vin[0].scriptSig = CScript([
            self.prvkey.sign(sighash) +
            bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID])), self.pubkey
        ])

    def verifyscript_check(self, node, expected_result, scripts, *args):
        N = len(scripts)
        assert_equal(len(expected_result), N)
        result = node.verifyscript(scripts, *args)
        assert_equal(len(result), N)
        for i in range(N):
            if result[i]["result"] != expected_result[i]:
                raise AssertionError("Unexpected script verification result " +
                                     str(i) + "! Expected '" +
                                     expected_result[i] + "' got " +
                                     str(result[i]))
        return result
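
    # As exercised by this test, each element of the verifyscript result is a dict
    # with a "result" field ("ok", "error", "timeout" or "skipped") and, when
    # "reportflags" was requested for that script, a numeric "flags" field,
    # e.g. {"result": "ok", "flags": 81931}.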

    def verifyscript_check_ok(self, node, scripts, *args):
        return self.verifyscript_check(node,
                                       len(scripts) * ["ok"], scripts, *args)

    def verifyscript_check_error(self, node, scripts, *args):
        return self.verifyscript_check(node,
                                       len(scripts) * ["error"], scripts,
                                       *args)

    def run_test(self):
        node = self.nodes[0]

        # Create spendable coinbase transaction
        coinbase_tx = make_coinbase(node)
        node.generate(99)

        # Create, send and mine test transaction
        tx_test = self.create_test_tx(coinbase_tx)
        node.sendrawtransaction(ToHex(tx_test), False,
                                True)  # disable fee check
        assert_equal(node.getrawmempool(), [tx_test.hash])
        node.generate(1)
        assert_equal(node.getrawmempool(), [])

        tip_hash = node.getbestblockhash()

        #
        # Check parameter parsing
        #
        tx0 = create_tx(tx_test, 0, 1 * COIN)
        assert_raises_rpc_error(
            -8, "Missing required argument", node.verifyscript
        )  # 1st parameter scripts is required and must be JSON array of objects
        assert_raises_rpc_error(-1, None, node.verifyscript, "abc")
        assert_raises_rpc_error(-1, None, node.verifyscript, 123)
        assert_raises_rpc_error(-1, None, node.verifyscript, True)
        assert_raises_rpc_error(-1, None, node.verifyscript, {})
        assert_raises_rpc_error(-1, None, node.verifyscript, ["abc"])
        assert_raises_rpc_error(-1, None, node.verifyscript, [123])
        assert_raises_rpc_error(-1, None, node.verifyscript, [True])

        assert_raises_rpc_error(-1, None, node.verifyscript, [{
            "tx": ToHex(tx0),
            "n": 0
        }], "abc")  # 2nd parameter stopOnFirstInvalid is boolean
        assert_raises_rpc_error(-1, None, node.verifyscript, [{
            "tx": ToHex(tx0),
            "n": 0
        }], 0)
        assert_raises_rpc_error(-1, None, node.verifyscript, [{
            "tx": ToHex(tx0),
            "n": 0
        }], [])
        assert_raises_rpc_error(-1, None, node.verifyscript, [{
            "tx": ToHex(tx0),
            "n": 0
        }], {})

        assert_raises_rpc_error(
            -8, "Invalid value for totalTimeout", node.verifyscript,
            [{
                "tx": ToHex(tx0),
                "n": 0
            }], True, -1)  # 3rd parameter totalTimeout is non-negative integer
        assert_raises_rpc_error(-1, None, node.verifyscript, [{
            "tx": ToHex(tx0),
            "n": 0
        }], True, "abc")
        assert_raises_rpc_error(-1, None, node.verifyscript, [{
            "tx": ToHex(tx0),
            "n": 0
        }], True, True)
        assert_raises_rpc_error(-1, None, node.verifyscript, [{
            "tx": ToHex(tx0),
            "n": 0
        }], True, [])
        assert_raises_rpc_error(-1, None, node.verifyscript, [{
            "tx": ToHex(tx0),
            "n": 0
        }], True, {})

        assert_raises_rpc_error(-8, "Too many arguments", node.verifyscript,
                                [{
                                    "tx": ToHex(tx0),
                                    "n": 0
                                }], True, 100, "abc")  # max 3 arguments

        #
        # Check scripts parameter parsing
        #
        assert_raises_rpc_error(-8, "Missing", node.verifyscript,
                                [{}])  # tx and n fields are required
        assert_raises_rpc_error(-8, "Missing scripts[0].n", node.verifyscript,
                                [{
                                    "tx": ToHex(tx0)
                                }])
        assert_raises_rpc_error(-8, "Missing scripts[0].tx", node.verifyscript,
                                [{
                                    "n": 0
                                }])
        assert_raises_rpc_error(-8, "Missing scripts[1].n", node.verifyscript,
                                [{
                                    "tx": ToHex(tx0),
                                    "n": 0
                                }, {
                                    "tx": ToHex(tx0)
                                }])
        assert_raises_rpc_error(-8, "Missing scripts[1].tx", node.verifyscript,
                                [{
                                    "tx": ToHex(tx0),
                                    "n": 0
                                }, {
                                    "n": 0
                                }])

        assert_raises_rpc_error(-22, "TX decode failed for scripts[0].tx",
                                node.verifyscript, [{
                                    "tx": "",
                                    "n": 0
                                }])  # tx must be a hex string of a transaction
        assert_raises_rpc_error(-22, "TX decode failed for scripts[0].tx",
                                node.verifyscript, [{
                                    "tx": "01abc",
                                    "n": 0
                                }])
        assert_raises_rpc_error(-22, "TX decode failed for scripts[0].tx",
                                node.verifyscript, [{
                                    "tx": "00",
                                    "n": 0
                                }])
        assert_raises_rpc_error(-8, "Invalid value for n in scripts[0]",
                                node.verifyscript, [{
                                    "tx": ToHex(tx0),
                                    "n": -1
                                }])  # n must be non-negative integer
        assert_raises_rpc_error(
            -8, "Both flags and prevblockhash specified in scripts[0]",
            node.verifyscript, [{
                "tx": ToHex(tx0),
                "n": 0,
                "flags": 0,
                "prevblockhash": tip_hash
            }])  # both flags and prevblockhash are not allowed
        assert_raises_rpc_error(-8, "Unknown block", node.verifyscript, [{
            "tx":
            ToHex(tx0),
            "n":
            0,
            "prevblockhash":
            "0000000000000000000000000000000000000000000000000000000000000000"
        }])  # invalid block hash

        assert_raises_rpc_error(-3, None, node.verifyscript, [{
            "tx": ToHex(tx0),
            "n": 0,
            "txo": 0
        }])  # txo must be JSON object with three fields
        assert_raises_rpc_error(-3, None, node.verifyscript, [{
            "tx": ToHex(tx0),
            "n": 0,
            "txo": "abc"
        }])
        assert_raises_rpc_error(-3, None, node.verifyscript, [{
            "tx": ToHex(tx0),
            "n": 0,
            "txo": True
        }])
        assert_raises_rpc_error(-8, "Missing scripts[0].txo.lock",
                                node.verifyscript, [{
                                    "tx": ToHex(tx0),
                                    "n": 0,
                                    "txo": {
                                        "value": 1,
                                        "height": 0
                                    }
                                }])
        assert_raises_rpc_error(-8, "Missing scripts[0].txo.value",
                                node.verifyscript, [{
                                    "tx": ToHex(tx0),
                                    "n": 0,
                                    "txo": {
                                        "lock": "00",
                                        "height": 0
                                    }
                                }])
        assert_raises_rpc_error(-8, "Missing scripts[0].txo.height",
                                node.verifyscript, [{
                                    "tx": ToHex(tx0),
                                    "n": 0,
                                    "txo": {
                                        "lock": "00",
                                        "value": 1
                                    }
                                }])
        assert_raises_rpc_error(-8, "must be hexadecimal string",
                                node.verifyscript, [{
                                    "tx": ToHex(tx0),
                                    "n": 0,
                                    "txo": {
                                        "lock": "01abc",
                                        "value": 1,
                                        "height": 0
                                    }
                                }])  # lock must be hexstring
        self.verifyscript_check_ok(node, [{
            "tx":
            ToHex(create_transaction(tx_test, 0, CScript([OP_TRUE]),
                                     1 * COIN)),
            "n":
            0,
            "txo": {
                "lock": "",
                "value": 1 * COIN,
                "height": 0
            }
        }])  # empty lock script is valid
        assert_raises_rpc_error(-8, "Invalid value for scripts[0].txo.value",
                                node.verifyscript, [{
                                    "tx": ToHex(tx0),
                                    "n": 0,
                                    "txo": {
                                        "lock": "00",
                                        "value": -1,
                                        "height": 0
                                    }
                                }])  # value must be non-negative integer
        assert_raises_rpc_error(
            -8, "Invalid value for scripts[0].txo.height", node.verifyscript,
            [{
                "tx": ToHex(tx0),
                "n": 0,
                "txo": {
                    "lock": "00",
                    "value": 1,
                    "height": -2
                }
            }])  # height must be non-negative integer or -1

        assert_raises_rpc_error(
            -8, "Unable to find TXO spent by transaction scripts[0].tx",
            node.verifyscript, [{
                "tx": ToHex(create_tx(tx0, 0, 1 * COIN)),
                "n": 0
            }])  # Check that non-existent coin is detected

        #
        # Check verification of a valid P2PKH script
        #
        tx1 = create_tx(tx_test, 1, 1 * COIN)
        self.sign_tx(tx1, tx_test, 1)
        expected_flags = 81931  # this is the expected value for automatically determined script verification flags
        res = self.verifyscript_check_ok(
            node,
            [
                # Automatically find TXO and block
                {
                    "tx": ToHex(tx1),
                    "n": 0,
                    "reportflags":
                    True  # report actual flags used by script verification
                },
                # Explicitly provide TXO and block
                {
                    "tx": ToHex(tx1),
                    "n": 0,
                    "reportflags": True,
                    "prevblockhash": tip_hash,
                    "txo": {
                        "lock": bytes_to_hex_str(tx_test.vout[0].scriptPubKey),
                        "value": tx_test.vout[0].nValue,
                        "height": node.getblockcount()
                    }
                },
                # Explicitly provide script verification flags
                {
                    "tx": ToHex(tx1),
                    "n": 0,
                    "flags": expected_flags,
                    "reportflags": True,
                    "txo": {
                        "lock": bytes_to_hex_str(tx_test.vout[0].scriptPubKey),
                        "value": tx_test.vout[0].nValue
                    }
                },
                # Explicitly provide script verification flags; the SCRIPT_UTXO_AFTER_GENESIS flag is still derived from the TXO
                {
                    "tx": ToHex(tx1),
                    "n": 0,
                    "flags": expected_flags ^ (
                        1 << 19
                    ),  # mess up value of SCRIPT_UTXO_AFTER_GENESIS flag that is always set from TXO
                    "reportflags": True,
                    "txo": {
                        "lock": bytes_to_hex_str(tx_test.vout[0].scriptPubKey),
                        "value": tx_test.vout[0].nValue,
                        "height": node.getblockcount()
                    }
                },
                # Once more without reporting flags
                {
                    "tx": ToHex(tx1),
                    "n": 0
                }
            ])

        # Check that automatically determined script flags are as expected
        assert_equal(res[0]["flags"], expected_flags)
        assert_equal(res[1]["flags"], expected_flags)
        assert_equal(res[2]["flags"], expected_flags)
        assert_equal(res[3]["flags"], expected_flags)
        assert (not "flags" in res[4])

        # Changing the output value must make the script invalid
        tx2 = create_tx(tx_test, 1, 1 * COIN)
        self.sign_tx(tx2, tx_test, 1)
        tx2.vout[0].nValue = int(0.9 * COIN)
        self.verifyscript_check_error(node, [{"tx": ToHex(tx2), "n": 0}])

        #
        # Check working of stopOnFirstInvalid
        #
        self.verifyscript_check(node, ["error", "ok"], [{
            "tx": ToHex(tx2),
            "n": 0
        }, {
            "tx": ToHex(tx1),
            "n": 0
        }])
        self.verifyscript_check(node, ["error", "ok"], [{
            "tx": ToHex(tx2),
            "n": 0
        }, {
            "tx": ToHex(tx1),
            "n": 0
        }], False)  # default for stopOnFirstInvalid is False
        self.verifyscript_check(node, ["error", "skipped"], [{
            "tx": ToHex(tx2),
            "n": 0
        }, {
            "tx": ToHex(tx1),
            "n": 0
        }], True)

        #
        # Check that TXO is also found in mempool
        #
        tx3 = create_tx(tx_test, 0, 1 * COIN)
        node.sendrawtransaction(ToHex(tx3), False, True)
        assert_equal(node.getrawmempool(), [tx3.hash])
        tx4 = create_tx(tx3, 0, 1 * COIN)
        self.verifyscript_check_ok(node, [{"tx": ToHex(tx4), "n": 0}])

        #
        # Check that genesis related script flags are selected after some height
        #

        # Generating one more block should place us one block below genesis activation
        # but the mempool should already be at genesis height.
        node.generate(1)
        assert_equal(node.getblockcount(), self.genesisactivationheight - 1)

        # Flags should now also include SCRIPT_GENESIS and SCRIPT_VERIFY_SIGPUSHONLY
        # but not SCRIPT_UTXO_AFTER_GENESIS, because TXO is still before genesis.
        res = self.verifyscript_check_ok(node, [{
            "tx": ToHex(tx4),
            "n": 0,
            "reportflags": True
        }])
        assert_equal(res[0]["flags"], expected_flags + 262144 + 32)
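        # (262144 == 1 << 18 and 32 == 1 << 5; presumably the SCRIPT_GENESIS and
        # SCRIPT_VERIFY_SIGPUSHONLY bits named in the comment above.)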

        # Send this transaction so that we have a spendable coin created after genesis
        node.sendrawtransaction(ToHex(tx4), False, True)
        assert_equal(node.getrawmempool(), [tx4.hash])
        node.generate(1)
        assert_equal(node.getrawmempool(), [])
        assert_equal(node.getblockcount(), self.genesisactivationheight
                     )  # tip should now be at genesis height

        # Transaction spending coin that was created after genesis
        tx5 = create_tx(tx4, 0, 1 * COIN)

        # Now flags should (besides SCRIPT_GENESIS and SCRIPT_VERIFY_SIGPUSHONLY) also
        # include SCRIPT_UTXO_AFTER_GENESIS, because TXO is also after genesis.
        res = self.verifyscript_check_ok(node, [{
            "tx": ToHex(tx5),
            "n": 0,
            "reportflags": True
        }])
        assert_equal(res[0]["flags"], expected_flags + 524288 + 262144 + 32)
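        # (524288 == 1 << 19, the SCRIPT_UTXO_AFTER_GENESIS bit referenced earlier.)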

        #
        # Check timeout detection
        #
        self.verifyscript_check(node, ["skipped", "skipped"], [{
            "tx": ToHex(tx1),
            "n": 0
        }, {
            "tx": ToHex(tx1),
            "n": 0
        }], True, 0)  # everything must be skipped if timeout is 0
        self.verifyscript_check(node, ["skipped", "skipped"], [{
            "tx": ToHex(tx1),
            "n": 0
        }, {
            "tx": ToHex(tx1),
            "n": 0
        }], False, 0)

        # Restart the node to allow unlimited script size and large numbers
        self.restart_node(
            0, self.extra_args[0] +
            ["-maxscriptsizepolicy=0", "-maxscriptnumlengthpolicy=250000"])

        # Create, send and mine transaction with large anyone-can-spend lock script
        tx6 = create_tx(tx_test, 2, 1 * COIN)
        tx6.vout[0] = CTxOut(
            int(1 * COIN),
            CScript([
                bytearray([42] * 250000),
                bytearray([42] * 200 * 1000), OP_MUL, OP_DROP, OP_TRUE
            ]))
        tx6.rehash()
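        # The lock script above multiplies two very large script numbers (OP_MUL),
        # which is what is expected to make spending this output slow to verify.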
        node.sendrawtransaction(ToHex(tx6), False, True)
        assert_equal(node.getrawmempool(), [tx6.hash])
        node.generate(1)
        assert_equal(node.getrawmempool(), [])

        # This transaction should take more than 100ms and less than 2000ms to verify
        # NOTE: If verification takes more or less time than this, some of the checks below will fail.
        #       This can, for example, happen on a very fast, very slow or busy machine.
        tx7 = create_tx(tx6, 0, 1 * COIN)

        # First tx is small and should be successfully verified.
        # Second tx is big and its verification should timeout.
        # Verification of third tx should be skipped even if stopOnFirstInvalid is false because maximum allowed total verification time was already exceeded.
        self.verifyscript_check(node, ["ok", "timeout", "skipped"],
                                [{
                                    "tx": ToHex(tx1),
                                    "n": 0
                                }, {
                                    "tx": ToHex(tx7),
                                    "n": 0
                                }, {
                                    "tx": ToHex(tx1),
                                    "n": 0
                                }], False, 100)

        # If we allow enough time, verification of second tx should still timeout because of maxstdtxvalidationduration.
        self.verifyscript_check(node, ["ok", "timeout", "ok"],
                                [{
                                    "tx": ToHex(tx1),
                                    "n": 0
                                }, {
                                    "tx": ToHex(tx7),
                                    "n": 0
                                }, {
                                    "tx": ToHex(tx1),
                                    "n": 0
                                }], False, 2000)

        # Restart the node with larger value for maxstdtxvalidationduration so that its
        # default value does not limit maximum execution time of single script.
        self.restart_node(
            0, self.extra_args[0] + [
                "-maxstdtxvalidationduration=2000",
                "-maxnonstdtxvalidationduration=2001",
                "-maxscriptsizepolicy=0", "-maxscriptnumlengthpolicy=250000"
            ])

        # Verification of all three scripts should now succeed if total timeout is large enough ...
        self.verifyscript_check(node, ["ok", "ok", "ok"], [{
            "tx": ToHex(tx1),
            "n": 0
        }, {
            "tx": ToHex(tx7),
            "n": 0
        }, {
            "tx": ToHex(tx1),
            "n": 0
        }], False, 2000)

        # ... and timeout as before if it is not
        self.verifyscript_check(node, ["ok", "timeout", "skipped"],
                                [{
                                    "tx": ToHex(tx1),
                                    "n": 0
                                }, {
                                    "tx": ToHex(tx7),
                                    "n": 0
                                }, {
                                    "tx": ToHex(tx1),
                                    "n": 0
                                }], False, 100)


class PVQTimeoutTest(ComparisonTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True
        self.genesisactivationheight = 600
        # The coinbase key used.
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"horsebattery")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        # Locking scripts used in the test.
        self.locking_script_1 = CScript([self.coinbase_pubkey, OP_CHECKSIG])
        self.locking_script_2 = CScript([1, 1, OP_ADD, OP_DROP])
        self.locking_script_3 = CScript([
            bytearray([42] * DEFAULT_SCRIPT_NUM_LENGTH_POLICY_AFTER_GENESIS),
            bytearray([42] * 200 * 1000), OP_MUL, OP_DROP
        ])

        self.default_args = [
            '-debug', '-maxgenesisgracefulperiod=0',
            '-genesisactivationheight=%d' % self.genesisactivationheight
        ]
        self.extra_args = [self.default_args] * self.num_nodes

    def run_test(self):
        self.test.run()

    def check_rejected(self, rejected_txs, should_be_rejected_tx_set):
        wait_until(lambda: {tx.data
                            for tx in rejected_txs} ==
                   {o.sha256
                    for o in should_be_rejected_tx_set},
                   timeout=20)

    def check_mempool(self, rpc, should_be_in_mempool, timeout=20):
        wait_until(lambda: set(rpc.getrawmempool()) ==
                   {t.hash
                    for t in should_be_in_mempool},
                   timeout=timeout)

    # Sign a transaction, using the key we know about.
    # This signs input 0 in tx, which is assumed to be spending output n in spend_tx
    def sign_tx(self, tx, spend_tx, n):
        scriptPubKey = bytearray(spend_tx.vout[n].scriptPubKey)
        sighash = SignatureHashForkId(spend_tx.vout[n].scriptPubKey, tx, 0,
                                      SIGHASH_ALL | SIGHASH_FORKID,
                                      spend_tx.vout[n].nValue)
        tx.vin[0].scriptSig = CScript([
            self.coinbase_key.sign(sighash) +
            bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))
        ])

    # A helper function to generate new txs spending all outpoints from prev_txs set.
    def generate_transactons(self,
                             prev_txs,
                             unlocking_script,
                             locking_script,
                             fee=2000000,
                             factor=10):
        generated_txs = []
        for prev_tx in prev_txs:
            for n, vout in enumerate(prev_tx.vout):
                tx = CTransaction()
                out_val = vout.nValue - fee
                tx.vout.extend((CTxOut(out_val, locking_script), ) * factor)
                tx.vin.append(
                    CTxIn(COutPoint(prev_tx.sha256, n), unlocking_script,
                          0xffffffff))
                tx.calc_sha256()
                generated_txs.append(tx)

        return generated_txs

    # Generate transactions in order so the first transaction's output will be an input for the second transaction.
    def get_chained_txs(self, spend, num_of_txs, unlocking_script,
                        locking_script, money_to_spend, vout_size):
        txns = []
        for _ in range(0, num_of_txs):
            # Create a new transaction.
            tx = create_transaction(spend.tx, spend.n, unlocking_script,
                                    money_to_spend, locking_script)
            # Extend the number of outputs to the required vout_size.
            tx.vout.extend(tx.vout * (vout_size - 1))
            # Sign txn.
            self.sign_tx(tx, spend.tx, spend.n)
            tx.rehash()
            txns.append(tx)
            # Use the first outpoint to spend in the second iteration.
            spend = PreviousSpendableOutput(tx, 0)

        return txns
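
    # (Example: get_chained_txs with chain_length == 3 returns [tx1, tx2, tx3],
    # where tx2 spends output 0 of tx1 and tx3 spends output 0 of tx2.)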

    # Create a required number of chains with equal length.
    # - each tx is configured to have vout_size outpoints with the same locking_script.
    def get_txchains_n(self, num_of_chains, chain_length, spend,
                       unlocking_script, locking_script, money_to_spend,
                       vout_size):
        if num_of_chains > len(spend):
            raise Exception('Insufficient number of spendable outputs.')
        txchains = []
        for x in range(0, num_of_chains):
            txchains += self.get_chained_txs(spend[x], chain_length,
                                             unlocking_script, locking_script,
                                             money_to_spend, vout_size)

        return txchains

    # A helper function to create and send a set of tx chains.
    def generate_and_send_txchains_n(self,
                                     conn,
                                     num_of_chains,
                                     chain_length,
                                     spend,
                                     locking_script,
                                     money_to_spend=2000000,
                                     vout_size=10,
                                     timeout=60):
        # Create the requested tx chains.
        txchains = self.get_txchains_n(num_of_chains, chain_length, spend,
                                       CScript(), locking_script,
                                       money_to_spend, vout_size)
        for tx in range(len(txchains)):
            conn.send_message(msg_tx(txchains[tx]))
        # Check if the validation queues are empty.
        wait_until(
            lambda: conn.rpc.getblockchainactivity()["transactions"] == 0,
            timeout=timeout)

        return txchains

    #
    # Pre-defined testing scenarios.
    #

    # This scenario is used to generate and send a set of standard txs in test cases.
    # - there will be num_txs_to_create txs, each of chain length 1.
    def run_scenario1(self,
                      conn,
                      spend,
                      num_txs_to_create,
                      locking_script,
                      money_to_spend=2000000,
                      vout_size=10,
                      timeout=60):
        return self.generate_and_send_txchains_n(conn, num_txs_to_create, 1,
                                                 spend, locking_script,
                                                 money_to_spend, vout_size,
                                                 timeout)

    # This scenario is used to generate and send a set of non-standard txs in test cases.
    # - there will be num_txs_to_create txs, each of chain length 1.
    def run_scenario2(self,
                      conn,
                      spend,
                      num_txs_to_create,
                      locking_script,
                      additional_txs=[],
                      shuffle_txs=False,
                      money_to_spend=2000000,
                      timeout=60):
        # A handler to catch any reject messages.
        # - it is expected to get only 'too-long-validation-time' reject msgs.
        rejected_txs = []

        def on_reject(conn, msg):
            assert_equal(msg.reason, b'too-long-validation-time')
            rejected_txs.append(msg)

        conn.cb.on_reject = on_reject

        # Create and send tx chains with non-std outputs.
        # - one tx with vout_size=num_txs_to_create outpoints will be created
        txchains = self.generate_and_send_txchains_n(conn, 1, 1, spend,
                                                     locking_script,
                                                     money_to_spend,
                                                     num_txs_to_create,
                                                     timeout)

        # Check if required transactions are accepted by the mempool.
        self.check_mempool(conn.rpc, txchains, timeout)

        # Create a new block
        # - having an empty mempool (before submitting non-std txs) will simplify further checks.
        conn.rpc.generate(1)

        # Create and send transactions spending non-std outputs.
        nonstd_txs = self.generate_transactons(txchains, CScript([OP_TRUE]),
                                               locking_script)
        all_txs = nonstd_txs + additional_txs
        if shuffle_txs:
            random.shuffle(all_txs)
        for tx in all_txs:
            conn.send_message(msg_tx(tx))
        # Check if the validation queues are empty.
        wait_until(
            lambda: conn.rpc.getblockchainactivity()["transactions"] == 0,
            timeout=timeout)

        return nonstd_txs + additional_txs, rejected_txs

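    # Note: run_scenario2 returns (sent_txs, rejected_txs). The test cases below typically
    # assert that rejected_txs is empty (or, as in TC4, that every sent tx was rejected) and
    # that accepted txs reach the mempool, e.g. via self.check_mempool(conn.rpc, sent_txs).
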
    def get_tests(self):
        # Shorthand for functions
        block = self.chain.next_block
        node = self.nodes[0]
        self.chain.set_genesis_hash(int(node.getbestblockhash(), 16))

        # Create a new block
        block(0, coinbase_pubkey=self.coinbase_pubkey)
        self.chain.save_spendable_output()
        yield self.accepted()

        # Now we need that block to mature so we can spend the coinbase.
        # Also, move block height on beyond Genesis activation.
        test = TestInstance(sync_every_block=False)
        for i in range(600):
            block(5000 + i, coinbase_pubkey=self.coinbase_pubkey)
            test.blocks_and_transactions.append([self.chain.tip, True])
            self.chain.save_spendable_output()
        yield test

        # Collect spendable outputs now to avoid cluttering the code later on.
        out = []
        for i in range(200):
            out.append(self.chain.get_spendable_output())

        self.stop_node(0)

        #
        # Test Case 1 (TC1).
        #
        # - 10 standard txs used
        # - 1 peer connected to node0
        # All txs emplaced initially in the standard validation queue are processed and accepted by the mempool.
        # - No txn is rejected with reason 'too-long-validation-time' (none is moved into the non-std queue).
        #
        # The number of txs used in the test case.
        tc1_txs_num = 10
        # Select funding transactions to use:
        # - tc1_txs_num funding transactions are needed in this test case.
        spend_txs = out[0:tc1_txs_num]
        args = [
            '-checkmempool=0',
            '-persistmempool=0',
            '-maxstdtxvalidationduration=500',  # increasing max validation time ensures that timeout doesn't occur for standard txns, even on slower machines and on debug build
            '-maxnonstdtxnsperthreadratio=0'
        ]  # setting it to zero ensures that non-standard txs won't be processed (if there are any queued).
        with self.run_node_with_connections(
                'TC1: {} txs detected as std and then accepted.'.format(
                    tc1_txs_num),
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):
            # Run test case.
            std_txs = self.run_scenario1(conn, spend_txs, tc1_txs_num,
                                         self.locking_script_1)
            # Check if required transactions are accepted by the mempool.
            self.check_mempool(conn.rpc, std_txs, timeout=30)
            assert_equal(conn.rpc.getmempoolinfo()['size'], tc1_txs_num)

        #
        # Test Case 2 (TC2).
        #
        # - 10 non-standard txs (with a simple locking script) used.
        # - 1 peer connected to node0.
        # The test case creates rejected txns with a reason 'too-long-validation-time' for all txs initially emplaced into the standard queue.
        # - those rejects are not taken into account to create reject messages (see explanation - point 6)
        # All txns are then forwarded to the non-standard validation queue where the validation timeout is longer (sufficient).
        #
        # The number of txs used in the test case.
        tc2_txs_num = 10
        # Select funding transactions to use:
        # - one funding transaction is needed in this test case.
        spend_txs = out[tc1_txs_num:tc1_txs_num + 1]
        args = ['-checkmempool=0', '-persistmempool=0']
        with self.run_node_with_connections(
                'TC2: {} txs with small bignums detected as non-std txs and then finally accepted.'
                .format(tc2_txs_num),
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):
            # Run test case.
            nonstd_txs, rejected_txs = self.run_scenario2(
                conn, spend_txs, tc2_txs_num, self.locking_script_2)
            # No transactions should be rejected
            assert_equal(len(rejected_txs), 0)
            # Check if required transactions are accepted by the mempool.
            self.check_mempool(conn.rpc, nonstd_txs, timeout=30)
            assert_equal(conn.rpc.getmempoolinfo()['size'], tc2_txs_num)

        #
        # Test Case 3 (TC3).
        #
        # - 10 non-standard txs (with a complex locking script) used.
        # - 1 peer connected to node0
        # The test case creates rejected txns with a reason 'too-long-validation-time' for all txs initially emplaced into the standard queue.
        # - those rejects are not taken into account to create reject messages (see explanation - point 6)
        # All txns are then forwarded to the non-standard validation queue where the validation timeout is longer (sufficient).
        #
        # The number of txs used in the test case.
        tc3_txs_num = 10
        # Select funding transactions to use:
        # - one funding transaction is needed in this test case.
        spend_txs = out[tc1_txs_num + 1:tc1_txs_num + 2]
        args = [
            '-checkmempool=0',
            '-persistmempool=0',
            '-maxnonstdtxvalidationduration=100000',  # On slow/busy machine txn validation times have to be high
            '-maxtxnvalidatorasynctasksrunduration=100001',  # This needs to be higher than maxnonstdtxvalidationduration
            '-maxscriptsizepolicy=0'
        ]
        with self.run_node_with_connections(
                'TC3: {} txs with large bignums detected as non-std txs and then finally accepted.'
                .format(tc3_txs_num),
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):
            # Run test case.
            nonstd_txs, rejected_txs = self.run_scenario2(
                conn, spend_txs, tc3_txs_num, self.locking_script_3)
            # No transactions should be rejected
            assert_equal(len(rejected_txs), 0)
            # Check if required transactions are accepted by the mempool.
            self.check_mempool(conn.rpc, nonstd_txs, timeout=30)
            assert_equal(conn.rpc.getmempoolinfo()['size'], tc3_txs_num)

        #
        # Test Case 4 (TC4).
        #
        # - 10 non-standard txs (with a complex locking script) used.
        # - 1 peer connected to node0
        # The test case creates rejected txns with a reason 'too-long-validation-time' for all txs initially emplaced into the standard queue.
        # - those rejects are not taken into account to create reject messages (see explanation - point 6)
        # All txns are then forwarded to the non-standard validation queue.
        # - due to insufficient timeout config all txs are rejected again with 'too-long-validation-time' reject reason.
        # - reject messages are created for each and every txn.
        #
        # The number of txs used in the test case.
        tc4_txs_num = 10
        # Select funding transactions to use:
        # - one funding transaction is needed in this test case.
        spend_txs = out[tc1_txs_num + 2:tc1_txs_num + 3]
        args = [
            '-checkmempool=0', '-persistmempool=0', '-maxscriptsizepolicy=0'
        ]
        with self.run_node_with_connections(
                'TC4: {} txs with large bignums detected as non-std txs and then finally rejected.'
                .format(tc4_txs_num),
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):
            # Run test case.
            nonstd_txs, rejected_txs = self.run_scenario2(
                conn, spend_txs, tc4_txs_num, self.locking_script_3)
            # Check rejected transactions.
            self.check_rejected(rejected_txs, nonstd_txs)
            assert_equal(len(rejected_txs), tc4_txs_num)
            # The mempool should be empty at this stage.
            assert_equal(conn.rpc.getmempoolinfo()['size'], 0)

        #
        # Test Case 5 (TC5).
        #
        # - 100 standard txs used.
        # - 10 non-standard (with a simple locking script) txs used.
        # - 1 peer connected to node0.
        # This test case is a combination of TC1 & TC2
        # - the set of std and non-std txs is shuffled before sending it to the node.
        #
        # The number of txs used in the test case.
        tc5_1_txs_num = 100
        tc5_2_txs_num = 10
        # Select funding transactions to use:
        # - tc5_1_txs_num+1 funding transactions are needed in this test case.
        spend_txs = out[tc1_txs_num + 3:tc1_txs_num + 3 + tc5_1_txs_num]
        spend_txs2 = out[tc1_txs_num + 3 + tc5_1_txs_num:tc1_txs_num + 4 +
                         tc5_1_txs_num]
        args = ['-checkmempool=0', '-persistmempool=0']
        with self.run_node_with_connections(
                'TC5: The total of {} std and nonstd txs processed and accepted.'
                .format(tc5_1_txs_num + tc5_2_txs_num),
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):
            # Run test case.
            std_txs = self.get_txchains_n(tc5_1_txs_num, 1, spend_txs,
                                          CScript(), self.locking_script_1,
                                          2000000, 10)
            std_and_nonstd_txs, rejected_txs = self.run_scenario2(
                conn,
                spend_txs2,
                tc5_2_txs_num,
                self.locking_script_2,
                std_txs,
                shuffle_txs=True)
            # Check if required transactions are accepted by the mempool.
            self.check_mempool(conn.rpc, std_and_nonstd_txs, timeout=30)
            assert_equal(conn.rpc.getmempoolinfo()['size'],
                         tc5_1_txs_num + tc5_2_txs_num)
Example 22
class FullBlockTest(ComparisonTestFramework):

    ''' Can either run this test as 1 node with expected answers, or two and compare them. 
        Change the "outcome" variable from each TestInstance object to only do the comparison. '''
    def __init__(self):
        super().__init__()
        self.num_nodes = 1
        self.block_heights = {}
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"horsebattery")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        self.block_time = int(time.time())+1
        self.tip = None
        self.blocks = {}

    def run_test(self):
        test = TestManager(self, self.options.tmpdir)
        test.add_all_connections(self.nodes)
        NetworkThread().start() # Start up network handling in another thread
        sync_popnodes(self.nodes)
        test.run()

    def add_transactions_to_block(self, block, tx_list):
        [ tx.rehash() for tx in tx_list ]
        block.vtx.extend(tx_list)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        return block
    
    # Create a block on top of self.tip, and advance self.tip to point to the new block
    # If spend is specified, then 1 satoshi will be spent from that to an anyone-can-spend output,
    # and the rest will go to fees.
    def next_block(self, number, spend=None, additional_coinbase_value=0, script=None):
        if self.tip is None:
            base_block_hash = self.genesis_hash
        else:
            base_block_hash = self.tip.sha256
        # First create the coinbase
        height = self.block_heights[base_block_hash] + 1
        coinbase = create_coinbase(height, self.coinbase_pubkey)
        coinbase.vout[0].nValue += additional_coinbase_value
        if spend is not None:
            coinbase.vout[0].nValue += spend.tx.vout[spend.n].nValue - 1 # all but one satoshi to fees
        coinbase.rehash()
        block = create_block(base_block_hash, coinbase, self.block_time)
        if spend is not None:
            tx = CTransaction()
            tx.vin.append(CTxIn(COutPoint(spend.tx.sha256, spend.n), b"", 0xffffffff))  # no signature yet
            # This copies the java comparison tool testing behavior: the first
            # txout has a garbage scriptPubKey, "to make sure we're not
            # pre-verifying too much" (?)
            tx.vout.append(CTxOut(0, CScript([random.randint(0,255), height & 255])))
            if script is None:
                tx.vout.append(CTxOut(1, CScript([OP_TRUE])))
            else:
                tx.vout.append(CTxOut(1, script))
            # Now sign it if necessary
            scriptSig = b""
            scriptPubKey = bytearray(spend.tx.vout[spend.n].scriptPubKey)
            if (scriptPubKey[0] == OP_TRUE):  # looks like an anyone-can-spend
                scriptSig = CScript([OP_TRUE])
            else:
                # We have to actually sign it
                (sighash, err) = SignatureHash(spend.tx.vout[spend.n].scriptPubKey, tx, 0, SIGHASH_ALL)
                scriptSig = CScript([self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))])
            tx.vin[0].scriptSig = scriptSig
            # Now add the transaction to the block
            block = self.add_transactions_to_block(block, [tx])
        block.solve()
        self.tip = block
        self.block_heights[block.sha256] = height
        self.block_time += 1
        assert number not in self.blocks
        self.blocks[number] = block
        return block

    def get_tests(self):
        self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
        self.block_heights[self.genesis_hash] = 0
        spendable_outputs = []

        # save the current tip so it can be spent by a later block
        def save_spendable_output():
            spendable_outputs.append(self.tip)

        # get an output that we previously marked as spendable
        def get_spendable_output():
            return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)

        # returns a test case that asserts that the current tip was accepted
        def accepted():
            return TestInstance([[self.tip, True]])

        # returns a test case that asserts that the current tip was rejected
        def rejected(reject = None):
            if reject is None:
                return TestInstance([[self.tip, False]])
            else:
                return TestInstance([[self.tip, reject]])
       
        # move the tip back to a previous block
        def tip(number):
            self.tip = self.blocks[number]

        # add transactions to a block produced by next_block
        def update_block(block_number, new_transactions):
            block = self.blocks[block_number]
            old_hash = block.sha256
            self.add_transactions_to_block(block, new_transactions)
            block.solve()
            # Update the internal state just like in next_block
            self.tip = block
            self.block_heights[block.sha256] = self.block_heights[old_hash]
            del self.block_heights[old_hash]
            self.blocks[block_number] = block
            return block

        # creates a new block and advances the tip to that block
        block = self.next_block


        # Create a new block
        block(0)
        save_spendable_output()
        yield accepted()


        # Now we need that block to mature so we can spend the coinbase.
        test = TestInstance(sync_every_block=False)
        for i in range(99):
            block(1000 + i)
            test.blocks_and_transactions.append([self.tip, True])
            save_spendable_output()
        yield test


        # Start by building a couple of blocks on top (which output is spent is
        # in parentheses):
        #     genesis -> b1 (0) -> b2 (1)
        out0 = get_spendable_output()
        block(1, spend=out0)
        save_spendable_output()
        yield accepted()

        out1 = get_spendable_output()
        b2 = block(2, spend=out1)
        yield accepted()


        # so fork like this:
        # 
        #     genesis -> b1 (0) -> b2 (1)
        #                      \-> b3 (1)
        # 
        # Nothing should happen at this point. We saw b2 first so it takes priority.
        tip(1)
        b3 = block(3, spend=out1)
        txout_b3 = PreviousSpendableOutput(b3.vtx[1], 1)
        yield rejected()


        # Now we add another block to make the alternative chain longer.
        # 
        #     genesis -> b1 (0) -> b2 (1)
        #                      \-> b3 (1) -> b4 (2)
        out2 = get_spendable_output()
        block(4, spend=out2)
        yield accepted()


        # ... and back to the first chain.
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                      \-> b3 (1) -> b4 (2)
        tip(2)
        block(5, spend=out2)
        save_spendable_output()
        yield rejected()

        out3 = get_spendable_output()
        block(6, spend=out3)
        yield accepted()


        # Try to create a fork that double-spends
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                                          \-> b7 (2) -> b8 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        block(7, spend=out2)
        yield rejected()

        out4 = get_spendable_output()
        block(8, spend=out4)
        yield rejected()


        # Try to create a block that has too much fee
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                                                    \-> b9 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(6)
        block(9, spend=out4, additional_coinbase_value=1)
        yield rejected(RejectResult(16, b'bad-cb-amount'))

        
        # Create a fork that ends in a block with too much fee (the one that causes the reorg)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b10 (3) -> b11 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        block(10, spend=out3)
        yield rejected()

        block(11, spend=out4, additional_coinbase_value=1)
        yield rejected(RejectResult(16, b'bad-cb-amount'))


        # Try again, but with a valid fork first
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b14 (5)
        #                                              (b12 added last)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        b12 = block(12, spend=out3)
        save_spendable_output()
        #yield TestInstance([[b12, False]])
        b13 = block(13, spend=out4)
        # Deliver the block header for b12, and the block b13.
        # b13 should be accepted but the tip won't advance until b12 is delivered.
        yield TestInstance([[CBlockHeader(b12), None], [b13, False]])

        save_spendable_output()
        out5 = get_spendable_output()
        # b14 is invalid, but the node won't know that until it tries to connect
        # Tip still can't advance because b12 is missing
        block(14, spend=out5, additional_coinbase_value=1)
        yield rejected()

        yield TestInstance([[b12, True, b13.sha256]]) # New tip should be b13.

        # Add a block with MAX_BLOCK_SIGOPS and one with one more sigop
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b16 (6)
        #                      \-> b3 (1) -> b4 (2)
        
        # Test that a block with a lot of checksigs is okay
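        # (1000000 // 50 = 20000 is the historical MAX_BLOCK_SIGOPS limit; the "- 1"
        #  presumably leaves room for the single OP_CHECKSIG in the coinbase output.)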
        lots_of_checksigs = CScript([OP_CHECKSIG] * (1000000 // 50 - 1))
        tip(13)
        block(15, spend=out5, script=lots_of_checksigs)
        yield accepted()


        # Test that a block with too many checksigs is rejected
        out6 = get_spendable_output()
        too_many_checksigs = CScript([OP_CHECKSIG] * (1000000 // 50))
        block(16, spend=out6, script=too_many_checksigs)
        yield rejected(RejectResult(16, b'bad-blk-sigops'))


        # Attempt to spend a transaction created on a different fork
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b17 (b3.vtx[1])
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        block(17, spend=txout_b3)
        yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))

        # Attempt to spend a transaction created on a different fork (on a fork this time)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5)
        #                                                                \-> b18 (b3.vtx[1]) -> b19 (6)
        #                      \-> b3 (1) -> b4 (2)
        tip(13)
        block(18, spend=txout_b3)
        yield rejected()

        block(19, spend=out6)
        yield rejected()

        # Attempt to spend a coinbase at depth too low
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b20 (7)
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        out7 = get_spendable_output()
        block(20, spend=out7)
        yield rejected(RejectResult(16, b'bad-txns-premature-spend-of-coinbase'))

        # Attempt to spend a coinbase at depth too low (on a fork this time)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5)
        #                                                                \-> b21 (6) -> b22 (5)
        #                      \-> b3 (1) -> b4 (2)
        tip(13)
        block(21, spend=out6)
        yield rejected()

        block(22, spend=out5)
        yield rejected()

        # Create a block on either side of MAX_BLOCK_SIZE and make sure it's accepted/rejected
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6)
        #                                                                           \-> b24 (6) -> b25 (7)
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        b23 = block(23, spend=out6)
        old_hash = b23.sha256
        tx = CTransaction()
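        # The 69 here is assumed to be the fixed serialized overhead of the padding tx
        # (everything other than the pushed data itself), so that the single data push
        # fills the block exactly to MAX_BLOCK_SIZE; the assert below checks the arithmetic.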
        script_length = MAX_BLOCK_SIZE - len(b23.serialize()) - 69
        script_output = CScript([b'\x00' * script_length])
        tx.vout.append(CTxOut(0, script_output))
        tx.vin.append(CTxIn(COutPoint(b23.vtx[1].sha256, 1)))
        b23 = update_block(23, [tx])
        # Make sure the math above worked out to produce a max-sized block
        assert_equal(len(b23.serialize()), MAX_BLOCK_SIZE)
        yield accepted()

        # Make the next block one byte bigger and check that it fails
        tip(15)
        b24 = block(24, spend=out6)
        script_length = MAX_BLOCK_SIZE - len(b24.serialize()) - 69
        script_output = CScript([b'\x00' * (script_length+1)])
        tx.vout = [CTxOut(0, script_output)]
        b24 = update_block(24, [tx])
        assert_equal(len(b24.serialize()), MAX_BLOCK_SIZE+1)
        yield rejected(RejectResult(16, b'bad-blk-length'))

        b25 = block(25, spend=out7)
        yield rejected()

        # Create blocks with a coinbase input script size out of range
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7)
        #                                                                           \-> ... (6) -> ... (7)
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        b26 = block(26, spend=out6)
        b26.vtx[0].vin[0].scriptSig = b'\x00'
        b26.vtx[0].rehash()
        # update_block causes the merkle root to get updated, even with no new
        # transactions, and updates the required state.
        b26 = update_block(26, [])
        yield rejected(RejectResult(16, b'bad-cb-length'))

        # Extend the b26 chain to make sure bitcoind isn't accepting b26
        b27 = block(27, spend=out7)
        yield rejected()

        # Now try a too-large-coinbase script
        tip(15)
        b28 = block(28, spend=out6)
        b28.vtx[0].vin[0].scriptSig = b'\x00' * 101
        b28.vtx[0].rehash()
        b28 = update_block(28, [])
        yield rejected(RejectResult(16, b'bad-cb-length'))

        # Extend the b28 chain to make sure bitcoind isn't accepting b28
        b29 = block(29, spend=out7)
        # TODO: Should get a reject message back with "bad-prevblk", except
        # there's a bug that prevents this from being detected.  Just note
        # failure for now, and add the reject result later.
        yield rejected()

        # b30 has a max-sized coinbase scriptSig.
        tip(23)
        b30 = block(30)
        b30.vtx[0].vin[0].scriptSig = b'\x00' * 100
        b30.vtx[0].rehash()
        b30 = update_block(30, [])
        yield accepted()
Example 23
class PBVWithSigOps(BitcoinTestFramework):

    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 1
        self.extra_args = [["-whitelist=127.0.0.1"]]
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"horsebattery")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        self.chain = ChainManager()

    def sign_expensive_tx(self, tx, spend_tx, n, sigChecks):
        sighash = SignatureHashForkId(
            spend_tx.vout[n].scriptPubKey, tx, 0, SIGHASH_ALL | SIGHASH_FORKID, spend_tx.vout[n].nValue)

        tx.vin[0].scriptSig = CScript(
            [self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID])),
             self.coinbase_pubkey] * sigChecks
            + [self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID])),
               self.coinbase_pubkey])

    def get_hard_transactions(self, spend, money_to_spend, num_of_transactions, num_of_sig_checks, expensive_script):
        txns = []
        for _ in range(0, num_of_transactions):
            money_to_spend = money_to_spend - 1  # one satoshi to fee
            tx2 = create_transaction(spend.tx, spend.n, b"", money_to_spend, CScript(expensive_script))
            sign_tx(tx2, spend.tx, spend.n, self.coinbase_key)
            tx2.rehash()
            txns.append(tx2)

            money_to_spend = money_to_spend - 1
            tx3 = create_transaction(tx2, 0, b"", money_to_spend, scriptPubKey=CScript([OP_TRUE]))
            self.sign_expensive_tx(tx3, tx2, 0, num_of_sig_checks)
            tx3.rehash()
            txns.append(tx3)

            spend = PreviousSpendableOutput(tx3, 0)
        return txns

    def run_test(self):
        block_count = 0

        # Create a P2P connection
        node0 = NodeConnCB()
        connection = NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], node0)
        node0.add_connection(connection)

        network_thread = NetworkThread()
        network_thread.start()
        # wait_for_verack ensures that the P2P connection is fully up.
        node0.wait_for_verack()

        self.chain.set_genesis_hash(int(self.nodes[0].getbestblockhash(), 16))

        _, out, block_count = prepare_init_chain(self.chain, 101, 100, block_0=False, start_block=0, node=node0)

        self.log.info("waiting for block height 101 via rpc")
        self.nodes[0].waitforblockheight(101)

        block1_num = block_count - 1

        # num of sig operations in one transaction
        num_of_sig_checks = 70

        expensive_scriptPubKey = [OP_DUP, OP_HASH160, hash160(self.coinbase_pubkey),
                                  OP_EQUALVERIFY, OP_CHECKSIG, OP_DROP] * num_of_sig_checks + [OP_DUP, OP_HASH160,
                                                                                               hash160(
                                                                                                   self.coinbase_pubkey),
                                                                                               OP_EQUALVERIFY,
                                                                                               OP_CHECKSIG]
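        # The script above chains num_of_sig_checks P2PKH-style checks (each result dropped
        # with OP_DROP) plus one final check, so a spender must supply num_of_sig_checks + 1
        # (sig, pubkey) pairs; this matches what sign_expensive_tx() pushes in the scriptSig.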

        money_to_spend = 5000000000
        spend = out[0]

        block2_hard = self.chain.next_block(block_count)

        # Creates 4000 hard transactions and 4000 transactions to spend them: 8000 transactions in total.
        add_txns = self.get_hard_transactions(spend, money_to_spend=money_to_spend, num_of_transactions=4000,
                                              num_of_sig_checks=num_of_sig_checks,
                                              expensive_script=expensive_scriptPubKey)
        self.chain.update_block(block_count, add_txns)
        block_count += 1
        self.log.info(f"block2_hard hash: {block2_hard.hash}")

        self.chain.set_tip(block1_num)
        block3_easier = self.chain.next_block(block_count)
        add_txns = self.get_hard_transactions(spend, money_to_spend=money_to_spend, num_of_transactions=1000,
                                              num_of_sig_checks=num_of_sig_checks,
                                              expensive_script=expensive_scriptPubKey)
        self.chain.update_block(block_count, add_txns)
        self.log.info(f"block3_easier hash: {block3_easier.hash}")

        node0.send_message(msg_block(block2_hard))
        node0.send_message(msg_block(block3_easier))

        def wait_for_log():
            text_activation = f"Block {block2_hard.hash} was not activated as best"
            text_block2 = "Verify 8000 txins"
            text_block3 = "Verify 2000 txins"
            results = 0
            for line in open(glob.glob(self.options.tmpdir + "/node0" + "/regtest/bitcoind.log")[0]):
                if text_activation in line:
                    results += 1
                elif text_block2 in line:
                    results += 1
                elif text_block3 in line:
                    results += 1
            return results == 3

        # Wait until everything is written to the log.
        # Try to account for slower machines by using a large timeout.
        wait_until(wait_for_log, timeout=120)

        text_activation = f"Block {block2_hard.hash} was not activated as best"
        text_block2 = "Verify 8000 txins"
        text_block3 = "Verify 2000 txins"
        for line in open(glob.glob(self.options.tmpdir + "/node0" + "/regtest/bitcoind.log")[0]):
            if text_activation in line:
                self.log.info(f"block2_hard was not activated as block3_easy won the validation race")
            elif text_block2 in line:
                line = line.split()
                self.log.info(f"block2_hard took {line[-1]} to verify")
            elif text_block3 in line:
                line = line.split()
                self.log.info(f"block3_easy took {line[-1]} to verify")

        assert_equal(block3_easier.hash, self.nodes[0].getbestblockhash())
        node0.connection.close()
Example 24
class TxnPropagationAfterBlock(ComparisonTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True
        self.tip = None
        self.block_time = None
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"horsebattery")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        self.extra_args = [[
            '-broadcastdelay=50000', '-txnpropagationfreq=50000'
        ]] * self.num_nodes

    def run_test(self):
        self.test.run()

    # Sign a transaction, using the key we know about.
    # This signs input 0 in tx, which is assumed to be spending output n in spend_tx
    def sign_tx(self, tx, spend_tx, n):
        scriptPubKey = bytearray(spend_tx.vout[n].scriptPubKey)
        if (scriptPubKey[0] == OP_TRUE):  # an anyone-can-spend
            tx.vin[0].scriptSig = CScript()
            return
        sighash = SignatureHashForkId(spend_tx.vout[n].scriptPubKey, tx, 0,
                                      SIGHASH_ALL | SIGHASH_FORKID,
                                      spend_tx.vout[n].nValue)
        tx.vin[0].scriptSig = CScript([
            self.coinbase_key.sign(sighash) +
            bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))
        ])

    # Create a new block with some number of valid spending txns
    def next_block(self,
                   number,
                   spend=None,
                   additional_coinbase_value=0,
                   script=CScript([OP_TRUE])):
        if self.chain.tip is None:
            base_block_hash = self.chain._genesis_hash
            block_time = int(time.time()) + 1
        else:
            base_block_hash = self.chain.tip.sha256
            block_time = self.chain.tip.nTime + 1
        # First create the coinbase
        height = self.chain.block_heights[base_block_hash] + 1
        coinbase = create_coinbase(height, self.coinbase_pubkey)
        coinbase.vout[0].nValue += additional_coinbase_value
        coinbase.rehash()
        if spend is None:
            block = create_block(base_block_hash, coinbase, block_time)
        else:
            # All but one satoshi for each txn to fees
            for s in spend:
                coinbase.vout[0].nValue += s.tx.vout[s.n].nValue - 1
                coinbase.rehash()
            block = create_block(base_block_hash, coinbase, block_time)
            # Add as many txns as required
            for s in spend:
                # Spend 1 satoshi
                tx = create_transaction(s.tx, s.n, b"", 1, script)
                self.sign_tx(tx, s.tx, s.n)
                self.chain.add_transactions_to_block(block, [tx])
                block.hashMerkleRoot = block.calc_merkle_root()
        # Do PoW, which is very inexpensive on regnet
        block.solve()
        self.chain.tip = block
        self.chain.block_heights[block.sha256] = height
        assert number not in self.chain.blocks
        self.chain.blocks[number] = block
        return block

    def get_tests(self):
        node = self.nodes[0]
        self.chain.set_genesis_hash(int(node.getbestblockhash(), 16))

        # Create a new block
        self.next_block(0)
        self.chain.save_spendable_output()
        yield self.accepted()

        # Now we need that block to mature so we can spend the coinbase.
        test = TestInstance(sync_every_block=False)
        for i in range(140):
            self.next_block(5000 + i)
            test.blocks_and_transactions.append([self.chain.tip, True])
            self.chain.save_spendable_output()
        yield test

        # Collect spendable outputs now to avoid cluttering the code later on
        out = []
        for i in range(100):
            out.append(self.chain.get_spendable_output())

        # Create blocks with multiple txns in
        block1 = self.next_block(1, spend=out[0:20])
        block2 = self.next_block(2, spend=out[20:40])

        # Check the frequency at which the txn propagator runs has been set to very slow (we will poke it as required)
        assert_equal(self.nodes[0].getnetworkinfo()['txnpropagationfreq'],
                     50000)

        # Get half of the txns from each block into the peers inventory queue
        for t in range(1, 11):
            self.nodes[0].sendrawtransaction(
                bytes_to_hex_str(block1.vtx[t].serialize()), True)
            self.nodes[0].sendrawtransaction(
                bytes_to_hex_str(block2.vtx[t].serialize()), True)
        self.nodes[0].settxnpropagationfreq(50000)
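        # 10 txns from each of the two blocks were submitted above, so the peer's
        # inventory queue is expected to hold 20 entries once the propagator has run.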
        wait_until(lambda: self.nodes[0].getpeerinfo()[0]['txninvsize'] == 20)

        # Feed in the other half of txns to just the txn propagator queue
        for t in range(11, 21):
            self.nodes[0].sendrawtransaction(
                bytes_to_hex_str(block1.vtx[t].serialize()), True)
            self.nodes[0].sendrawtransaction(
                bytes_to_hex_str(block2.vtx[t].serialize()), True)
        assert_equal(self.nodes[0].getnetworkinfo()['txnpropagationqlen'], 20)
        assert_equal(self.nodes[0].getmempoolinfo()['size'], 40)

        # Mine the first new block
        yield TestInstance([[block1, True]])

        # Check the txns from the mined block have gone from the propagator queue and the nodes queue
        assert_equal(self.nodes[0].getnetworkinfo()['txnpropagationqlen'], 10)
        assert_equal(self.nodes[0].getpeerinfo()[0]['txninvsize'], 10)
        assert_equal(self.nodes[0].getmempoolinfo()['size'], 20)
Example 25
class PTVP2PTest(ComparisonTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True
        self.genesisactivationheight = 600
        # The coinbase key used.
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"horsebattery")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        # Locking scripts used in the test.
        self.locking_script_1 = CScript([self.coinbase_pubkey, OP_CHECKSIG])
        self.locking_script_2 = CScript([1, 1, OP_ADD, OP_DROP])

        self.default_args = [
            '-debug', '-maxgenesisgracefulperiod=0',
            '-genesisactivationheight=%d' % self.genesisactivationheight
        ]
        self.extra_args = [self.default_args] * self.num_nodes

    def run_test(self):
        self.test.run()

    def check_rejected(self, rejected_txs, should_be_rejected_tx_set):
        wait_until(lambda: {tx.data
                            for tx in rejected_txs} ==
                   {o.sha256
                    for o in should_be_rejected_tx_set},
                   timeout=20)

    def check_mempool(self, rpc, should_be_in_mempool, timeout=20):
        wait_until(lambda: set(rpc.getrawmempool()) ==
                   {t.hash
                    for t in should_be_in_mempool},
                   timeout=timeout)

    def check_mempool_with_subset(self, rpc, should_be_in_mempool, timeout=20):
        wait_until(lambda: {t.hash
                            for t in should_be_in_mempool}.issubset(
                                set(rpc.getrawmempool())),
                   timeout=timeout)

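    # Return the subset of txs_set (by hash) that is currently present in the mempool.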
    def check_intersec_with_mempool(self, rpc, txs_set):
        return set(rpc.getrawmempool()).intersection(t.hash for t in txs_set)

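    # Pop and return the first 'num' spendable outputs; note that 'spends' is modified in place.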
    def get_front_slice(self, spends, num):
        txs_slice = spends[0:num]
        del spends[0:num]
        return txs_slice

    # Sign a transaction, using the key we know about.
    # This signs input 0 in tx, which is assumed to be spending output n in spend_tx
    def sign_tx(self, tx, spend_tx, n):
        scriptPubKey = bytearray(spend_tx.vout[n].scriptPubKey)
        sighash = SignatureHashForkId(spend_tx.vout[n].scriptPubKey, tx, 0,
                                      SIGHASH_ALL | SIGHASH_FORKID,
                                      spend_tx.vout[n].nValue)
        tx.vin[0].scriptSig = CScript([
            self.coinbase_key.sign(sighash) +
            bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))
        ])

    # A helper function to generate new txs spending all outpoints from prev_txs set.
    def generate_transactons(self,
                             prev_txs,
                             unlocking_script,
                             locking_script,
                             num_of_ds_txs=0,
                             fee=2000000,
                             factor=10):
        gen_txs = []
        ds_txs = []
        for prev_tx in prev_txs:
            for n, vout in enumerate(prev_tx.vout):
                tx = CTransaction()
                out_val = vout.nValue - fee
                tx.vout.extend((CTxOut(out_val, locking_script), ) * factor)
                tx.vin.append(
                    CTxIn(COutPoint(prev_tx.sha256, n), unlocking_script,
                          0xffffffff))
                # Use the first unspent txn as a common input for all double spend transactions.
                if num_of_ds_txs and len(ds_txs) < num_of_ds_txs - 1 and len(
                        gen_txs):
                    tx.vin.append(
                        CTxIn(COutPoint(prev_txs[0].sha256, 0),
                              unlocking_script, 0xffffffff))
                    tx.calc_sha256()
                    ds_txs.append(tx)
                    continue
                tx.calc_sha256()
                gen_txs.append(tx)
        # To simplify further checks, move the first unspent txn to the ds_txs set.
        if num_of_ds_txs:
            ds_txs.append(gen_txs[0])
            del gen_txs[0]
        if len(ds_txs) != num_of_ds_txs:
            raise Exception(
                'Cannot create required number of double spend txs.')
        return gen_txs, ds_txs

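    # Note on generate_transactons(): it returns (gen_txs, ds_txs). Each tx in gen_txs spends
    # a unique outpoint, while every tx in ds_txs spends the first outpoint of prev_txs[0]
    # (most of them as an extra input), so at most one of the ds_txs can ever be accepted
    # by the mempool.
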
    # Generate transactions in order so the first transaction's output will be an input for the second transaction.
    def get_chained_txs(self, spend, num_of_txs, unlocking_script,
                        locking_script, money_to_spend, factor):
        txns = []
        for _ in range(0, num_of_txs):
            if factor == 1:
                money_to_spend = money_to_spend - 1000
            # Create a new transaction.
            tx = create_transaction(spend.tx, spend.n, unlocking_script,
                                    money_to_spend, locking_script)
            # Extend the number of outputs to the required size.
            tx.vout.extend(tx.vout * (factor - 1))
            # Sign txn.
            self.sign_tx(tx, spend.tx, spend.n)
            tx.rehash()
            txns.append(tx)
            # Use the first outpoint to spend in the second iteration.
            spend = PreviousSpendableOutput(tx, 0)

        return txns

    # Create a required number of chains with equal length.
    # - each tx is configured to have factor outpoints with the same locking_script.
    def get_txchains_n(self, num_of_chains, chain_length, spend,
                       unlocking_script, locking_script, money_to_spend,
                       factor):
        if num_of_chains > len(spend):
            raise Exception('Insufficient number of spendable outputs.')
        txchains = []
        for x in range(0, num_of_chains):
            txchains += self.get_chained_txs(spend[x], chain_length,
                                             unlocking_script, locking_script,
                                             money_to_spend, factor)

        return txchains

    # A helper function to create and send a set of tx chains.
    def generate_and_send_txchains_n(self,
                                     conn,
                                     num_of_chains,
                                     chain_length,
                                     spend,
                                     locking_script,
                                     money_to_spend=5000000000,
                                     factor=10,
                                     timeout=60):
        # Create and send txs: num_of_chains tx chains are created, each of length chain_length.
        txchains = self.get_txchains_n(num_of_chains, chain_length, spend,
                                       CScript(), locking_script,
                                       money_to_spend, factor)
        for tx in txchains:
            conn.send_message(msg_tx(tx))

        return txchains

    #
    # Pre-defined testing scenarios.
    #

    # This scenario is used to generate and send a set of standard txs in test cases.
    def run_scenario1(self,
                      conn,
                      spend,
                      num_txs_to_create,
                      chain_length,
                      locking_script,
                      money_to_spend=2000000,
                      factor=10,
                      timeout=60):
        return self.generate_and_send_txchains_n(conn, num_txs_to_create,
                                                 chain_length, spend,
                                                 locking_script,
                                                 money_to_spend, factor,
                                                 timeout)

    # This scenario is used to generate and send a set of non-standard txs in test cases.
    # - num_txs_to_create txs are created, each being a chain of length 1.
    # - from a single spend 2499 txs can be created (due to value of the funding tx and value assigned to outpoints: 5000000000/2000000 = 2500)
    #   - The exact number of 2500 txs could be created by including '-limitfreerelay=1000' param in the node's config.
    #   - The value 2000000 meets requirements of sufficient fee per txn size (used in the test).
    def run_scenario2(self,
                      conn,
                      spend,
                      num_txs_to_create,
                      locking_script,
                      num_ds_to_create=0,
                      additional_txs=[],
                      shuffle_txs=False,
                      send_txs=True,
                      money_to_spend=2000000,
                      timeout=60):
        # A handler to catch reject messages.
        rejected_txs = []

        def on_reject(conn, msg):
            rejected_txs.append(msg)
            # A double spend reject message is the expected one to occur.
            assert_equal(msg.reason, b'txn-double-spend-detected')

        conn.cb.on_reject = on_reject

        # Create and send tx chains with non-std outputs.
        # - one tx with factor=num_txs_to_create outpoints will be created
        txchains = self.generate_and_send_txchains_n(conn, 1, 1, spend,
                                                     locking_script,
                                                     money_to_spend,
                                                     num_txs_to_create,
                                                     timeout)

        # Check if required transactions are accepted by the mempool.
        self.check_mempool(conn.rpc, txchains, timeout)

        # Create a new block
        # - having an empty mempool (before submitting non-std txs) will simplify further checks.
        conn.rpc.generate(1)

        # Create and send transactions spending non-std outputs.
        nonstd_txs, ds_txs = self.generate_transactons(txchains,
                                                       CScript([OP_TRUE]),
                                                       locking_script,
                                                       num_ds_to_create)
        all_txs = nonstd_txs + ds_txs + additional_txs
        # Shuffle txs if it is required
        if shuffle_txs:
            random.shuffle(all_txs)
        # Send txs if it is required
        if send_txs:
            for tx in all_txs:
                conn.send_message(msg_tx(tx))
        # Return ds set if was requested.
        if len(ds_txs):
            return nonstd_txs + additional_txs, ds_txs, rejected_txs

        return nonstd_txs + additional_txs, rejected_txs

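    # Note: run_scenario2 above returns a 3-tuple (sent_txs, ds_txs, rejected_txs) only when
    # num_ds_to_create > 0; otherwise it returns the 2-tuple (sent_txs, rejected_txs).
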
    # This scenario is used to generate and send multiple subsets of non-standard txs in test cases.
    # - scenario2 is used to prepare the required size of the set
    # - each subset is created from a different funding txn
    #   - as a result, there is no intersection between subsets
    def run_scenario3(self,
                      conn,
                      spend,
                      num_txs_to_create,
                      locking_script,
                      num_ds_to_create=0,
                      shuffle_txs=False,
                      money_to_spend=2000000,
                      timeout=60):
        all_nonstd_txs = []
        all_ds_txs = []
        # Create the set of required txs.
        for tx in spend:
            nonstd_txs, ds_txs, rejected_txs = self.run_scenario2(
                conn, [tx], num_txs_to_create, locking_script,
                num_ds_to_create, [], shuffle_txs, False, money_to_spend,
                timeout)
            all_nonstd_txs += nonstd_txs
            all_ds_txs += ds_txs
        all_txs = all_nonstd_txs + all_ds_txs
        # Shuffle txs if it is required
        if shuffle_txs:
            random.shuffle(all_txs)
        # Send txs
        for tx in all_txs:
            conn.send_message(msg_tx(tx))
        # Return ds set if was required to create.
        if len(all_ds_txs):
            return all_nonstd_txs, all_ds_txs, rejected_txs

        return all_nonstd_txs, rejected_txs

    def get_tests(self):
        # Shorthand for functions
        block = self.chain.next_block
        node = self.nodes[0]
        self.chain.set_genesis_hash(int(node.getbestblockhash(), 16))

        # Create a new block
        block(0, coinbase_pubkey=self.coinbase_pubkey)
        self.chain.save_spendable_output()
        yield self.accepted()

        # Now we need that block to mature so we can spend the coinbase.
        # Also, move block height on beyond Genesis activation.
        test = TestInstance(sync_every_block=False)
        for i in range(600):
            block(5000 + i, coinbase_pubkey=self.coinbase_pubkey)
            test.blocks_and_transactions.append([self.chain.tip, True])
            self.chain.save_spendable_output()
        yield test

        # Collect spendable outputs now to avoid cluttering the code later on.
        out = []
        for i in range(200):
            out.append(self.chain.get_spendable_output())

        self.stop_node(0)

        #
        # Test Case 1 (TC1).
        #
        # - 5000 standard txs used (100 txn chains, each of length 50)
        # - 1 peer connected to node0
        #
        # The number of txs used in the test case.
        tc1_txchains_num = 100
        tc1_tx_chain_length = 50
        # Select funding transactions to use:
        # - tc1_txchains_num funding transactions are needed in this test case.
        spend_txs = self.get_front_slice(out, tc1_txchains_num)
        args = [
            '-checkmempool=0', '-persistmempool=0', '-limitancestorcount=50',
            '-txnvalidationasynchrunfreq=100', '-numstdtxvalidationthreads=6',
            '-numnonstdtxvalidationthreads=2'
        ]
        with self.run_node_with_connections(
                'TC1: {} std txn chains used, each of length {}.'.format(
                    tc1_txchains_num, tc1_tx_chain_length),
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):
            # Run test case.
            std_txs = self.run_scenario1(conn, spend_txs, tc1_txchains_num,
                                         tc1_tx_chain_length,
                                         self.locking_script_1, 5000000000, 1)
            wait_for_ptv_completion(conn,
                                    tc1_txchains_num * tc1_tx_chain_length)
            # Check if required transactions are accepted by the mempool.
            self.check_mempool(conn.rpc, std_txs, timeout=30)
            assert_equal(conn.rpc.getmempoolinfo()['size'],
                         tc1_txchains_num * tc1_tx_chain_length)

        #
        # Test Case 2 (TC2).
        #
        # - 2400 non-standard txs (with a simple locking script) used
        # - 1 peer connected to node0
        #
        # The number of txs used in the test case.
        tc2_txs_num = 2400
        # Select funding transactions to use:
        # - one funding transaction is needed in this test case.
        spend_txs = self.get_front_slice(out, 1)
        args = ['-checkmempool=0', '-persistmempool=0']
        with self.run_node_with_connections(
                'TC2: {} non-std txs used.'.format(tc2_txs_num),
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):
            # Run test case.
            nonstd_txs, rejected_txs = self.run_scenario2(
                conn, spend_txs, tc2_txs_num, self.locking_script_2)
            wait_for_ptv_completion(conn, tc2_txs_num)
            # Check if required transactions are accepted by the mempool.
            self.check_mempool(conn.rpc, nonstd_txs, timeout=30)
            assert_equal(len(rejected_txs), 0)
            assert_equal(conn.rpc.getmempoolinfo()['size'], tc2_txs_num)

        #
        # Test Case 3 (TC3).
        #
        # - 2400 valid non-standard txs (with a simple locking script) used
        #   - 100 double spend txs used
        # - 1 peer connected to node0
        # From the double spends set only 1 txn is accepted by the mempool.
        #
        # The number of txs used in the test case.
        tc3_txs_num = 2400
        ds_txs_num = 100
        # Select funding transactions to use:
        # - one funding transaction is needed in this test case.
        spend_txs = self.get_front_slice(out, 1)
        args = ['-checkmempool=0', '-persistmempool=0']
        with self.run_node_with_connections(
                'TC3: {} non-std txs ({} double spends) used.'.format(
                    tc3_txs_num, ds_txs_num),
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):
            # Run test case.
            nonstd_txs, ds_txs, _ = self.run_scenario2(conn, spend_txs,
                                                       tc3_txs_num,
                                                       self.locking_script_2,
                                                       ds_txs_num)
            wait_for_ptv_completion(conn, len(nonstd_txs) + 1)
            # All txs from the nonstd_txs result set should be accepted
            self.check_mempool_with_subset(conn.rpc, nonstd_txs, timeout=30)
            # There is one more transaction in the mempool, which is a random txn from the ds_txs set
            assert_equal(conn.rpc.getmempoolinfo()['size'],
                         len(nonstd_txs) + 1)
            # Only one txn is allowed to be in the mempool from the given ds set.
            assert_equal(
                len(self.check_intersec_with_mempool(conn.rpc, ds_txs)), 1)

        #
        # Test Case 4 (TC4).
        #
        # - 10 standard txs used (as additional input set)
        # - 2400 non-standard (with a simple locking script) txs used
        #   - 100 double spend txs used
        # - 1 peer connected to node0
        # All input txs are randomly shuffled before sending.
        #
        # The number of txs used in the test case.
        tc4_1_txs_num = 10
        tc4_2_txs_num = 2400
        ds_txs_num = 100
        # Select funding transactions to use:
        # - tc4_1_txs_num+1 funding transactions are needed in this test case.
        spend_txs = self.get_front_slice(out, tc4_1_txs_num)
        spend_txs2 = self.get_front_slice(out, 1)
        args = ['-checkmempool=0', '-persistmempool=0']
        with self.run_node_with_connections(
                'TC4: {} std, {} nonstd txs ({} double spends) used (shuffled set).'
                .format(tc4_1_txs_num, tc4_2_txs_num, ds_txs_num),
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):
            # Run test case.
            # Create some additional std txs to use.
            std_txs = self.get_txchains_n(tc4_1_txs_num, 1, spend_txs,
                                          CScript(), self.locking_script_1,
                                          2000000, 10)
            # Create and send generated txs.
            std_and_nonstd_txs, ds_txs, _ = self.run_scenario2(
                conn,
                spend_txs2,
                tc4_2_txs_num,
                self.locking_script_2,
                ds_txs_num,
                std_txs,
                shuffle_txs=True)
            wait_for_ptv_completion(conn, len(std_and_nonstd_txs) + 1)
            # All txs from the std_and_nonstd_txs result set should be accepted
            self.check_mempool_with_subset(conn.rpc,
                                           std_and_nonstd_txs,
                                           timeout=30)
            # There is one more transaction in the mempool. It is a random txn from the ds_txs set
            assert_equal(conn.rpc.getmempoolinfo()['size'],
                         len(std_and_nonstd_txs) + 1)
            # Only one txn is allowed to be accepted by the mempool, from the given double spends txn set.
            assert_equal(
                len(self.check_intersec_with_mempool(conn.rpc, ds_txs)), 1)

        #
        # Test Case 5 (TC5).
        #
        # - 24K=10x2400 non-standard txs (with a simple locking script) used
        #   - 1K=10x100 double spend txs used
        # - 1 peer connected to node0
        # From each double spend set only 1 txn is accepted by the mempool.
        # - Valid non-standard txs are sent first, then double spend txs (this approach maximises the ratio of 'txn-double-spend-detected' reject msgs)
        #
        # The number of txs used in a single subset.
        tc5_txs_num = 2400
        ds_txs_num = 100
        # The number of subsets used in the test case.
        tc5_num_of_subsets = 10
        # Select funding transactions to use:
        # - tc5_num_of_subsets funding transactions are needed in this test case.
        spend_txs = self.get_front_slice(out, tc5_num_of_subsets)
        args = ['-checkmempool=0', '-persistmempool=0']
        with self.run_node_with_connections(
                'TC5: {} non-std txs ({} double spends) used.'.format(
                    tc5_txs_num * tc5_num_of_subsets,
                    ds_txs_num * tc5_num_of_subsets),
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):
            # Run test case.
            nonstd_txs, ds_txs, rejected_txs = self.run_scenario3(
                conn, spend_txs, tc5_txs_num, self.locking_script_2,
                ds_txs_num)
            wait_for_ptv_completion(conn,
                                    len(nonstd_txs) + tc5_num_of_subsets,
                                    check_interval=0.5)
            # All txs from the nonstd_txs result set should be accepted
            self.check_mempool_with_subset(conn.rpc, nonstd_txs, timeout=60)
            # There are tc5_num_of_subsets more transactions in the mempool (random txns from the ds_txs set)
            assert_equal(conn.rpc.getmempoolinfo()['size'],
                         len(nonstd_txs) + tc5_num_of_subsets)
            # Only tc5_num_of_subsets txns are allowed to be in the mempool from the given ds set.
            assert_equal(
                len(self.check_intersec_with_mempool(conn.rpc, ds_txs)),
                tc5_num_of_subsets)

        #
        # Test Case 6 (TC6).
        #
        # - 24K=10x2400 non-standard txs (with a simple locking script) used
        #   - 1K=10x100 double spend txs used
        # - 1 peer connected to node0
        # From each double spend set only 1 txn is accepted by the mempool.
        # All input txs are randomly shuffled before sending.
        # - the txs set is shuffled first, which significantly decreases the number of 'txn-double-spend-detected' reject msgs compared to TC5
        # - in this case the 'txn-mempool-conflict' reject reason occurs most often
        #
        # The number of txs used in a single subset.
        tc6_txs_num = 2400
        ds_txs_num = 100
        # The number of subsets used in the test case.
        tc6_num_of_subsets = 10
        # Select funding transactions to use:
        # - tc6_num_of_subsets funding transactions are needed in this test case.
        spend_txs = self.get_front_slice(out, tc6_num_of_subsets)
        args = ['-checkmempool=0', '-persistmempool=0']
        with self.run_node_with_connections(
                'TC6: {} non-std txs ({} double spends) used (shuffled set).'.
                format(tc6_txs_num * tc6_num_of_subsets,
                       ds_txs_num * tc6_num_of_subsets),
                0,
                args + self.default_args,
                number_of_connections=1) as (conn, ):
            # Run test case.
            nonstd_txs, ds_txs, rejected_txs = self.run_scenario3(
                conn,
                spend_txs,
                tc6_txs_num,
                self.locking_script_2,
                ds_txs_num,
                shuffle_txs=True)
            wait_for_ptv_completion(conn,
                                    len(nonstd_txs) + tc6_num_of_subsets,
                                    check_interval=0.5)
            # All txs from the nonstd_txs result set should be accepted
            self.check_mempool_with_subset(conn.rpc, nonstd_txs, timeout=60)
            # There are tc6_num_of_subsets more transactions in the mempool (random txns from the ds_txs set)
            assert_equal(conn.rpc.getmempoolinfo()['size'],
                         len(nonstd_txs) + tc6_num_of_subsets)
            # Only tc6_num_of_subsets txns are allowed to be in the mempool from the given ds set.
            assert_equal(
                len(self.check_intersec_with_mempool(conn.rpc, ds_txs)),
                tc6_num_of_subsets)
class FullBlockTest(ComparisonTestFramework):

    # Can either run this test as 1 node with expected answers, or two and compare them.
    # Change the "outcome" variable from each TestInstance object to only do
    # the comparison.

    def __init__(self):
        super().__init__()
        self.num_nodes = 1
        self.block_heights = {}
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"fatstacks")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        self.tip = None
        self.blocks = {}
        self.excessive_block_size = 16 * ONE_MEGABYTE
        self.extra_args = [['-norelaypriority',
                            '-whitelist=127.0.0.1',
                            '-limitancestorcount=9999',
                            '-limitancestorsize=9999',
                            '-limitdescendantcount=9999',
                            '-limitdescendantsize=9999',
                            '-maxmempool=999',
                            "-excessiveblocksize=%d"
                            % self.excessive_block_size]]

    def add_options(self, parser):
        super().add_options(parser)
        parser.add_option(
            "--runbarelyexpensive", dest="runbarelyexpensive", default=True)

    def run_test(self):
        self.test = TestManager(self, self.options.tmpdir)
        self.test.add_all_connections(self.nodes)
        # Start up network handling in another thread
        NetworkThread().start()
        # Set the excessive block size (16MB) as the initial condition
        self.nodes[0].setexcessiveblock(self.excessive_block_size)
        self.test.run()

    def add_transactions_to_block(self, block, tx_list):
        [tx.rehash() for tx in tx_list]
        block.vtx.extend(tx_list)

    # this is a little handier to use than the version in blocktools.py
    def create_tx(self, spend_tx, n, value, script=CScript([OP_TRUE])):
        tx = create_transaction(spend_tx, n, b"", value, script)
        return tx

    # sign a transaction, using the key we know about
    # this signs input 0 in tx, which is assumed to be spending output n in
    # spend_tx
    def sign_tx(self, tx, spend_tx, n):
        scriptPubKey = bytearray(spend_tx.vout[n].scriptPubKey)
        if (scriptPubKey[0] == OP_TRUE):  # an anyone-can-spend
            tx.vin[0].scriptSig = CScript()
            return
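        # SIGHASH_FORKID selects the post-fork (BIP143-style) signature digest;
        # signatures without it should be rejected on this chain, so it is
        # always OR'd into the sighash type here.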
        sighash = SignatureHashForkId(
            spend_tx.vout[n].scriptPubKey, tx, 0, SIGHASH_ALL | SIGHASH_FORKID, spend_tx.vout[n].nValue)
        tx.vin[0].scriptSig = CScript(
            [self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))])

    def create_and_sign_transaction(self, spend_tx, n, value, script=CScript([OP_TRUE])):
        tx = self.create_tx(spend_tx, n, value, script)
        self.sign_tx(tx, spend_tx, n)
        tx.rehash()
        return tx

    def next_block(self, number, spend=None, additional_coinbase_value=0, script=None, extra_sigops=0, block_size=0, solve=True):
        """
        Create a block on top of self.tip, and advance self.tip to point to the new block.
        If spend is specified, then 1 satoshi will be spent from that to an anyone-can-spend
        output, and the rest will go to fees.
        """
        if self.tip == None:
            base_block_hash = self.genesis_hash
            block_time = int(time.time()) + 1
        else:
            base_block_hash = self.tip.sha256
            block_time = self.tip.nTime + 1
        # First create the coinbase
        height = self.block_heights[base_block_hash] + 1
        coinbase = create_coinbase(height, self.coinbase_pubkey)
        coinbase.vout[0].nValue += additional_coinbase_value
        if (spend != None):
            coinbase.vout[0].nValue += spend.tx.vout[
                spend.n].nValue - 1  # all but one satoshi to fees
        coinbase.rehash()
        block = create_block(base_block_hash, coinbase, block_time)
        spendable_output = None
        if (spend != None):
            tx = CTransaction()
            # no signature yet
            tx.vin.append(
                CTxIn(COutPoint(spend.tx.sha256, spend.n), b"", 0xffffffff))
            # We put some random data into the first transaction of the chain
            # to randomize ids
            tx.vout.append(
                CTxOut(0, CScript([random.randint(0, 255), OP_DROP, OP_TRUE])))
            if script == None:
                tx.vout.append(CTxOut(1, CScript([OP_TRUE])))
            else:
                tx.vout.append(CTxOut(1, script))
            spendable_output = PreviousSpendableOutput(tx, 0)

            # Now sign it if necessary
            scriptSig = b""
            scriptPubKey = bytearray(spend.tx.vout[spend.n].scriptPubKey)
            if (scriptPubKey[0] == OP_TRUE):  # looks like an anyone-can-spend
                scriptSig = CScript([OP_TRUE])
            else:
                # We have to actually sign it
                sighash = SignatureHashForkId(
                    spend.tx.vout[spend.n].scriptPubKey, tx, 0, SIGHASH_ALL | SIGHASH_FORKID, spend.tx.vout[spend.n].nValue)
                scriptSig = CScript(
                    [self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))])
            tx.vin[0].scriptSig = scriptSig
            # Now add the transaction to the block
            self.add_transactions_to_block(block, [tx])
            block.hashMerkleRoot = block.calc_merkle_root()
        if spendable_output != None and block_size > 0:
            while len(block.serialize()) < block_size:
                tx = CTransaction()
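                # Pad the block with data-carrier outputs: the constant below
                # approximates this padding transaction's overhead (everything
                # except the big script), the cap keeps any single script
                # bounded so later iterations absorb the remainder, and any
                # requested extra_sigops are packed in as OP_CHECKSIGs.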
                script_length = block_size - len(block.serialize()) - 79
                if script_length > 510000:
                    script_length = 500000
                tx_sigops = min(
                    extra_sigops, script_length, MAX_TX_SIGOPS_COUNT)
                extra_sigops -= tx_sigops
                script_pad_len = script_length - tx_sigops
                script_output = CScript(
                    [b'\x00' * script_pad_len] + [OP_CHECKSIG] * tx_sigops)
                tx.vout.append(CTxOut(0, CScript([OP_TRUE])))
                tx.vout.append(CTxOut(0, script_output))
                tx.vin.append(
                    CTxIn(COutPoint(spendable_output.tx.sha256, spendable_output.n)))
                spendable_output = PreviousSpendableOutput(tx, 0)
                self.add_transactions_to_block(block, [tx])
            block.hashMerkleRoot = block.calc_merkle_root()
            # Make sure the math above worked out to produce the correct block size
            # (the math will fail if there are too many transactions in the block)
            assert_equal(len(block.serialize()), block_size)
            # Make sure all the requested sigops have been included
            assert_equal(extra_sigops, 0)
        if solve:
            block.solve()
        self.tip = block
        self.block_heights[block.sha256] = height
        assert number not in self.blocks
        self.blocks[number] = block
        return block

    def get_tests(self):
        self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
        self.block_heights[self.genesis_hash] = 0
        spendable_outputs = []

        # save the current tip so it can be spent by a later block
        def save_spendable_output():
            spendable_outputs.append(self.tip)

        # get an output that we previously marked as spendable
        def get_spendable_output():
            return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)

        # returns a test case that asserts that the current tip was accepted
        def accepted():
            return TestInstance([[self.tip, True]])

        # returns a test case that asserts that the current tip was rejected
        def rejected(reject=None):
            if reject is None:
                return TestInstance([[self.tip, False]])
            else:
                return TestInstance([[self.tip, reject]])

        # move the tip back to a previous block
        def tip(number):
            self.tip = self.blocks[number]

        # adds transactions to the block and updates state
        def update_block(block_number, new_transactions):
            block = self.blocks[block_number]
            self.add_transactions_to_block(block, new_transactions)
            old_sha256 = block.sha256
            block.hashMerkleRoot = block.calc_merkle_root()
            block.solve()
            # Update the internal state just like in next_block
            self.tip = block
            if block.sha256 != old_sha256:
                self.block_heights[
                    block.sha256] = self.block_heights[old_sha256]
                del self.block_heights[old_sha256]
            self.blocks[block_number] = block
            return block

        # shorthand for functions
        block = self.next_block

        # Create a new block
        block(0)
        save_spendable_output()
        yield accepted()

        # Now we need that block to mature so we can spend the coinbase.
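        # (99 more blocks on top of block 0 give its coinbase the 100
        # confirmations it needs before it can be spent.)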
        test = TestInstance(sync_every_block=False)
        for i in range(99):
            block(5000 + i)
            test.blocks_and_transactions.append([self.tip, True])
            save_spendable_output()
        yield test

        # collect spendable outputs now to avoid cluttering the code later on
        out = []
        for i in range(100):
            out.append(get_spendable_output())

        # Let's build some blocks and test them.
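        # Blocks 1..16 grow from 1MB to 16MB, all within the 16MB excessive
        # block size configured above, so each of them should be accepted.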
        for i in range(16):
            n = i + 1
            block(n, spend=out[i], block_size=n * ONE_MEGABYTE)
            yield accepted()

        # block of maximal size
        block(17, spend=out[16], block_size=self.excessive_block_size)
        yield accepted()

        # Reject oversized blocks with bad-blk-length error
        block(18, spend=out[17], block_size=self.excessive_block_size + 1)
        yield rejected(RejectResult(16, b'bad-blk-length'))

        # Rewind bad block.
        tip(17)

        # Accept many sigops
        lots_of_checksigs = CScript(
            [OP_CHECKSIG] * (MAX_BLOCK_SIGOPS_PER_MB - 1))
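        # The P2PK coinbase output already contributes one sigop, so
        # MAX_BLOCK_SIGOPS_PER_MB - 1 here should put the block exactly at the
        # per-MB limit; block 20 below goes one over and must be rejected.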
        block(
            19, spend=out[17], script=lots_of_checksigs, block_size=ONE_MEGABYTE)
        yield accepted()

        too_many_blk_checksigs = CScript(
            [OP_CHECKSIG] * MAX_BLOCK_SIGOPS_PER_MB)
        block(
            20, spend=out[18], script=too_many_blk_checksigs, block_size=ONE_MEGABYTE)
        yield rejected(RejectResult(16, b'bad-blk-sigops'))

        # Rewind bad block
        tip(19)

        # Accept 40k sigops per block > 1MB and <= 2MB
        block(21, spend=out[18], script=lots_of_checksigs,
              extra_sigops=MAX_BLOCK_SIGOPS_PER_MB, block_size=ONE_MEGABYTE + 1)
        yield accepted()

        # Accept 40k sigops per block > 1MB and <= 2MB
        block(22, spend=out[19], script=lots_of_checksigs,
              extra_sigops=MAX_BLOCK_SIGOPS_PER_MB, block_size=2 * ONE_MEGABYTE)
        yield accepted()

        # Reject more than 40k sigops per block > 1MB and <= 2MB.
        block(23, spend=out[20], script=lots_of_checksigs,
              extra_sigops=MAX_BLOCK_SIGOPS_PER_MB + 1, block_size=ONE_MEGABYTE + 1)
        yield rejected(RejectResult(16, b'bad-blk-sigops'))

        # Rewind bad block
        tip(22)

        # Reject more than 40k sigops per block > 1MB and <= 2MB.
        block(24, spend=out[20], script=lots_of_checksigs,
              extra_sigops=MAX_BLOCK_SIGOPS_PER_MB + 1, block_size=2 * ONE_MEGABYTE)
        yield rejected(RejectResult(16, b'bad-blk-sigops'))

        # Rewind bad block
        tip(22)

        # Accept 60k sigops per block > 2MB and <= 3MB
        block(25, spend=out[20], script=lots_of_checksigs, extra_sigops=2 *
              MAX_BLOCK_SIGOPS_PER_MB, block_size=2 * ONE_MEGABYTE + 1)
        yield accepted()

        # Accept 60k sigops per block > 2MB and <= 3MB
        block(26, spend=out[21], script=lots_of_checksigs,
              extra_sigops=2 * MAX_BLOCK_SIGOPS_PER_MB, block_size=3 * ONE_MEGABYTE)
        yield accepted()

        # Reject more than 60k sigops per block > 2MB and <= 3MB.
        block(27, spend=out[22], script=lots_of_checksigs, extra_sigops=2 *
              MAX_BLOCK_SIGOPS_PER_MB + 1, block_size=2 * ONE_MEGABYTE + 1)
        yield rejected(RejectResult(16, b'bad-blk-sigops'))

        # Rewind bad block
        tip(26)

        # Reject more than 60k sigops per block > 2MB and <= 3MB.
        block(28, spend=out[22], script=lots_of_checksigs, extra_sigops=2 *
              MAX_BLOCK_SIGOPS_PER_MB + 1, block_size=3 * ONE_MEGABYTE)
        yield rejected(RejectResult(16, b'bad-blk-sigops'))

        # Rewind bad block
        tip(26)

        # Too many sigops in one txn
        too_many_tx_checksigs = CScript(
            [OP_CHECKSIG] * (MAX_BLOCK_SIGOPS_PER_MB + 1))
        block(
            29, spend=out[22], script=too_many_tx_checksigs, block_size=ONE_MEGABYTE + 1)
        yield rejected(RejectResult(16, b'bad-txn-sigops'))

        # Rewind bad block
        tip(26)

        # P2SH
        # Build the redeem script, hash it, use hash to create the p2sh script
        redeem_script = CScript([self.coinbase_pubkey] + [
                                OP_2DUP, OP_CHECKSIGVERIFY] * 5 + [OP_CHECKSIG])
        redeem_script_hash = hash160(redeem_script)
        p2sh_script = CScript([OP_HASH160, redeem_script_hash, OP_EQUAL])

        # Create a p2sh transaction
        p2sh_tx = self.create_and_sign_transaction(
            out[22].tx, out[22].n, 1, p2sh_script)

        # Add the transaction to the block
        block(30)
        update_block(30, [p2sh_tx])
        yield accepted()

        # Creates a new transaction using the p2sh transaction included in the
        # last block
        def spend_p2sh_tx(output_script=CScript([OP_TRUE])):
            # Create the transaction
            spent_p2sh_tx = CTransaction()
            spent_p2sh_tx.vin.append(CTxIn(COutPoint(p2sh_tx.sha256, 0), b''))
            spent_p2sh_tx.vout.append(CTxOut(1, output_script))
            # Sign the transaction using the redeem script
            sighash = SignatureHashForkId(
                redeem_script, spent_p2sh_tx, 0, SIGHASH_ALL | SIGHASH_FORKID, p2sh_tx.vout[0].nValue)
            sig = self.coinbase_key.sign(sighash) + bytes(
                bytearray([SIGHASH_ALL | SIGHASH_FORKID]))
            spent_p2sh_tx.vin[0].scriptSig = CScript([sig, redeem_script])
            spent_p2sh_tx.rehash()
            return spent_p2sh_tx

        # Sigops p2sh limit
        p2sh_sigops_limit = MAX_BLOCK_SIGOPS_PER_MB - \
            redeem_script.GetSigOpCount(True)
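        # GetSigOpCount(True) is the accurate sigop count of the redeem script
        # (charged when the P2SH output is spent), so this leaves the per-MB
        # budget available for the spending transaction's own output script.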
        # Too many sigops in one p2sh txn
        too_many_p2sh_sigops = CScript([OP_CHECKSIG] * (p2sh_sigops_limit + 1))
        block(31, spend=out[23], block_size=ONE_MEGABYTE + 1)
        update_block(31, [spend_p2sh_tx(too_many_p2sh_sigops)])
        yield rejected(RejectResult(16, b'bad-txn-sigops'))

        # Rewind bad block
        tip(30)

        # Max sigops in one p2sh txn
        max_p2sh_sigops = CScript([OP_CHECKSIG] * (p2sh_sigops_limit))
        block(32, spend=out[23], block_size=ONE_MEGABYTE + 1)
        update_block(32, [spend_p2sh_tx(max_p2sh_sigops)])
        yield accepted()

        # Check that compact block also work for big blocks
        node = self.nodes[0]
        peer = TestNode()
        peer.add_connection(NodeConn('127.0.0.1', p2p_port(0), node, peer))

        # Start up network handling in another thread and wait for connection
        # to be established
        NetworkThread().start()
        peer.wait_for_verack()

        # Wait for SENDCMPCT
        def received_sendcmpct():
            return (peer.last_sendcmpct != None)
        got_sendcmpct = wait_until(received_sendcmpct, timeout=30)
        assert(got_sendcmpct)

        sendcmpct = msg_sendcmpct()
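        # Request version 1 compact blocks; announce=True asks the node to push
        # new blocks to this peer as cmpctblock messages (BIP152 high-bandwidth
        # mode).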
        sendcmpct.version = 1
        sendcmpct.announce = True
        peer.send_and_ping(sendcmpct)

        # Exchange headers
        def received_getheaders():
            return (peer.last_getheaders != None)
        got_getheaders = wait_until(received_getheaders, timeout=30)
        assert(got_getheaders)

        # Return the favor
        peer.send_message(peer.last_getheaders)

        # Wait for the header list
        def received_headers():
            return (peer.last_headers != None)
        got_headers = wait_until(received_headers, timeout=30)
        assert(got_headers)

        # It looks like we know about the same headers!
        peer.send_message(peer.last_headers)

        # Send a block
        b33 = block(33, spend=out[24], block_size=ONE_MEGABYTE + 1)
        yield accepted()

        # Check that the node forwards it via compact block
        def received_block():
            return (peer.last_cmpctblock != None)
        got_cmpctblock = wait_until(received_block, timeout=30)
        assert(got_cmpctblock)

        # Was it our block?
        cmpctblk_header = peer.last_cmpctblock.header_and_shortids.header
        cmpctblk_header.calc_sha256()
        assert(cmpctblk_header.sha256 == b33.sha256)

        # Send a bigger block
        peer.clear_block_data()
        b34 = block(34, spend=out[25], block_size=8 * ONE_MEGABYTE)
        yield accepted()

        # Check that the node forwards it via compact block
        got_cmpctblock = wait_until(received_block, timeout=30)
        assert(got_cmpctblock)

        # Was it our block?
        cmpctblk_header = peer.last_cmpctblock.header_and_shortids.header
        cmpctblk_header.calc_sha256()
        assert(cmpctblk_header.sha256 == b34.sha256)

        # Let's send a compact block and see if the node accepts it.
        # First, we generate the block and send all transactions to the mempool
        b35 = block(35, spend=out[26], block_size=8 * ONE_MEGABYTE)
        for i in range(1, len(b35.vtx)):
            node.sendrawtransaction(ToHex(b35.vtx[i]), True)

        # Now we create the compact block and send it
        comp_block = HeaderAndShortIDs()
        comp_block.initialize_from_block(b35)
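        # initialize_from_block prefills only the coinbase by default, so the
        # node has to reconstruct the rest of b35 from the transactions we just
        # fed into its mempool.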
        peer.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))

        # Check that compact block is received properly
        assert(int(node.getbestblockhash(), 16) == b35.sha256)
class BigBlockTests(BitcoinTestFramework):

    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 3

        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"horsebattery")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        self.locking_script = CScript([self.coinbase_pubkey, OP_CHECKSIG])
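
        # Relaxed node settings so multi-GB blocks and transactions can be
        # built and validated in time: Genesis rules from height 1, a 5GB
        # mining block cap, a 10GB mempool, generous transaction validation
        # durations and extended block download timeouts.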

        self.nodeArgs = [ '-genesisactivationheight=1',
                          '-blockmaxsize={}'.format(ONE_GIGABYTE * 5),
                          '-maxmempool=10000',
                          '-maxnonstdtxvalidationduration=100000',
                          '-maxtxnvalidatorasynctasksrunduration=100001',
                          '-blockdownloadtimeoutbasepercent=300' ]

        self.extra_args = [self.nodeArgs] * self.num_nodes

    def setup_nodes(self):
        self.add_nodes(self.num_nodes, self.extra_args, timewait=int(1200 * self.options.timeoutfactor))
        self.start_nodes()

    # Create and send block with coinbase
    def make_coinbase(self, conn):
        tip = conn.rpc.getblock(conn.rpc.getbestblockhash())

        coinbase_tx = create_coinbase(tip["height"] + 1, self.coinbase_pubkey)
        coinbase_tx.rehash()

        block = create_block(int(tip["hash"], 16), coinbase_tx, tip["time"] + 1)
        block.solve()

        conn.send_message(msg_block(block))
        wait_until(lambda: conn.rpc.getbestblockhash() == block.hash, timeout=int(30 * self.options.timeoutfactor))

        return coinbase_tx

    def sign_tx(self, tx, spendtx, n):
        scriptPubKey = bytearray(spendtx.vout[n].scriptPubKey)
        sighash = SignatureHashForkId(spendtx.vout[n].scriptPubKey, tx, 0, SIGHASH_ALL | SIGHASH_FORKID, spendtx.vout[n].nValue)
        tx.vin[0].scriptSig = CScript([self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))])

    # Generate some large transactions and put them in the mempool
    def create_and_send_transactions(self, conn, spendtx, num_of_transactions, money_to_spend=5000000000):
        for i in range(0, num_of_transactions):
            money_to_spend = money_to_spend - 500000000  # Large fee required for big txns
            tx = create_tx(spendtx, 0, money_to_spend, script=CScript([OP_DROP, OP_TRUE]))
            tx.vout.append(CTxOut(0, CScript([OP_FALSE, OP_RETURN, bytearray([0x00] * (ONE_MEGABYTE * 880))])))
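            # Each transaction carries an ~880MB OP_RETURN payload, so a handful
            # of them pushes the mempool (and the block mined from it) past the
            # 4GB mark this test is interested in.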
            self.sign_tx(tx, spendtx, 0)
            tx.rehash()

            conn.send_message(msg_tx(tx))
            wait_until(lambda: tx.hash in conn.rpc.getrawmempool(), timeout=int(360 * self.options.timeoutfactor))
            logger.info("Submitted txn {} of {}".format(i+1, num_of_transactions))
            assert conn.rpc.getmempoolinfo()['size'] == i+1

            spendtx = tx

    def run_test(self):
        # Get out of IBD
        self.nodes[0].generate(1)
        self.sync_all()

        # Stop node so we can restart it with our connections
        self.stop_node(0)

        # Disconnect node1 and node2 for now
        disconnect_nodes_bi(self.nodes, 1, 2)

        connArgs = [ { "versionNum":MY_VERSION }, { "versionNum":70015 } ]
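        # One connection at our current protocol version and one pinned to the
        # older 70015; the >4GB block mined later should only be relayed in
        # full to the new-version peer.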
        with self.run_node_with_connections("Test old and new protocol versions", 0, self.nodeArgs, number_of_connections=2,
                                            connArgs=connArgs, cb_class=MyConnCB) as (newVerConn,oldVerConn):
            assert newVerConn.connected
            assert oldVerConn.connected

            # Generate small block, verify we get it over both connections
            self.nodes[0].generate(1)
            wait_until(lambda: newVerConn.cb.block_count == 1, timeout=int(30 * self.options.timeoutfactor))
            wait_until(lambda: oldVerConn.cb.block_count == 1, timeout=int(30 * self.options.timeoutfactor))

            # Get us a spendable output
            coinbase_tx = self.make_coinbase(newVerConn)
            self.nodes[0].generate(100)

            # Put some large txns into the node's mempool until it exceeds 4GB in size
            self.create_and_send_transactions(newVerConn, coinbase_tx, 5)

            # Reconnect node0 and node2 and sync their blocks. Node2 will end up receiving the
            # large block via compact blocks
            connect_nodes(self.nodes, 0, 2)
            sync_blocks(itemgetter(0,2)(self.nodes))

            # Mine a >4GB block, verify we only get it over the new connection
            old_block_count = newVerConn.cb.block_count
            logger.info("Mining a big block")
            self.nodes[0].generate(1)
            assert(self.nodes[0].getmempoolinfo()['size'] == 0)
            logger.info("Waiting for block to arrive at test")
            wait_until(lambda: newVerConn.cb.block_count == old_block_count+1, timeout=int(1200 * self.options.timeoutfactor))

            # Look for log message saying we won't send to old peer
            wait_until(lambda: check_for_log_msg(self, "cannot be sent because it exceeds max P2P message limit", "/node0"))

            # Verify node2 gets the big block via a (not very) compact block
            wait_until(lambda: self.nodes[0].getbestblockhash() == self.nodes[2].getbestblockhash())
            peerinfo = self.nodes[2].getpeerinfo()
            assert(peerinfo[0]['bytesrecv_per_msg']['cmpctblock'] > 0)
            assert(peerinfo[0]['bytesrecv_per_msg']['blocktxn'] > 0)

            # Reconnect node0 to node1
            logger.info("Syncing bitcoind nodes to big block")
            connect_nodes(self.nodes, 0, 1)
            self.sync_all(timeout=int(1200 * self.options.timeoutfactor))

            # Verify node1 also got the big block
            assert(self.nodes[0].getbestblockhash() == self.nodes[1].getbestblockhash())