Example #1
 def create_test_block(self, txs):
     block = create_block(self.tip, create_coinbase(self.tipheight + 1),
                          self.last_block_time + 600)
     block.vtx.extend(txs)
     block.nHeight = self.tipheight + 1
     prepare_block(block)
     return block
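Nearly every example on this page finalizes its block with prepare_block() before relaying or submitting it. The helper itself is not listed here; as a rough orientation only, here is a hypothetical sketch consistent with how it is called (after block.vtx has been modified, with the block usually sent immediately afterwards). The body and the assumption that it also does the proof of work are inferred from how these examples use the block object, not taken from the real implementation.

 def prepare_block(block):
     # Hypothetical sketch only -- the real helper is not shown on this page.
     # Judging by its call sites, it refreshes the header fields derived from
     # the block's contents (on this chain it probably also updates
     # hashExtendedMetadata and nSize) ...
     block.hashMerkleRoot = block.calc_merkle_root()
     block.update_size()
     # ... and, by assumption, grinds out valid proof of work so the block
     # passes the high-hash check when it is relayed right afterwards.
     block.solve()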
Example #2
    def run_test(self):
        node0 = self.nodes[0]
        node0conn = node0.add_p2p_connection(BaseNode())

        tip = int(node0.getbestblockhash(), 16)
        height = node0.getblockcount() + 1
        time = node0.getblock(node0.getbestblockhash())['time'] + 1

        blocks = []
        for i in range(NUM_IBD_BLOCKS * 2):
            block = create_block(tip, create_coinbase(height), time)
            block.nHeight = height
            prepare_block(block)
            blocks.append(block)
            tip = block.sha256
            height += 1
            time += 1

        # Headers need to be sent in-order
        for b in blocks:
            node0conn.send_header(b)

        # Send blocks in some random order
        for b in random.sample(blocks, len(blocks)):
            node0conn.send_block(b)

        # The node should eventually, completely sync without getting stuck
        def node_synced():
            return node0.getbestblockhash() == blocks[-1].hash
        wait_until(node_synced)
Example #3
    def test_null_locators(self, test_node, inv_node):
        tip = self.nodes[0].getblockheader(self.nodes[0].generatetoaddress(
            1, self.nodes[0].get_deterministic_priv_key().address)[0])
        tip_hash = int(tip["hash"], 16)

        inv_node.check_last_inv_announcement(inv=[tip_hash])
        test_node.check_last_inv_announcement(inv=[tip_hash])

        self.log.info(
            "Verify getheaders with null locator and valid hashstop returns headers."
        )
        test_node.clear_block_announcements()
        test_node.send_get_headers(locator=[], hashstop=tip_hash)
        test_node.check_last_headers_announcement(headers=[tip_hash])

        self.log.info(
            "Verify getheaders with null locator and invalid hashstop does not return headers."
        )
        height = tip["height"] + 1
        block = create_block(int(tip["hash"], 16), create_coinbase(height),
                             tip["mediantime"] + 1)
        block.nHeight = height
        prepare_block(block)
        test_node.send_header_for_blocks([block])
        test_node.clear_block_announcements()
        test_node.send_get_headers(locator=[], hashstop=int(block.hash, 16))
        test_node.sync_with_ping()
        assert_equal(test_node.block_announced, False)
        inv_node.clear_block_announcements()
        test_node.send_message(msg_block(block))
        inv_node.check_last_inv_announcement(inv=[int(block.hash, 16)])
Example #4
 def build_block_on_tip(self, node):
     height = node.getblockcount() + 1
     tip = node.getbestblockhash()
     mtp = node.getblockheader(tip)['mediantime']
     block = create_block(
         int(tip, 16), create_coinbase(height), mtp + 1)
     block.nHeight = height
     prepare_block(block)
     return block
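As a usage reference, Example #12 further down relays a block built this way over the P2P connection and then checks that it became the new tip:

     block = self.build_block_on_tip(self.nodes[0])
     self.test_node.send_and_ping(msg_block(block))
     assert int(self.nodes[0].getbestblockhash(), 16) == block.sha256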
Example #5
    def run_test(self):
        node = self.nodes[0]  # alias

        node.add_p2p_connection(P2PTxInvStore())

        self.log.info("Create a new transaction and wait until it's broadcast")
        txid = int(node.sendtoaddress(node.getnewaddress(), 1), 16)

        # Wallet rebroadcast is first scheduled 1 sec after startup (see
        # nNextResend in ResendWalletTransactions()). Sleep for just over a
        # second to be certain that it has been called before the first
        # setmocktime call below.
        time.sleep(1.1)

        # Can take a few seconds due to transaction trickling
        wait_until(lambda: node.p2p.tx_invs_received[txid] >= 1,
                   lock=mininode_lock)

        # Add a second peer since txs aren't rebroadcast to the same peer (see
        # filterInventoryKnown)
        node.add_p2p_connection(P2PTxInvStore())

        self.log.info("Create a block")
        # Create and submit a block without the transaction.
        # Transactions are only rebroadcast if there has been a block at least five minutes
        # after the last time we tried to broadcast. Use mocktime and give an
        # extra minute to be sure.
        block_time = int(time.time()) + 6 * 60
        node.setmocktime(block_time)
        height = node.getblockcount() + 1
        block = create_block(int(node.getbestblockhash(), 16),
                             create_coinbase(height), block_time)
        block.nHeight = height
        prepare_block(block)
        node.submitblock(ToHex(block))

        node.syncwithvalidationinterfacequeue()
        now = int(time.time())

        # Transaction should not be rebroadcast within first 12 hours
        # Leave 2 mins for buffer
        twelve_hrs = 12 * 60 * 60
        two_min = 2 * 60
        node.setmocktime(now + twelve_hrs - two_min)
        # ensure enough time has passed for rebroadcast attempt to occur
        time.sleep(2)
        assert_equal(txid in node.p2ps[1].get_invs(), False)

        self.log.info("Bump time & check that transaction is rebroadcast")
        # Transaction should be rebroadcast approximately 24 hours in the future,
        # but can range from 12 to 36 hours. So bump 36 hours to be sure.
        node.setmocktime(now + 36 * 60 * 60)
        wait_until(lambda: node.p2ps[1].tx_invs_received[txid] >= 1,
                   lock=mininode_lock)
Example #6
 def update_block(block_number, new_transactions):
     block = self.blocks[block_number]
     block.vtx.extend(new_transactions)
     old_sha256 = block.sha256
     prepare_block(block)
     # Update the internal state just like in next_block
     self.tip = block
     if block.sha256 != old_sha256:
         self.block_heights[
             block.sha256] = self.block_heights[old_sha256]
         del self.block_heights[old_sha256]
     self.blocks[block_number] = block
     return block
Example #7
    def build_chain(self, nblocks, prev_hash, prev_height, prev_median_time):
        blocks = []
        for _ in range(nblocks):
            coinbase = create_coinbase(prev_height + 1)
            block_time = prev_median_time + 1
            block = create_block(int(prev_hash, 16), coinbase, block_time)
            block.nHeight = prev_height + 1
            prepare_block(block)

            blocks.append(block)
            prev_hash = block.hash
            prev_height += 1
            prev_median_time = block_time
        return blocks
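A chain returned by build_chain is typically announced headers-first and only then delivered as full blocks, as Example #2 above does. A condensed sketch of that pattern follows; the peer object with send_header/send_block helpers is the same kind of BaseNode connection used in Example #2 and is an assumption here, not part of this example.

        tip_hash = node.getbestblockhash()
        blocks = self.build_chain(10, tip_hash, node.getblockcount(),
                                  node.getblockheader(tip_hash)['mediantime'])
        # Headers need to be sent in-order
        for b in blocks:
            peer.send_header(b)
        for b in blocks:
            peer.send_block(b)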
Example #8
    def build_block(self, parent, transactions=(), nTime=None):
        """Make a new block with an OP_1 coinbase output.

        Requires parent to have its height registered."""
        parent.calc_sha256()
        block_height = self.block_heights[parent.sha256] + 1
        block_time = (parent.nTime + 1) if nTime is None else nTime

        block = create_block(parent.sha256, create_coinbase(block_height),
                             block_time)
        block.nHeight = block_height
        block.vtx.extend(transactions)
        prepare_block(block)
        self.block_heights[block.sha256] = block_height
        return block
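A minimal usage sketch, assuming the parent block's height has already been registered in self.block_heights as the docstring requires (parent_block and tx are placeholders, not names from this example):

        child = self.build_block(parent_block, transactions=[tx])
        next_child = self.build_block(child, nTime=child.nTime + 600)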
Example #9
    def build_block_with_transactions(self, node, utxo, num_transactions):
        block = self.build_block_on_tip(node)

        for i in range(num_transactions):
            tx = CTransaction()
            tx.vin.append(CTxIn(COutPoint(utxo[0], utxo[1]), b''))
            tx.vout.append(CTxOut(utxo[2] - 1000, CScript([OP_TRUE])))
            pad_tx(tx)
            tx.rehash()
            utxo = [tx.txid, 0, tx.vout[0].nValue]
            block.vtx.append(tx)

        ordered_txs = block.vtx
        block.vtx = [block.vtx[0]] + \
            sorted(block.vtx[1:], key=lambda tx: tx.get_id())
        prepare_block(block)
        return block, ordered_txs
Example #10
 def next_block(self, number):
     if self.tip is None:
         base_block_hash = self.genesis_hash
         block_time = int(time.time()) + 1
     else:
         base_block_hash = self.tip.sha256
         block_time = self.tip.nTime + 1
     # First create the coinbase
     height = self.block_heights[base_block_hash] + 1
     coinbase = create_coinbase(height)
     coinbase.rehash()
     block = create_block(base_block_hash, coinbase, block_time)
     block.nHeight = height
     prepare_block(block)
     self.tip = block
     self.block_heights[block.sha256] = height
     assert number not in self.blocks
     self.blocks[number] = block
     return block
Example #11
    def next_block(self, number):
        if self.tip is None:
            base_block_hash = self.genesis_hash
            block_time = int(time.time()) + 1
        else:
            base_block_hash = self.tip.sha256
            block_time = self.tip.nTime + 1

        height = self.block_heights[base_block_hash] + 1
        # Add counter in coinbase to make blocks unique
        coinbase = create_coinbase(height, CScript([self.counter]))
        coinbase.rehash()
        self.counter += 1
        block = create_block(base_block_hash, coinbase, block_time)
        block.nHeight = height
        prepare_block(block)
        self.tip = block
        self.block_heights[block.sha256] = height
        assert number not in self.blocks
        self.blocks[number] = block
        return block
Example #12
    def make_utxos(self):
        # Doesn't matter which node we use, just use node0.
        block = self.build_block_on_tip(self.nodes[0])
        self.test_node.send_and_ping(msg_block(block))
        assert int(self.nodes[0].getbestblockhash(), 16) == block.sha256
        self.nodes[0].generate(100)

        total_value = block.vtx[0].vout[1].nValue
        out_value = total_value // 10
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(block.vtx[0].txid, 1), b''))
        for i in range(10):
            tx.vout.append(CTxOut(out_value, CScript([OP_TRUE])))
        tx.rehash()

        block2 = self.build_block_on_tip(self.nodes[0])
        block2.vtx.append(tx)
        prepare_block(block2)
        self.test_node.send_and_ping(msg_block(block2))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), block2.sha256)
        self.utxos.extend([[tx.txid, i, out_value] for i in range(10)])
        return
Example #13
    def next_block(self, number):
        if self.tip is None:
            base_block_hash = self.genesis_hash
            block_time = FIRST_BLOCK_TIME
        else:
            base_block_hash = self.tip.sha256
            block_time = self.tip.nTime + 1
        # First create the coinbase
        height = self.block_heights[base_block_hash] + 1
        coinbase = create_coinbase(height)
        coinbase.rehash()
        block = create_block(base_block_hash, coinbase, block_time)
        block.nHeight = height
        prepare_block(block)

        # Do PoW, which is cheap on regnet
        block.solve()
        self.tip = block
        self.block_heights[block.sha256] = height
        assert number not in self.blocks
        self.blocks[number] = block
        return block
Example #14
    def test_sequence_lock_unconfirmed_inputs(self):
        # Store height so we can easily reset the chain at the end of the test
        cur_height = self.nodes[0].getblockcount()

        # Create a mempool tx.
        txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 2)
        tx1 = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txid))
        tx1.rehash()

        # As the fees are calculated prior to the transaction being signed,
        # there is some uncertainty whether calculate_fee provides the correct
        # minimal fee. Since regtest coins are free, let's go ahead and
        # increase the fee by an order of magnitude to ensure this test
        # passes.
        fee_multiplier = 10

        # Anyone-can-spend mempool tx.
        # Sequence lock of 0 should pass.
        tx2 = CTransaction()
        tx2.nVersion = 2
        tx2.vin = [CTxIn(COutPoint(tx1.txid, 0), nSequence=0)]
        tx2.vout = [
            CTxOut(int(0), CScript([b'a']))]
        tx2.vout[0].nValue = tx1.vout[0].nValue - \
            fee_multiplier * self.nodes[0].calculate_fee(tx2)
        tx2_raw = self.nodes[0].signrawtransactionwithwallet(ToHex(tx2))["hex"]
        tx2 = FromHex(tx2, tx2_raw)
        tx2.rehash()
        self.nodes[0].sendrawtransaction(tx2_raw)

        # Create a spend of the 0th output of orig_tx with a sequence lock
        # of 1, and test what happens when submitting.
        # orig_tx.vout[0] must be an anyone-can-spend output
        def test_nonzero_locks(orig_tx, node, use_height_lock):
            sequence_value = 1
            if not use_height_lock:
                sequence_value |= SEQUENCE_LOCKTIME_TYPE_FLAG

            tx = CTransaction()
            tx.nVersion = 2
            tx.vin = [
                CTxIn(COutPoint(orig_tx.txid, 0), nSequence=sequence_value)]
            tx.vout = [
                CTxOut(int(orig_tx.vout[0].nValue - fee_multiplier * node.calculate_fee(tx)), CScript([b'a']))]
            pad_tx(tx)
            tx.rehash()

            if (orig_tx.txid_hex in node.getrawmempool()):
                # sendrawtransaction should fail if the tx is in the mempool
                assert_raises_rpc_error(-26, NOT_FINAL_ERROR,
                                        node.sendrawtransaction, ToHex(tx))
            else:
                # sendrawtransaction should succeed if the tx is not in the
                # mempool
                node.sendrawtransaction(ToHex(tx))

            return tx

        test_nonzero_locks(
            tx2, self.nodes[0], use_height_lock=True)
        test_nonzero_locks(
            tx2, self.nodes[0], use_height_lock=False)

        # Now mine some blocks, but make sure tx2 doesn't get mined.
        # Use prioritisetransaction to lower the effective feerate to 0
        self.nodes[0].prioritisetransaction(
            txid=tx2.txid_hex, fee_delta=-fee_multiplier * self.nodes[0].calculate_fee(tx2))
        cur_time = int(time.time())
        for i in range(10):
            self.nodes[0].setmocktime(cur_time + 600)
            self.nodes[0].generate(1)
            cur_time += 600

        assert tx2.txid_hex in self.nodes[0].getrawmempool()

        test_nonzero_locks(
            tx2, self.nodes[0], use_height_lock=True)
        test_nonzero_locks(
            tx2, self.nodes[0], use_height_lock=False)

        # Mine tx2, and then try again
        self.nodes[0].prioritisetransaction(
            txid=tx2.txid_hex, fee_delta=fee_multiplier * self.nodes[0].calculate_fee(tx2))

        # Advance the time on the node so that we can test timelocks
        self.nodes[0].setmocktime(cur_time + 600)
        self.nodes[0].generate(1)
        assert tx2.txid_hex not in self.nodes[0].getrawmempool()

        # Now that tx2 is not in the mempool, a sequence locked spend should
        # succeed
        tx3 = test_nonzero_locks(
            tx2, self.nodes[0], use_height_lock=False)
        assert tx3.txid_hex in self.nodes[0].getrawmempool()

        self.nodes[0].generate(1)
        assert tx3.txid_hex not in self.nodes[0].getrawmempool()

        # One more test, this time using height locks
        tx4 = test_nonzero_locks(
            tx3, self.nodes[0], use_height_lock=True)
        assert tx4.txid_hex in self.nodes[0].getrawmempool()

        # Now try combining confirmed and unconfirmed inputs
        tx5 = test_nonzero_locks(
            tx4, self.nodes[0], use_height_lock=True)
        assert tx5.txid_hex not in self.nodes[0].getrawmempool()

        utxos = self.nodes[0].listunspent()
        tx5.vin.append(
            CTxIn(COutPoint(int(utxos[0]["txid"], 16), utxos[0]["vout"]), nSequence=1))
        tx5.vout[0].nValue += int(utxos[0]["amount"] * COIN)
        raw_tx5 = self.nodes[0].signrawtransactionwithwallet(ToHex(tx5))["hex"]

        assert_raises_rpc_error(-26, NOT_FINAL_ERROR,
                                self.nodes[0].sendrawtransaction, raw_tx5)

        # Test mempool-BIP68 consistency after reorg
        #
        # State of the transactions in the last blocks:
        # ... -> [ tx2 ] ->  [ tx3 ]
        #         tip-1        tip
        # And currently tx4 is in the mempool.
        #
        # If we invalidate the tip, tx3 should get added to the mempool, causing
        # tx4 to be removed (fails sequence-lock).
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        assert tx4.txid_hex not in self.nodes[0].getrawmempool()
        assert tx3.txid_hex in self.nodes[0].getrawmempool()

        # Now mine 2 empty blocks to reorg out the current tip (labeled tip-1 in
        # diagram above).
        # This would cause tx2 to be added back to the mempool, which in turn causes
        # tx3 to be removed.
        tip = int(self.nodes[0].getblockhash(
            self.nodes[0].getblockcount() - 1), 16)
        height = self.nodes[0].getblockcount()
        for i in range(2):
            block = create_block(tip, create_coinbase(height), cur_time)
            block.nHeight = height
            prepare_block(block)
            tip = block.sha256
            height += 1
            assert_equal(
                None if i == 1 else 'inconclusive',
                self.nodes[0].submitblock(
                    ToHex(block)))
            cur_time += 1

        mempool = self.nodes[0].getrawmempool()
        assert tx3.txid_hex not in mempool
        assert tx2.txid_hex in mempool

        # Reset the chain and get rid of the mocktimed-blocks
        self.nodes[0].setmocktime(0)
        self.nodes[0].invalidateblock(
            self.nodes[0].getblockhash(cur_height + 1))
        self.nodes[0].generate(10)
Example #15
    def run_test(self):
        self.nodes[0].add_p2p_connection(P2PInterface())

        self.log.info("Mining {} blocks".format(CLTV_HEIGHT - 2))
        self.coinbase_txids = [
            self.nodes[0].getblock(b)['tx'][0]
            for b in self.nodes[0].generate(CLTV_HEIGHT - 2)
        ]
        self.nodeaddress = self.nodes[0].getnewaddress()

        self.log.info(
            "Test that an invalid-according-to-CLTV transaction cannot appear in a block"
        )

        fundtx = create_transaction(self.nodes[0],
                                    self.coinbase_txids[0],
                                    self.nodeaddress,
                                    amount=SUBSIDY - Decimal('1'),
                                    vout=1)
        fundtx, spendtx = cltv_lock_to_height(self.nodes[0], fundtx,
                                              self.nodeaddress,
                                              SUBSIDY - Decimal('2'))

        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(CLTV_HEIGHT - 1),
                             block_time)
        block.nHeight = CLTV_HEIGHT - 1
        block.vtx.append(fundtx)
        # include the -1 CLTV in block
        block.vtx.append(spendtx)
        prepare_block(block)

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        # This block is invalid
        assert self.nodes[0].getbestblockhash() != block.hash

        # Create a valid block to get over the threshold for the version enforcement
        block = create_block(int(tip, 16), create_coinbase(CLTV_HEIGHT - 1),
                             block_time)
        block.nHeight = CLTV_HEIGHT - 1
        prepare_block(block)
        self.nodes[0].p2p.send_and_ping(msg_block(block))

        tip = block.sha256
        block_time += 1
        self.log.info(
            "Test that invalid-according-to-cltv transactions cannot appear in a block"
        )
        block = create_block(tip, create_coinbase(CLTV_HEIGHT), block_time)
        block.nHeight = CLTV_HEIGHT

        fundtx = create_transaction(self.nodes[0],
                                    self.coinbase_txids[1],
                                    self.nodeaddress,
                                    amount=SUBSIDY - Decimal('1'),
                                    vout=1)
        fundtx, spendtx = cltv_lock_to_height(self.nodes[0], fundtx,
                                              self.nodeaddress,
                                              SUBSIDY - Decimal('2'))

        # The funding tx only has unexecuted bad CLTV, in scriptpubkey; this is
        # valid.
        self.nodes[0].p2p.send_and_ping(msg_tx(fundtx))
        assert fundtx.txid_hex in self.nodes[0].getrawmempool()

        # Mine a block containing the funding transaction
        block.vtx.append(fundtx)
        prepare_block(block)

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        # This block is valid
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)

        # We show that this tx is invalid due to CLTV by getting it
        # rejected from the mempool for exactly that reason.
        assert_equal([{
            'txid':
            spendtx.txid_hex,
            'allowed':
            False,
            'reject-reason':
            'mandatory-script-verify-flag-failed (Negative locktime)'
        }], self.nodes[0].testmempoolaccept(rawtxs=[spendtx.serialize().hex()],
                                            maxfeerate=0))

        rejectedtx_signed = self.nodes[0].signrawtransactionwithwallet(
            ToHex(spendtx))

        # Couldn't complete signature due to CLTV
        assert rejectedtx_signed['errors'][0]['error'] == 'Negative locktime'

        tip = block.hash
        block_time += 1
        block = create_block(block.sha256, create_coinbase(CLTV_HEIGHT + 1),
                             block_time)
        block.nHeight = CLTV_HEIGHT + 1
        block.vtx.append(spendtx)
        prepare_block(block)

        with self.nodes[0].assert_debug_log(expected_msgs=[
                'ConnectBlock {} failed, blk-bad-inputs'.format(block.hash)
        ]):
            self.nodes[0].p2p.send_and_ping(msg_block(block))
            assert_equal(self.nodes[0].getbestblockhash(), tip)
            self.nodes[0].p2p.sync_with_ping()

        self.log.info(
            "Test that a version 4 block with a valid-according-to-CLTV transaction is accepted"
        )
        fundtx = create_transaction(self.nodes[0],
                                    self.coinbase_txids[2],
                                    self.nodeaddress,
                                    amount=SUBSIDY - Decimal('1'),
                                    vout=1)
        fundtx, spendtx = cltv_lock_to_height(self.nodes[0], fundtx,
                                              self.nodeaddress,
                                              SUBSIDY - Decimal('2'),
                                              CLTV_HEIGHT)

        # make sure sequence is nonfinal and locktime is good
        spendtx.vin[0].nSequence = 0xfffffffe
        spendtx.nLockTime = CLTV_HEIGHT

        # both transactions are fully valid
        self.nodes[0].sendrawtransaction(ToHex(fundtx))
        self.nodes[0].sendrawtransaction(ToHex(spendtx))

        # Modify the transactions in the block to be valid against CLTV
        block.vtx.pop(1)
        block.vtx.append(fundtx)
        block.vtx.append(spendtx)
        prepare_block(block)

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        # This block is now valid
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)
Example #16
    def run_test(self):
        node = self.nodes[0]  # convenience reference to the node

        self.bootstrap_p2p()  # Add one p2p connection to the node

        best_block = self.nodes[0].getbestblockhash()
        tip = int(best_block, 16)
        best_block_time = self.nodes[0].getblock(best_block)['time']
        block_time = best_block_time + 1

        self.log.info("Create a new block with an anyone-can-spend coinbase.")
        height = 1
        blocks = []
        for _ in invalid_txs.iter_all_templates():
            block = create_block(tip, create_coinbase(height), block_time)
            block.nHeight = height
            prepare_block(block)
            block_time = block.nTime + 1
            height += 1
            # Save the coinbase for later
            blocks.append(block)
            tip = block.sha256
            node.p2p.send_blocks_and_test([block], node, success=True)

        self.log.info("Mature the blocks.")
        self.nodes[0].generatetoaddress(
            100, self.nodes[0].get_deterministic_priv_key().address)

        # Iterate through a list of known invalid transaction types, ensuring each is
        # rejected. Some are consensus invalid and some just violate policy.
        setup_txs = []
        for block, BadTxTemplate in zip(blocks,
                                        invalid_txs.iter_all_templates()):
            self.log.info("Testing invalid transaction: %s",
                          BadTxTemplate.__name__)
            template = BadTxTemplate(spend_block=block)
            setup_tx = template.get_setup_tx()
            if setup_tx is not None:
                node.p2p.send_txs_and_test([setup_tx], node)
                setup_txs.append(setup_tx)
                tx = template.get_tx(setup_tx)
            else:
                tx = template.get_tx()
            node.p2p.send_txs_and_test(
                [tx],
                node,
                success=False,
                expect_disconnect=template.expect_disconnect,
                reject_reason=template.reject_reason,
            )

            if template.expect_disconnect:
                self.log.info("Reconnecting to peer")
                self.reconnect_p2p()

        # Make two p2p connections to provide the node with orphans
        # * p2ps[0] will send valid orphan txs (one with low fee)
        # * p2ps[1] will send an invalid orphan tx (and is later disconnected for that)
        self.reconnect_p2p(num_connections=2)

        self.log.info('Test orphan transaction handling ... ')
        # Create a root transaction that we withhold until all dependent transactions
        # have been sent out and are in the orphan cache
        SCRIPT_PUB_KEY_OP_TRUE = CScript([OP_TRUE])
        tx_withhold = CTransaction()
        tx_withhold.vin.append(
            CTxIn(outpoint=COutPoint(blocks[0].vtx[0].txid, 1)))
        tx_withhold.vout.append(
            CTxOut(nValue=int(SUBSIDY * COIN) - 12000,
                   scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
        pad_tx(tx_withhold)
        tx_withhold.calc_txid()

        # Our first orphan tx with some outputs to create further orphan txs
        tx_orphan_1 = CTransaction()
        tx_orphan_1.vin.append(CTxIn(outpoint=COutPoint(tx_withhold.txid, 0)))
        tx_orphan_1.vout = [
            CTxOut(nValue=int(0.1 * COIN), scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE)
        ] * 3
        pad_tx(tx_orphan_1)
        tx_orphan_1.calc_txid()

        # A valid transaction with low fee
        tx_orphan_2_no_fee = CTransaction()
        tx_orphan_2_no_fee.vin.append(
            CTxIn(outpoint=COutPoint(tx_orphan_1.txid, 0)))
        tx_orphan_2_no_fee.vout.append(
            CTxOut(nValue=int(0.1 * COIN),
                   scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
        pad_tx(tx_orphan_2_no_fee)

        # A valid transaction with sufficient fee
        tx_orphan_2_valid = CTransaction()
        tx_orphan_2_valid.vin.append(
            CTxIn(outpoint=COutPoint(tx_orphan_1.txid, 1)))
        tx_orphan_2_valid.vout.append(
            CTxOut(nValue=int(0.1 * COIN) - 12000,
                   scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
        tx_orphan_2_valid.calc_txid()
        pad_tx(tx_orphan_2_valid)

        # An invalid transaction with negative fee
        tx_orphan_2_invalid = CTransaction()
        tx_orphan_2_invalid.vin.append(
            CTxIn(outpoint=COutPoint(tx_orphan_1.txid, 2)))
        tx_orphan_2_invalid.vout.append(
            CTxOut(nValue=int(1.1 * COIN),
                   scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
        pad_tx(tx_orphan_2_invalid)
        tx_orphan_2_invalid.calc_txid()

        self.log.info('Send the orphans ... ')
        # Send valid orphan txs from p2ps[0]
        node.p2p.send_txs_and_test(
            [tx_orphan_1, tx_orphan_2_no_fee, tx_orphan_2_valid],
            node,
            success=False)
        # Send invalid tx from p2ps[1]
        node.p2ps[1].send_txs_and_test([tx_orphan_2_invalid],
                                       node,
                                       success=False)

        # Mempool should only have setup txs
        assert_equal(len(setup_txs), node.getmempoolinfo()['size'])
        # p2ps[1] is still connected
        assert_equal(2, len(node.getpeerinfo()))

        self.log.info('Send the withhold tx ... ')
        with node.assert_debug_log(expected_msgs=["bad-txns-in-belowout"]):
            node.p2p.send_txs_and_test([tx_withhold], node, success=True)

        # Transactions that should end up in the mempool
        expected_mempool = {
            t.txid_hex
            for t in [
                tx_withhold,  # The transaction that is the root for all orphans
                tx_orphan_1,  # The orphan transaction that splits the coins
                # The valid transaction (with sufficient fee)
                tx_orphan_2_valid,
            ] + setup_txs  # The setup transactions we added in the beginning
        }
        # Transactions that do not end up in the mempool:
        # tx_orphan_2_no_fee, because it has too low a fee (p2ps[0] is not disconnected for relaying that tx)
        # tx_orphan_2_invalid, because it has a negative fee (p2ps[1] is
        # disconnected for relaying that tx)

        # p2ps[1] is no longer connected
        wait_until(lambda: 1 == len(node.getpeerinfo()), timeout=12)
        assert_equal(expected_mempool, set(node.getrawmempool()))

        self.log.info('Test orphan pool overflow')
        orphan_tx_pool = [CTransaction() for _ in range(101)]
        for i in range(len(orphan_tx_pool)):
            orphan_tx_pool[i].vin.append(CTxIn(outpoint=COutPoint(i, 333)))
            orphan_tx_pool[i].vout.append(
                CTxOut(nValue=int(1.1 * COIN),
                       scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
            pad_tx(orphan_tx_pool[i])

        with node.assert_debug_log(['mapOrphan overflow, removed 1 tx']):
            node.p2p.send_txs_and_test(orphan_tx_pool, node, success=False)

        rejected_parent = CTransaction()
        rejected_parent.vin.append(
            CTxIn(outpoint=COutPoint(tx_orphan_2_invalid.txid, 0)))
        rejected_parent.vout.append(
            CTxOut(nValue=int(1.1 * COIN),
                   scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
        pad_tx(rejected_parent)
        rejected_parent.rehash()
        with node.assert_debug_log([
                'not keeping orphan with rejected parents {}'.format(
                    rejected_parent.txid_hex)
        ]):
            node.p2p.send_txs_and_test([rejected_parent], node, success=False)
Example #17
    def run_test(self):
        node = self.nodes[0]
        node.add_p2p_connection(P2PDataStore())

        # OP_TRUE in P2SH
        address = node.decodescript('51')['p2sh']
        # burn script
        p2sh_script = CScript([OP_HASH160, bytes(20), OP_EQUAL])

        prevblockhash = node.getbestblockhash()

        coinbase = create_coinbase(201)
        coinbase.vout[1].scriptPubKey = p2sh_script
        coinbase.rehash()
        sample_block = CBlock()
        sample_block.vtx = [coinbase]
        sample_block.hashPrevBlock = int(prevblockhash, 16)
        sample_block.nBits = 0x207fffff
        sample_block.nTime = 1600000036
        sample_block.nReserved = 0
        sample_block.nHeaderVersion = 1
        sample_block.nHeight = 201
        sample_block.hashEpochBlock = 0
        sample_block.hashMerkleRoot = sample_block.calc_merkle_root()
        sample_block.hashExtendedMetadata = hash256_int(b'\0')
        sample_block.update_size()

        # Using legacy hashing algo
        block = copy.deepcopy(sample_block)
        target = uint256_from_compact(block.nBits)
        block.rehash()
        while hash256_int(
                block.serialize()) > target or block.sha256 <= target:
            block.nNonce += 1
            block.rehash()
        self.fail_block(block, force_send=True, reject_reason='high-hash')
        del block

        # Claimed size already excessive (before doing any other checks)
        block = copy.deepcopy(sample_block)
        block.nSize = 32_000_001
        block.solve()
        self.fail_block(block, force_send=True, reject_reason='bad-blk-size')
        del block

        # Incorrect nBits
        block = copy.deepcopy(sample_block)
        block.nBits = 0x207ffffe
        block.solve()
        self.fail_block(block, force_send=True, reject_reason='bad-diffbits')
        del block

        # Block too old
        block = copy.deepcopy(sample_block)
        block.nTime = 1600000035
        block.solve()
        self.fail_block(block, force_send=True, reject_reason='time-too-old')
        del block

        # nReserved must be 0
        block = copy.deepcopy(sample_block)
        block.nReserved = 0x0100
        block.solve()
        self.fail_block(block,
                        force_send=True,
                        reject_reason='bad-blk-reserved')
        del block

        # nHeaderVersion must be 1
        block = copy.deepcopy(sample_block)
        block.nHeaderVersion = 0
        block.solve()
        self.fail_block(block,
                        force_send=True,
                        reject_reason='bad-blk-version')
        block.nHeaderVersion = 2
        block.solve()
        self.fail_block(block,
                        force_send=True,
                        reject_reason='bad-blk-version')
        del block

        # Incorrect claimed height
        block = copy.deepcopy(sample_block)
        block.nHeight = 200
        block.solve()
        self.fail_block(block, force_send=True, reject_reason='bad-blk-height')
        block.nHeight = 202
        block.solve()
        self.fail_block(block, force_send=True, reject_reason='bad-blk-height')
        del block

        # Invalid epoch block
        block = copy.deepcopy(sample_block)
        block.hashEpochBlock = 1
        block.solve()
        self.fail_block(block, force_send=True, reject_reason='bad-blk-epoch')
        del block

        # Time too far into the future
        block = copy.deepcopy(sample_block)
        block.nTime = int(time.time()) + 2 * 60 * 60 + 1
        block.solve()
        self.fail_block(block, force_send=True, reject_reason='time-too-new')
        del block

        # Invalid merkle root
        block = copy.deepcopy(sample_block)
        block.hashMerkleRoot = 0
        block.solve()
        self.fail_block(block, reject_reason='bad-txnmrklroot')
        del block

        # Invalid metadata hash
        block = copy.deepcopy(sample_block)
        block.hashExtendedMetadata = 0
        block.solve()
        self.fail_block(block, reject_reason='bad-metadata-hash')
        del block

        # Non-empty metadata
        block = copy.deepcopy(sample_block)
        block.vMetadata.append(CBlockMetadataField(0, b''))
        block.rehash_extended_metadata()
        block.solve()
        self.fail_block(block, reject_reason='bad-metadata')
        del block

        # Claimed nSize doesn't match actual size
        block = copy.deepcopy(sample_block)
        block.nSize = 1
        block.solve()
        self.fail_block(block, reject_reason='blk-size-mismatch')
        del block

        block_template = node.getblocktemplate()
        assert_equal(block_template.pop('capabilities'), ['proposal'])
        assert_equal(block_template.pop('version'), 1)
        assert_equal(block_template.pop('previousblockhash'), prevblockhash)
        assert_equal(
            block_template.pop('epochblockhash'),
            '0000000000000000000000000000000000000000000000000000000000000000')
        assert_equal(
            block_template.pop('extendedmetadatahash'),
            '9a538906e6466ebd2617d321f71bc94e56056ce213d366773699e28158e00614')
        assert_equal(block_template.pop('transactions'), [])
        assert_equal(block_template.pop('coinbaseaux'), {})
        assert_equal(block_template.pop('coinbasevalue'), int(SUBSIDY * COIN))
        assert_equal(block_template.pop('coinbasetxn'),
                     {'minerfund': {
                         'outputs': []
                     }})
        block_template.pop('longpollid')
        assert_equal(
            block_template.pop('target'),
            '7fffff0000000000000000000000000000000000000000000000000000000000')
        assert_equal(block_template.pop('mintime'), 1600000036)
        assert_equal(block_template.pop('mutable'),
                     ['time', 'transactions', 'prevblock'])
        assert_equal(block_template.pop('noncerange'), '00000000ffffffff')
        assert_equal(block_template.pop('sigoplimit'), 226950)
        assert_equal(block_template.pop('sizelimit'), 32000000)
        block_template.pop('curtime')
        assert_equal(block_template.pop('bits'), '207fffff')
        assert_equal(block_template.pop('height'), 201)
        assert_equal(block_template, {})

        # Check epoch hash is 0 for the first 20 blocks
        for height in range(201, 221):
            block_template = node.getblocktemplate()
            assert_equal(block_template['epochblockhash'], '00' * 32)
            block = self.block_from_template(block_template)
            block.hashEpochBlock = 0
            prepare_block(block)
            node.p2p.send_blocks_and_test([block], node)
            del block

        # Move to end of epoch
        node.generatetoaddress(4819, address)
        assert_equal(node.getblockcount(), 5039)

        epochblockhash = node.getbestblockhash()
        epochblock = node.getblock(epochblockhash)
        assert_equal(epochblock['epochblockhash'], '00' * 32)

        # getblocktemplate gives us current tip as epoch block hash
        block_template = node.getblocktemplate()
        assert_equal(block_template['epochblockhash'], epochblockhash)
        assert_equal(block_template['previousblockhash'], epochblockhash)

        # Using 0 as epoch block hash is now invalid
        block = self.block_from_template(block_template)
        block.hashEpochBlock = 0
        prepare_block(block)
        self.fail_block(block, force_send=True, reject_reason='bad-blk-epoch')

        # Setting current tip as epoch hash makes the block valid
        block.hashEpochBlock = int(epochblockhash, 16)
        prepare_block(block)
        node.p2p.send_blocks_and_test([block], node)
        del block

        # getblocktemplate still gives us the same epoch block hash
        block_template = node.getblocktemplate()
        assert_equal(block_template['epochblockhash'], epochblockhash)
        assert_equal(block_template['previousblockhash'],
                     node.getbestblockhash())

        # Block after that still requires epoch block hash
        block = self.block_from_template(block_template)
        block.hashEpochBlock = int(epochblockhash, 16)
        prepare_block(block)
        node.p2p.send_blocks_and_test([block], node)
        del block

        # Test 48-bit nTime
        node.setmocktime(
            2**32)  # smallest number that does not fit in a 32-bit number
        block_template = node.getblocktemplate()
        assert_equal(block_template['curtime'], 2**32)
        block = self.block_from_template(block_template)
        block.nTime = 2**32
        prepare_block(block)
        node.p2p.send_blocks_and_test([block], node)
        del block

        node.setmocktime(2**48 - 1)  # biggest possible 48-bit number
        block_template = node.getblocktemplate()
        assert_equal(block_template['curtime'], 2**48 - 1)
        block = self.block_from_template(block_template)
        block.nTime = 2**48 - 1
        prepare_block(block)
        node.p2p.send_blocks_and_test([block], node)
        del block
Example #18
    def run_test(self):
        node = self.nodes[0]
        node.add_p2p_connection(P2PDataStore())
        # OP_TRUE in P2SH to keep txs standard
        address = node.decodescript('51')['p2sh']
        num_mature_coins = 10
        node.generatetoaddress(num_mature_coins, address)
        node.generatetoaddress(100, address)

        value = int(SUBSIDY * COIN)

        p2sh_script = CScript([OP_HASH160, bytes(20), OP_EQUAL])

        def make_tx(coin_height):
            assert coin_height <= num_mature_coins
            block_hash = node.getblockhash(coin_height)
            coin = int(node.getblock(block_hash)['tx'][0], 16)
            # make non-standard transaction
            tx = CTransaction()
            tx.vin.append(
                CTxIn(COutPoint(coin, 1), CScript([b'\x51'])))
            return tx

        def make_block():
            parent_block_header = node.getblockheader(node.getbestblockhash())
            height = parent_block_header['height'] + 1
            coinbase = create_coinbase(height)
            coinbase.vout[1].scriptPubKey = p2sh_script
            coinbase.rehash()
            block = create_block(
                int(parent_block_header['hash'], 16), coinbase, parent_block_header['time'] + 1)
            block.nHeight = height
            return block

        # make a few non-standard txs
        nonstd_txs = []
        # bare OP_TRUE is a non-standard output
        bare_op_true_tx = make_tx(1)
        bare_op_true_tx.vout.append(
            CTxOut(value - 1000, CScript([OP_TRUE])))
        pad_tx(bare_op_true_tx)
        nonstd_txs.append(([bare_op_true_tx], 'scriptpubkey'))

        # version 0 is a non-standard version
        version_0_tx = make_tx(2)
        version_0_tx.nVersion = 0
        pad_tx(version_0_tx)
        nonstd_txs.append(([version_0_tx], 'version'))

        # version 3 is a non-standard version
        version_3_tx = make_tx(3)
        version_3_tx.nVersion = 3
        pad_tx(version_3_tx)
        nonstd_txs.append(([version_3_tx], 'version'))        

        # dust is non-standard (but ok in blocks)
        dust_tx = make_tx(4)
        dust_tx.vout.append(
            CTxOut(539, p2sh_script))
        dust_tx.vout.append(
            CTxOut(value - 2000, p2sh_script))
        pad_tx(dust_tx)
        nonstd_txs.append(([dust_tx], 'dust'))

        # OP_NOP10 is non-standard
        nop10_script = CScript([OP_NOP10, OP_TRUE])
        nop10_fund_tx = make_tx(5)
        nop10_fund_tx.vout.append(
            CTxOut(value - 2000, CScript([OP_HASH160, hash160(nop10_script), OP_EQUAL])))
        pad_tx(nop10_fund_tx)
        nop10_fund_tx.rehash()

        nop10_spend_tx = CTransaction()
        nop10_spend_tx.vin.append(
            CTxIn(COutPoint(nop10_fund_tx.txid, 0), CScript([nop10_script])))
        pad_tx(nop10_spend_tx)
        nonstd_txs.append(([nop10_fund_tx, nop10_spend_tx], 'non-mandatory-script-verify-flag (NOPx reserved for soft-fork upgrades)'))
        
        # also make a few standard txs to check if they still work
        std_txs = []
        p2sh_tx = make_tx(6)
        p2sh_tx.vout.append(
            CTxOut(value - 1000, p2sh_script))
        pad_tx(p2sh_tx)
        std_txs.append(p2sh_tx)

        # version 1 is a standard version
        version_1_tx = make_tx(7)
        version_1_tx.nVersion = 1
        pad_tx(version_1_tx)
        std_txs.append(version_1_tx)

        # version 2 is a standard version
        version_2_tx = make_tx(8)
        version_2_tx.nVersion = 2
        pad_tx(version_2_tx)
        std_txs.append(version_2_tx)
        
        # amount above dust limit is standard
        non_dust_tx = make_tx(9)
        non_dust_tx.vout.append(
            CTxOut(540, p2sh_script))
        non_dust_tx.vout.append(
            CTxOut(value - 2000, p2sh_script))
        non_dust_tx.rehash()
        std_txs.append(non_dust_tx)

        # ==== FIRST TEST ====
        # -acceptnonstdtxn=0 -allownonstdtxnconsensus=1
        # Original Bitcoin behavior: standardness is policy but not consensus 
        # ====            ====

        # verify non-standard txs are rejected from mempool
        for txs, reason in nonstd_txs:
            if len(txs) > 1:
                # txs before last one treated as setup txs
                node.p2p.send_txs_and_test(txs[:-1], node)
            node.p2p.send_txs_and_test(txs[-1:], node, success=False, reject_reason=reason)

        # verify standard txs are accepted into mempool
        node.p2p.send_txs_and_test(std_txs, node)

        # verify both sets of txs are accepted as blocks
        nonstd_block = make_block()
        nonstd_block.vtx.extend(
            tx
            for txs, _ in nonstd_txs
            for tx in txs
        )
        nonstd_block.vtx.extend(std_txs)
        prepare_block(nonstd_block)
        # send nonstd_block, expected accept
        node.p2p.send_blocks_and_test([nonstd_block], node)
        node.invalidateblock(node.getbestblockhash())


        # ==== SECOND TEST ====
        # -acceptnonstdtxn=0 -allownonstdtxnconsensus=0
        # New Logos behavior: standardness is both policy and consensus
        # ====             ====

        # This is default behavior and doesn't require parameters
        self.restart_node(0, ["[email protected]"])
        node.add_p2p_connection(P2PDataStore())

        # verify txs are rejected from mempool
        for txs, reason in nonstd_txs:
            if len(txs) > 1:
                # txs before last one treated as setup txs
                node.p2p.send_txs_and_test(txs[:-1], node)
            node.p2p.send_txs_and_test(txs[-1:], node, success=False, reject_reason=reason)

        # verify standard txs are accepted into mempool
        node.p2p.send_txs_and_test(std_txs, node)

        # verify txs in blocks are rejected
        for txs, reason in nonstd_txs:
            block = make_block()
            block.vtx += txs
            prepare_block(block)
            if reason == 'dust':
                # verify dust is actually allowed in block
                node.p2p.send_blocks_and_test([block], node)
                node.invalidateblock(node.getbestblockhash())
            else:
                if 'NOPx' in reason:
                    reason = 'blk-bad-inputs'
                else:
                    reason = 'contains a non-standard transaction (and fRequireStandardConsensus is true)'
                node.p2p.send_blocks_and_test([block], node, success=False, reject_reason=reason)

        # verify std txs are accepted as blocks
        std_block = make_block()
        std_block.vtx.extend(std_txs)
        prepare_block(std_block)
        # send std_block, expected accept
        node.p2p.send_blocks_and_test([std_block], node)
        node.invalidateblock(node.getbestblockhash())

        # ==== THIRD TEST ====
        # -acceptnonstdtxn=1 -allownonstdtxnconsensus=0
        # Invalid configuration: standardness not policy but consensus
        # ====            ====
        node.stop_node()
        node.start(["-acceptnonstdtxn=1",
                    "-allownonstdtxnconsensus=0"])
        def is_node_stopped_with_error():
            if not node.running:
                return True
            return_code = node.process.poll()
            if return_code is None:
                return False
            node.running = False
            node.process = None
            node.rpc_connected = False
            node.rpc = None
            node.log.debug("Node stopped")
            return True

        wait_until(is_node_stopped_with_error, timeout=5)
        node.stderr.flush()
        assert_equal(
            open(node.stderr.name).read(),
            'Error: -acceptnonstdtxn=1 -allownonstdtxnconsensus=0 is an invalid combination\n')

        # ==== FOURTH TEST ====
        # -acceptnonstdtxn=1 -allownonstdtxnconsensus=1
        # Standardness neither policy nor consensus, anything goes
        # ====             ====

        # use node.start as node already stopped in previous test
        node.start(["-acceptnonstdtxn=1",
                    "-allownonstdtxnconsensus=1"])
        node.wait_for_rpc_connection()
        node.add_p2p_connection(P2PDataStore())

        # verify non-standard txs are accepted to mempool (except OP_NOP10)
        node.p2p.send_txs_and_test(
            [
                tx
                for txs, _ in nonstd_txs[:-1]
                for tx in txs
            ],
            node)

        # verify standard txs are accepted into mempool
        node.p2p.send_txs_and_test(std_txs, node)
        # fund tx for OP_NOP10 is accepted
        node.p2p.send_txs_and_test([nop10_fund_tx], node)
        # spend tx for OP_NOP10 is still rejected
        node.p2p.send_txs_and_test([nop10_spend_tx], node, success=False)
        nonstd_block.nTime += 1  # tweak time so we don't collide with invalidateblock
        nonstd_block.solve()
        # verify (tweaked) non-standard block from before is valid
        node.p2p.send_blocks_and_test([nonstd_block], node)
Example #19
    def run_test(self):
        """Main test logic"""

        # Creating a P2P connection will wait for a verack to make sure the
        # connection is fully up
        self.nodes[0].add_p2p_connection(BaseNode())

        # Generating a block on one of the nodes will get us out of IBD
        blocks = [int(self.nodes[0].generate(nblocks=1)[0], 16)]
        self.sync_all(self.nodes[0:2])

        # Notice above how we called an RPC by calling a method with the same
        # name on the node object. Notice also how we used a keyword argument
        # to specify a named RPC argument. Neither of those are defined on the
        # node object. Instead there's some __getattr__() magic going on under
        # the covers to dispatch unrecognised attribute calls to the RPC
        # interface.

        # Logs are nice. Do plenty of them. They can be used in place of comments for
        # breaking the test into sub-sections.
        self.log.info("Starting test!")

        self.log.info("Calling a custom function")
        custom_function()

        self.log.info("Calling a custom method")
        self.custom_method()

        self.log.info("Create some blocks")
        self.tip = int(self.nodes[0].getbestblockhash(), 16)
        self.block_time = self.nodes[0].getblock(
            self.nodes[0].getbestblockhash())['time'] + 1

        height = self.nodes[0].getblockcount()

        for i in range(10):
            # Use the mininode and blocktools functionality to manually build a block
            # Calling the generate() rpc is easier, but this allows us to exactly
            # control the blocks and transactions.
            block = create_block(self.tip, create_coinbase(height + 1),
                                 self.block_time)
            block.nHeight = height + 1
            prepare_block(block)
            block_message = msg_block(block)
            # Send message is used to send a P2P message to the node over our
            # P2PInterface
            self.nodes[0].p2p.send_message(block_message)
            self.tip = block.sha256
            blocks.append(self.tip)
            self.block_time += 1
            height += 1

        self.log.info(
            "Wait for node1 to reach current tip (height 11) using RPC")
        self.nodes[1].waitforblockheight(11)

        self.log.info("Connect node2 and node1")
        connect_nodes(self.nodes[1], self.nodes[2])

        self.log.info("Wait for node2 to receive all the blocks from node1")
        self.sync_all()

        self.log.info("Add P2P connection to node2")
        self.nodes[0].disconnect_p2ps()

        self.nodes[2].add_p2p_connection(BaseNode())

        self.log.info("Test that node2 propagates all the blocks to us")

        getdata_request = msg_getdata()
        for block in blocks:
            getdata_request.inv.append(CInv(MSG_BLOCK, block))
        self.nodes[2].p2p.send_message(getdata_request)

        # wait_until() will loop until a predicate condition is met. Use it to test properties of the
        # P2PInterface objects.
        wait_until(lambda: sorted(blocks) == sorted(
            list(self.nodes[2].p2p.block_receive_map.keys())),
                   timeout=5,
                   lock=mininode_lock)

        self.log.info("Check that each block was received only once")
        # The network thread uses a global lock on data access to the P2PConnection objects when sending and receiving
        # messages. The test thread should acquire the global lock before accessing any P2PConnection data to avoid locking
        # and synchronization issues. Note wait_until() acquires this global
        # lock when testing the predicate.
        with mininode_lock:
            for block in self.nodes[2].p2p.block_receive_map.values():
                assert_equal(block, 1)
Example #20
 def solve_and_send_block(prevhash, height, time):
     b = create_block(prevhash, create_coinbase(height), time)
     b.nHeight = height
     prepare_block(b)
     node.p2p.send_and_ping(msg_block(b))
     return b
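solve_and_send_block is a local helper, so node and its p2p connection come from the enclosing test. A call in the style of the other examples on this page (the tip/height/time lookups mirror Example #2) would look roughly like this; treat it as a sketch rather than the original call site.

     tip = int(node.getbestblockhash(), 16)
     height = node.getblockcount() + 1
     block_time = node.getblock(node.getbestblockhash())['time'] + 1
     solve_and_send_block(tip, height, block_time)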
Example #21
    def run_test(self):
        self.nodes[0].add_p2p_connection(P2PInterface())

        self.log.info("Mining {} blocks".format(DERSIG_HEIGHT - 1))
        self.coinbase_txids = [
            self.nodes[0].getblock(b)['tx'][0]
            for b in self.nodes[0].generate(DERSIG_HEIGHT - 1)
        ]
        self.nodeaddress = self.nodes[0].getnewaddress()

        self.log.info(
            "Test that transactions with non-DER signatures cannot appear in a block"
        )
        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(DERSIG_HEIGHT),
                             block_time)
        block.nHeight = DERSIG_HEIGHT
        spendtx = create_transaction(self.nodes[0],
                                     self.coinbase_txids[1],
                                     self.nodeaddress,
                                     amount=1.0,
                                     vout=1)
        unDERify(spendtx)
        spendtx.rehash()

        # First we show that this tx is valid except for DERSIG by getting it
        # rejected from the mempool for exactly that reason.
        assert_equal([{
            'txid':
            spendtx.txid_hex,
            'allowed':
            False,
            'reject-reason':
            'mandatory-script-verify-flag-failed (Non-canonical DER signature)'
        }], self.nodes[0].testmempoolaccept(rawtxs=[spendtx.serialize().hex()],
                                            maxfeerate=0))

        # Now we verify that a block with this transaction is also invalid.
        block.vtx.append(spendtx)
        prepare_block(block)

        with self.nodes[0].assert_debug_log(expected_msgs=[
                'ConnectBlock {} failed, blk-bad-inputs'.format(block.hash)
        ]):
            self.nodes[0].p2p.send_and_ping(msg_block(block))
            assert_equal(self.nodes[0].getbestblockhash(), tip)
            self.nodes[0].p2p.sync_with_ping()

        self.log.info(
            "Test that a version 3 block with a DERSIG-compliant transaction is accepted"
        )
        block.vtx[1] = create_transaction(self.nodes[0],
                                          self.coinbase_txids[1],
                                          self.nodeaddress,
                                          amount=1.0,
                                          vout=1)
        prepare_block(block)

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
Example #22
    def run_test(self):
        node = self.nodes[0]
        node.add_p2p_connection(P2PDataStore())
        # OP_TRUE in P2SH
        address = node.decodescript('51')['p2sh']
        num_mature_coins = 30
        node.generatetoaddress(num_mature_coins, address)
        node.generatetoaddress(100, address)

        value = int(SUBSIDY * 1_000_000)
        p2sh_script = CScript([OP_HASH160, bytes(20), OP_EQUAL])

        def make_tx(coin_height):
            assert coin_height <= num_mature_coins
            block_hash = node.getblockhash(coin_height)
            coin = int(node.getblock(block_hash)['tx'][0], 16)
            tx = CTransaction()
            tx.vin.append(CTxIn(COutPoint(coin, 1), CScript([b'\x51'])))
            return tx

        def make_block():
            parent_block_header = node.getblockheader(node.getbestblockhash())
            height = parent_block_header['height'] + 1
            coinbase = create_coinbase(height)
            coinbase.vout[1].scriptPubKey = p2sh_script
            coinbase.rehash()
            block = create_block(int(parent_block_header['hash'], 16),
                                 coinbase, parent_block_header['time'] + 1)
            block.nHeight = height
            return block

        interesting_numbers = [
            0,
            1,
            -1,
            2,
            -2,
            4,
            -4,
            10,
            -10,
            127,
            -127,
            256,
            -256,
            0x7fffffff,
            -0x7fffffff,
            0x100000000,
            -0x100000000,
            0x7fffffffff,
            -0x7fffffffff,
            0x10000000000,
            -0x10000000000,
            0x7fffffffffffff,
            -0x7fffffffffffff,
            0x100000000000000,
            -0x100000000000000,
            0x7fffffffffffffff,
            -0x7fffffffffffffff,
            0x10000000000000000,
            -0x10000000000000000,
        ]

        # make integer scripts
        valid_scripts = []
        invalid_scripts = []

        def make_script(a, b=None, *, result, opcode):
            if (MIN_SCRIPT_INT <= a <= MAX_SCRIPT_INT
                    and (b is None or MIN_SCRIPT_INT <= b <= MAX_SCRIPT_INT)
                    and (MIN_SCRIPT_INT <= result <= MAX_SCRIPT_INT)):
                if b is None:
                    valid_scripts.append(
                        CScript([a, opcode, result, OP_EQUALVERIFY, OP_TRUE]))
                else:
                    valid_scripts.append(
                        CScript(
                            [a, b, opcode, result, OP_EQUALVERIFY, OP_TRUE]))
            else:
                if b is None:
                    invalid_scripts.append(CScript([a, opcode, OP_TRUE]))
                else:
                    invalid_scripts.append(CScript([a, b, opcode, OP_TRUE]))

        for a in interesting_numbers:
            make_script(a, result=a + 1, opcode=OP_1ADD)
            make_script(a, result=a - 1, opcode=OP_1SUB)
            make_script(a, result=-a, opcode=OP_NEGATE)
            make_script(a, result=abs(a), opcode=OP_ABS)
            make_script(a, result=not a, opcode=OP_NOT)
            make_script(a, result=a != 0, opcode=OP_0NOTEQUAL)
            for b in interesting_numbers:
                make_script(a, b, result=a + b, opcode=OP_ADD)
                make_script(a, b, result=a - b, opcode=OP_SUB)
                if b != 0:
                    # Note: We have to use Decimal here, as Python's integers behave differently
                    # for division and modulo for negative numbers.
                    make_script(a,
                                b,
                                result=int(Decimal(a) // Decimal(b)),
                                opcode=OP_DIV)
                    make_script(a,
                                b,
                                result=int(Decimal(a) % Decimal(b)),
                                opcode=OP_MOD)
                else:
                    invalid_scripts.append(CScript([a, b, OP_DIV, OP_TRUE]))
                    invalid_scripts.append(CScript([a, b, OP_MOD, OP_TRUE]))
                make_script(a, b, result=a < b, opcode=OP_LESSTHAN)
                make_script(a, b, result=a > b, opcode=OP_GREATERTHAN)
                make_script(a, b, result=a <= b, opcode=OP_LESSTHANOREQUAL)
                make_script(a, b, result=a >= b, opcode=OP_GREATERTHANOREQUAL)
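
        # Quick illustration of the Decimal trick used in the loop above (an
        # aside; these asserts always hold): Python's integer // and % floor
        # toward negative infinity, while Decimal (and, per the comment above,
        # the script opcodes) truncate toward zero.
        assert -7 // 2 == -4
        assert int(Decimal(-7) // Decimal(2)) == -3
        assert -7 % 2 == 1
        assert int(Decimal(-7) % Decimal(2)) == -1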

        txs = []
        num_txs = 10
        scripts_per_tx = len(valid_scripts) // num_txs
        for i in range(1, num_txs + 1):
            fund_tx = make_tx(i * 2)
            spend_tx = make_tx(i * 2 + 1)
            scripts = valid_scripts[(i - 1) * scripts_per_tx:][:scripts_per_tx]
            for script in scripts:
                fund_tx.vout.append(
                    CTxOut(value // len(scripts),
                           CScript([OP_HASH160,
                                    hash160(script), OP_EQUAL])))
            fund_tx.rehash()
            for vout, script in enumerate(scripts):
                spend_tx.vin.append(
                    CTxIn(COutPoint(fund_tx.txid, vout), CScript([script])))
            spend_tx.vout.append(CTxOut(value // len(scripts), p2sh_script))
            txs.append(fund_tx)
            txs.append(spend_tx)
        block = make_block()
        block.vtx.extend(txs)
        prepare_block(block)
        node.p2p.send_blocks_and_test([block], node)

        fund_txs = []
        invalid_spend_txs = []
        num_txs = 5
        scripts_per_tx = len(invalid_scripts) // num_txs
        for i in range(1, num_txs + 1):
            fund_tx = make_tx(21 + i)
            scripts = invalid_scripts[(i - 1) *
                                      scripts_per_tx:][:scripts_per_tx]
            for script in scripts:
                fund_tx.vout.append(
                    CTxOut(value // len(scripts),
                           CScript([OP_HASH160,
                                    hash160(script), OP_EQUAL])))
            fund_tx.rehash()
            fund_txs.append(fund_tx)

            for vout, script in enumerate(scripts):
                spend_tx = CTransaction()
                spend_tx.vin.append(
                    CTxIn(COutPoint(fund_tx.txid, vout), CScript([script])))
                spend_tx.vout.append(CTxOut(value // len(scripts),
                                            p2sh_script))
                pad_tx(spend_tx)
                invalid_spend_txs.append(spend_tx)

        block = make_block()
        block.vtx.extend(fund_txs)
        prepare_block(block)
        node.p2p.send_blocks_and_test([block], node)

        invalid_block = make_block()
        invalid_block.vtx.append(None)
        for invalid_spend_tx in random.sample(invalid_spend_txs, 100):
            invalid_block.vtx[1] = invalid_spend_tx
            prepare_block(invalid_block)
            node.p2p.send_blocks_and_test(
                [invalid_block],
                node,
                success=False,
                reject_reason=
                'state=blk-bad-inputs, parallel script check failed')
Example #23
0
    def run_test(self):
        self.mine_chain()
        node = self.nodes[0]

        def assert_submitblock(block, result_str_1, result_str_2=None):
            block.solve()
            result_str_2 = result_str_2 or 'duplicate-invalid'
            assert_equal(result_str_1,
                         node.submitblock(hexdata=block.serialize().hex()))
            assert_equal(result_str_2,
                         node.submitblock(hexdata=block.serialize().hex()))

        self.log.info('getmininginfo')
        mining_info = node.getmininginfo()
        assert_equal(mining_info['blocks'], 200)
        assert_equal(mining_info['chain'], self.chain)
        assert 'currentblocktx' not in mining_info
        assert 'currentblocksize' not in mining_info
        assert_equal(mining_info['difficulty'],
                     Decimal('4.656542373906925E-10'))
        assert_equal(mining_info['networkhashps'],
                     Decimal('0.003333333333333334'))
        assert_equal(mining_info['pooledtx'], 0)

        # Mine a block to leave initial block download
        node.generatetoaddress(1, node.get_deterministic_priv_key().address)
        tmpl = node.getblocktemplate()
        self.log.info("getblocktemplate: Test capability advertised")
        assert 'proposal' in tmpl['capabilities']

        next_height = int(tmpl["height"])
        coinbase_tx = create_coinbase(height=next_height)
        # sequence numbers must not be max for nLockTime to have effect
        coinbase_tx.vin[0].nSequence = 2**32 - 2
        coinbase_tx.rehash()

        block = CBlock()
        block.nHeaderVersion = tmpl["version"]
        block.hashPrevBlock = int(tmpl["previousblockhash"], 16)
        block.nTime = tmpl["curtime"]
        block.nBits = int(tmpl["bits"], 16)
        block.nNonce = 0
        block.nHeight = next_height
        block.vtx = [coinbase_tx]
        prepare_block(block)

        self.log.info("getblocktemplate: Test valid block")
        assert_template(node, block, None)

        self.log.info("submitblock: Test block decode failure")
        assert_raises_rpc_error(-22, "Block decode failed", node.submitblock,
                                block.serialize()[:-15].hex())

        self.log.info(
            "getblocktemplate: Test bad input hash for coinbase transaction")
        bad_block = copy.deepcopy(block)
        bad_block.vtx[0].vin[0].prevout.hash += 1
        bad_block.vtx[0].rehash()
        assert_template(node, bad_block, 'bad-cb-missing')

        self.log.info("submitblock: Test invalid coinbase transaction")
        assert_raises_rpc_error(-22, "Block does not start with a coinbase",
                                node.submitblock,
                                bad_block.serialize().hex())

        self.log.info("getblocktemplate: Test truncated final transaction")
        assert_raises_rpc_error(-22, "Block decode failed",
                                node.getblocktemplate, {
                                    'data': block.serialize()[:-1].hex(),
                                    'mode': 'proposal'
                                })

        self.log.info(
            "getblocktemplate: Test duplicate transaction, results in 'bad-tx-coinbase'"
        )
        bad_block = copy.deepcopy(block)
        bad_block.vtx.append(bad_block.vtx[0])
        prepare_block(bad_block)
        assert_template(node, bad_block, 'bad-tx-coinbase')
        assert_submitblock(bad_block, 'bad-tx-coinbase', 'bad-tx-coinbase')

        self.log.info("getblocktemplate: Test invalid transaction")
        bad_block = copy.deepcopy(block)
        bad_tx = copy.deepcopy(bad_block.vtx[0])
        bad_tx.vin[0].prevout.hash = 255
        bad_tx.rehash()
        bad_block.vtx.append(bad_tx)
        prepare_block(bad_block)
        assert_template(node, bad_block, 'bad-txns-inputs-missingorspent')
        assert_submitblock(bad_block, 'bad-txns-inputs-missingorspent')

        self.log.info("getblocktemplate: Test nonfinal transaction")
        bad_block = copy.deepcopy(block)
        bad_block.vtx[0].nLockTime = 2**32 - 1
        bad_block.vtx[0].rehash()
        assert_template(node, bad_block, 'bad-txns-nonfinal')
        assert_submitblock(bad_block, 'bad-txns-nonfinal')

        self.log.info("getblocktemplate: Test bad tx count")
        # The tx count is immediately after the block header
        bad_block_sn = bytearray(block.serialize())
        # Check metadata is empty (for now)
        assert_equal(bad_block_sn[BLOCK_HEADER_SIZE], 0)
        # Check num txs = 1, which comes after the header and metadata
        assert_equal(bad_block_sn[BLOCK_HEADER_SIZE + 1], 1)
        # Increase num txs by 1 artificially
        bad_block_sn[BLOCK_HEADER_SIZE + 1] += 1
        assert_raises_rpc_error(-22, "Block decode failed",
                                node.getblocktemplate, {
                                    'data': bad_block_sn.hex(),
                                    'mode': 'proposal'
                                })

        self.log.info("getblocktemplate: Test bad bits")
        bad_block = copy.deepcopy(block)
        bad_block.nBits = 469762303  # impossible in the real world
        assert_template(node, bad_block, 'bad-diffbits')

        self.log.info("getblocktemplate: Test bad merkle root")
        bad_block = copy.deepcopy(block)
        bad_block.hashMerkleRoot += 1
        assert_template(node, bad_block, 'bad-txnmrklroot', False)
        assert_submitblock(bad_block, 'bad-txnmrklroot', 'bad-txnmrklroot')

        self.log.info("getblocktemplate: Test bad timestamps")
        bad_block = copy.deepcopy(block)
        bad_block.nTime = 2**31 - 1
        assert_template(node, bad_block, 'time-too-new')
        assert_submitblock(bad_block, 'time-too-new', 'time-too-new')
        bad_block.nTime = 0
        assert_template(node, bad_block, 'time-too-old')
        assert_submitblock(bad_block, 'time-too-old', 'time-too-old')

        self.log.info("getblocktemplate: Test not best block")
        bad_block = copy.deepcopy(block)
        bad_block.hashPrevBlock = 123
        assert_template(node, bad_block, 'inconclusive-not-best-prevblk')
        assert_submitblock(bad_block, 'prev-blk-not-found',
                           'prev-blk-not-found')

        self.log.info('submitheader tests')
        assert_raises_rpc_error(
            -22, 'Block header decode failed',
            lambda: node.submitheader(hexdata='xx' * BLOCK_HEADER_SIZE))
        assert_raises_rpc_error(
            -22, 'Block header decode failed',
            lambda: node.submitheader(hexdata='ff' * (BLOCK_HEADER_SIZE - 2)))
        assert_raises_rpc_error(
            -25, 'Must submit previous header', lambda: node.submitheader(
                hexdata=super(CBlock, bad_block).serialize().hex()))

        block.nTime += 1
        block.solve()

        def chain_tip(b_hash, *, status='headers-only', branchlen=1):
            return {
                'hash': b_hash,
                'height': 202,
                'branchlen': branchlen,
                'status': status
            }

        assert chain_tip(block.hash) not in node.getchaintips()
        node.submitheader(hexdata=block.serialize().hex())
        assert chain_tip(block.hash) in node.getchaintips()
        # Noop
        node.submitheader(hexdata=CBlockHeader(block).serialize().hex())
        assert chain_tip(block.hash) in node.getchaintips()

        bad_block_root = copy.deepcopy(block)
        bad_block_root.hashMerkleRoot += 2
        bad_block_root.solve()
        assert chain_tip(bad_block_root.hash) not in node.getchaintips()
        node.submitheader(
            hexdata=CBlockHeader(bad_block_root).serialize().hex())
        assert chain_tip(bad_block_root.hash) in node.getchaintips()
        # Should still reject invalid blocks, even if we have the header:
        assert_equal(
            node.submitblock(hexdata=bad_block_root.serialize().hex()),
            'bad-txnmrklroot')
        assert_equal(
            node.submitblock(hexdata=bad_block_root.serialize().hex()),
            'bad-txnmrklroot')
        assert chain_tip(bad_block_root.hash) in node.getchaintips()
        # We know the header for this invalid block, so should just return
        # early without error:
        node.submitheader(
            hexdata=CBlockHeader(bad_block_root).serialize().hex())
        assert chain_tip(bad_block_root.hash) in node.getchaintips()

        bad_block_lock = copy.deepcopy(block)
        bad_block_lock.vtx[0].nLockTime = 2**32 - 1
        bad_block_lock.vtx[0].rehash()
        bad_block_lock.hashMerkleRoot = bad_block_lock.calc_merkle_root()
        bad_block_lock.solve()
        assert_equal(
            node.submitblock(hexdata=bad_block_lock.serialize().hex()),
            'bad-txns-nonfinal')
        assert_equal(
            node.submitblock(hexdata=bad_block_lock.serialize().hex()),
            'duplicate-invalid')
        # Build a "good" block on top of the submitted bad block
        bad_block2 = copy.deepcopy(block)
        bad_block2.hashPrevBlock = bad_block_lock.sha256
        bad_block2.solve()
        assert_raises_rpc_error(
            -25, 'bad-prevblk', lambda: node.submitheader(hexdata=CBlockHeader(
                bad_block2).serialize().hex()))

        # Should reject invalid header right away
        bad_block_time = copy.deepcopy(block)
        bad_block_time.nTime = 1
        bad_block_time.solve()
        assert_raises_rpc_error(
            -25, 'time-too-old', lambda: node.submitheader(
                hexdata=CBlockHeader(bad_block_time).serialize().hex()))

        # Should ask for the block from a p2p node, if they announce the header
        # as well:
        node.add_p2p_connection(P2PDataStore())
        # Drop the first getheaders
        node.p2p.wait_for_getheaders(timeout=5)
        node.p2p.send_blocks_and_test(blocks=[block], node=node)
        # Must be active now:
        assert chain_tip(block.hash, status='active',
                         branchlen=0) in node.getchaintips()

        # Building a few blocks should give the same results
        node.generatetoaddress(10, node.get_deterministic_priv_key().address)
        assert_raises_rpc_error(
            -25, 'time-too-old', lambda: node.submitheader(
                hexdata=CBlockHeader(bad_block_time).serialize().hex()))
        assert_raises_rpc_error(
            -25, 'bad-prevblk', lambda: node.submitheader(hexdata=CBlockHeader(
                bad_block2).serialize().hex()))
        node.submitheader(hexdata=CBlockHeader(block).serialize().hex())
        node.submitheader(
            hexdata=CBlockHeader(bad_block_root).serialize().hex())
        # valid
        assert_equal(node.submitblock(hexdata=block.serialize().hex()),
                     'duplicate')

        # Sanity check that maxtries supports large integers
        node.generatetoaddress(1,
                               node.get_deterministic_priv_key().address,
                               pow(2, 32))
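
Note: assert_template is a helper from the test framework that is not shown above. A minimal sketch, assuming it mirrors the upstream Bitcoin Core helper (submit the block as a getblocktemplate proposal and compare the response against the expected reject reason, or None for a valid block), is:

def assert_template(node, block, expect, rehash=True):
    if rehash:
        block.hashMerkleRoot = block.calc_merkle_root()
    rsp = node.getblocktemplate({
        'data': block.serialize().hex(),
        'mode': 'proposal'
    })
    assert_equal(rsp, expect)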
Example #24
0
    def test_nonnull_locators(self, test_node, inv_node):
        tip = int(self.nodes[0].getbestblockhash(), 16)

        # PART 1
        # 1. Mine a block; expect inv announcements each time
        self.log.info(
            "Part 1: headers don't start before sendheaders message...")
        for i in range(4):
            self.log.debug("Part 1.{}: starting...".format(i))
            old_tip = tip
            tip = self.mine_blocks(1)
            inv_node.check_last_inv_announcement(inv=[tip])
            test_node.check_last_inv_announcement(inv=[tip])
            # Try a few different responses; none should affect next
            # announcement
            if i == 0:
                # first request the block
                test_node.send_get_data([tip])
                test_node.wait_for_block(tip)
            elif i == 1:
                # next try requesting header and block
                test_node.send_get_headers(locator=[old_tip], hashstop=tip)
                test_node.send_get_data([tip])
                test_node.wait_for_block(tip)
                # since we requested headers...
                test_node.clear_block_announcements()
            elif i == 2:
                # this time announce own block via headers
                inv_node.clear_block_announcements()
                height = self.nodes[0].getblockcount()
                last_time = self.nodes[0].getblock(
                    self.nodes[0].getbestblockhash())['time']
                block_time = last_time + 1
                new_block = create_block(tip, create_coinbase(height + 1),
                                         block_time)
                new_block.nHeight = height + 1
                prepare_block(new_block)
                test_node.send_header_for_blocks([new_block])
                test_node.wait_for_getdata([new_block.sha256])
                # make sure this block is processed
                test_node.send_and_ping(msg_block(new_block))
                wait_until(lambda: inv_node.block_announced,
                           timeout=60,
                           lock=mininode_lock)
                inv_node.clear_block_announcements()
                test_node.clear_block_announcements()

        self.log.info("Part 1: success!")
        self.log.info(
            "Part 2: announce blocks with headers after sendheaders message..."
        )
        # PART 2
        # 2. Send a sendheaders message and test that headers announcements
        # commence and keep working.
        test_node.send_message(msg_sendheaders())
        prev_tip = int(self.nodes[0].getbestblockhash(), 16)
        test_node.send_get_headers(locator=[prev_tip], hashstop=0)
        test_node.sync_with_ping()

        # Now that we've synced headers, headers announcements should work
        tip = self.mine_blocks(1)
        inv_node.check_last_inv_announcement(inv=[tip])
        test_node.check_last_headers_announcement(headers=[tip])

        height = self.nodes[0].getblockcount() + 1
        block_time += 10  # Advance far enough ahead
        for i in range(10):
            self.log.debug("Part 2.{}: starting...".format(i))
            # Mine i blocks, and alternate announcing either via
            # inv (of tip) or via headers. After each, new blocks
            # mined by the node should successfully be announced
            # with block header, even though the blocks are never requested
            for j in range(2):
                self.log.debug("Part 2.{}.{}: starting...".format(i, j))
                blocks = []
                for b in range(i + 1):
                    blocks.append(
                        create_block(tip, create_coinbase(height), block_time))
                    blocks[-1].nHeight = height
                    prepare_block(blocks[-1])
                    tip = blocks[-1].sha256
                    block_time += 1
                    height += 1
                if j == 0:
                    # Announce via inv
                    test_node.send_block_inv(tip)
                    test_node.wait_for_getheaders()
                    # Should have received a getheaders now
                    test_node.send_header_for_blocks(blocks)
                    # Test that duplicate inv's won't result in duplicate
                    # getdata requests, or duplicate headers announcements
                    [inv_node.send_block_inv(x.sha256) for x in blocks]
                    test_node.wait_for_getdata([x.sha256 for x in blocks])
                    inv_node.sync_with_ping()
                else:
                    # Announce via headers
                    test_node.send_header_for_blocks(blocks)
                    test_node.wait_for_getdata([x.sha256 for x in blocks])
                    # Test that duplicate headers won't result in duplicate
                    # getdata requests (the check is further down)
                    inv_node.send_header_for_blocks(blocks)
                    inv_node.sync_with_ping()
                [test_node.send_message(msg_block(x)) for x in blocks]
                test_node.sync_with_ping()
                inv_node.sync_with_ping()
                # This block should not be announced to the inv node (since it also
                # broadcast it)
                assert "inv" not in inv_node.last_message
                assert "headers" not in inv_node.last_message
                tip = self.mine_blocks(1)
                inv_node.check_last_inv_announcement(inv=[tip])
                test_node.check_last_headers_announcement(headers=[tip])
                height += 1
                block_time += 1

        self.log.info("Part 2: success!")

        self.log.info(
            "Part 3: headers announcements can stop after large reorg, and resume after headers/inv from peer..."
        )

        # PART 3.  Headers announcements can stop after large reorg, and resume after
        # getheaders or inv from peer.
        for j in range(2):
            self.log.debug("Part 3.{}: starting...".format(j))
            # First try mining a reorg that can propagate with header
            # announcement
            new_block_hashes = self.mine_reorg(length=7)
            tip = new_block_hashes[-1]
            inv_node.check_last_inv_announcement(inv=[tip])
            test_node.check_last_headers_announcement(headers=new_block_hashes)

            block_time += 8

            # Mine a too-large reorg, which should be announced with a single
            # inv
            new_block_hashes = self.mine_reorg(length=8)
            tip = new_block_hashes[-1]
            inv_node.check_last_inv_announcement(inv=[tip])
            test_node.check_last_inv_announcement(inv=[tip])

            block_time += 9

            fork_point = self.nodes[0].getblock("{:064x}".format(
                new_block_hashes[0]))["previousblockhash"]
            fork_point = int(fork_point, 16)

            # Use getblocks/getdata
            test_node.send_getblocks(locator=[fork_point])
            test_node.check_last_inv_announcement(inv=new_block_hashes)
            test_node.send_get_data(new_block_hashes)
            test_node.wait_for_block(new_block_hashes[-1])

            for i in range(3):
                self.log.debug("Part 3.{}.{}: starting...".format(j, i))

                # Mine another block, still should get only an inv
                tip = self.mine_blocks(1)
                inv_node.check_last_inv_announcement(inv=[tip])
                test_node.check_last_inv_announcement(inv=[tip])
                if i == 0:
                    # Just get the data -- shouldn't cause headers
                    # announcements to resume
                    test_node.send_get_data([tip])
                    test_node.wait_for_block(tip)
                elif i == 1:
                    # Send a getheaders message that shouldn't trigger headers announcements
                    # to resume (best header sent will be too old)
                    test_node.send_get_headers(locator=[fork_point],
                                               hashstop=new_block_hashes[1])
                    test_node.send_get_data([tip])
                    test_node.wait_for_block(tip)
                elif i == 2:
                    # This time, try sending either a getheaders to trigger resumption
                    # of headers announcements, or mine a new block and inv it, also
                    # triggering resumption of headers announcements.
                    test_node.send_get_data([tip])
                    test_node.wait_for_block(tip)
                    if j == 0:
                        test_node.send_get_headers(locator=[tip], hashstop=0)
                        test_node.sync_with_ping()
                    else:
                        test_node.send_block_inv(tip)
                        test_node.sync_with_ping()
            # New blocks should now be announced with header
            tip = self.mine_blocks(1)
            inv_node.check_last_inv_announcement(inv=[tip])
            test_node.check_last_headers_announcement(headers=[tip])

        self.log.info("Part 3: success!")

        self.log.info("Part 4: Testing direct fetch behavior...")
        tip = self.mine_blocks(1)
        height = self.nodes[0].getblockcount() + 1
        last_time = self.nodes[0].getblock(
            self.nodes[0].getbestblockhash())['time']
        block_time = last_time + 1

        # Create 2 blocks.  Send the blocks, then send the headers.
        blocks = []
        for b in range(2):
            blocks.append(
                create_block(tip, create_coinbase(height), block_time))
            blocks[-1].nHeight = height
            prepare_block(blocks[-1])
            tip = blocks[-1].sha256
            block_time += 1
            height += 1
            inv_node.send_message(msg_block(blocks[-1]))

        inv_node.sync_with_ping()  # Make sure blocks are processed
        test_node.last_message.pop("getdata", None)
        test_node.send_header_for_blocks(blocks)
        test_node.sync_with_ping()
        # should not have received any getdata messages
        with mininode_lock:
            assert "getdata" not in test_node.last_message

        # This time, direct fetch should work
        blocks = []
        for b in range(3):
            blocks.append(
                create_block(tip, create_coinbase(height), block_time))
            blocks[-1].nHeight = height
            prepare_block(blocks[-1])
            tip = blocks[-1].sha256
            block_time += 1
            height += 1

        test_node.send_header_for_blocks(blocks)
        test_node.sync_with_ping()
        test_node.wait_for_getdata([x.sha256 for x in blocks],
                                   timeout=DIRECT_FETCH_RESPONSE_TIME)

        [test_node.send_message(msg_block(x)) for x in blocks]

        test_node.sync_with_ping()

        # Now announce a header that forks the last two blocks
        tip = blocks[0].sha256
        height -= 2
        blocks = []

        # Create extra blocks for later
        for b in range(20):
            blocks.append(
                create_block(tip, create_coinbase(height), block_time))
            blocks[-1].nHeight = height
            prepare_block(blocks[-1])
            tip = blocks[-1].sha256
            block_time += 1
            height += 1

        # Announcing one block on fork should not trigger direct fetch
        # (less work than tip)
        test_node.last_message.pop("getdata", None)
        test_node.send_header_for_blocks(blocks[0:1])
        test_node.sync_with_ping()
        with mininode_lock:
            assert "getdata" not in test_node.last_message

        # Announcing one more block on fork should trigger direct fetch for
        # both blocks (same work as tip)
        test_node.send_header_for_blocks(blocks[1:2])
        test_node.sync_with_ping()
        test_node.wait_for_getdata([x.sha256 for x in blocks[0:2]],
                                   timeout=DIRECT_FETCH_RESPONSE_TIME)

        # Announcing 16 more headers should trigger direct fetch for 14 more
        # blocks
        test_node.send_header_for_blocks(blocks[2:18])
        test_node.sync_with_ping()
        test_node.wait_for_getdata([x.sha256 for x in blocks[2:16]],
                                   timeout=DIRECT_FETCH_RESPONSE_TIME)

        # Announcing 1 more header should not trigger any response
        test_node.last_message.pop("getdata", None)
        test_node.send_header_for_blocks(blocks[18:19])
        test_node.sync_with_ping()
        with mininode_lock:
            assert "getdata" not in test_node.last_message

        self.log.info("Part 4: success!")

        # Now deliver all those blocks we announced.
        [test_node.send_message(msg_block(x)) for x in blocks]

        self.log.info("Part 5: Testing handling of unconnecting headers")
        # First we test that receipt of an unconnecting header doesn't prevent
        # chain sync.
        for i in range(10):
            self.log.debug("Part 5.{}: starting...".format(i))
            test_node.last_message.pop("getdata", None)
            blocks = []
            # Create two more blocks.
            for j in range(2):
                blocks.append(
                    create_block(tip, create_coinbase(height), block_time))
                blocks[-1].nHeight = height
                prepare_block(blocks[-1])
                tip = blocks[-1].sha256
                block_time += 1
                height += 1
            # Send the header of the second block -> this won't connect.
            with mininode_lock:
                test_node.last_message.pop("getheaders", None)
            test_node.send_header_for_blocks([blocks[1]])
            test_node.wait_for_getheaders()
            test_node.send_header_for_blocks(blocks)
            test_node.wait_for_getdata([x.sha256 for x in blocks])
            [test_node.send_message(msg_block(x)) for x in blocks]
            test_node.sync_with_ping()
            assert_equal(int(self.nodes[0].getbestblockhash(), 16),
                         blocks[1].sha256)

        blocks = []
        # Now we test that if we repeatedly don't send connecting headers, we
        # don't go into an infinite loop trying to get them to connect.
        MAX_UNCONNECTING_HEADERS = 10
        for j in range(MAX_UNCONNECTING_HEADERS + 1):
            blocks.append(
                create_block(tip, create_coinbase(height), block_time))
            blocks[-1].nHeight = height
            prepare_block(blocks[-1])
            tip = blocks[-1].sha256
            block_time += 1
            height += 1

        for i in range(1, MAX_UNCONNECTING_HEADERS):
            # Send a header that doesn't connect, check that we get a
            # getheaders.
            with mininode_lock:
                test_node.last_message.pop("getheaders", None)
            test_node.send_header_for_blocks([blocks[i]])
            test_node.wait_for_getheaders()

        # Next header will connect, should re-set our count:
        test_node.send_header_for_blocks([blocks[0]])

        # Remove the first two entries (blocks[1] would connect):
        blocks = blocks[2:]

        # Now try to see how many unconnecting headers we can send
        # before we get disconnected.  Should be 5*MAX_UNCONNECTING_HEADERS
        for i in range(5 * MAX_UNCONNECTING_HEADERS - 1):
            # Send a header that doesn't connect, check that we get a
            # getheaders.
            with mininode_lock:
                test_node.last_message.pop("getheaders", None)
            test_node.send_header_for_blocks([blocks[i % len(blocks)]])
            test_node.wait_for_getheaders()

        # Eventually this stops working.
        test_node.send_header_for_blocks([blocks[-1]])

        # Should get disconnected
        test_node.wait_for_disconnect()

        self.log.info("Part 5: success!")

        # Finally, check that the inv node never received a getdata request,
        # throughout the test
        assert "getdata" not in inv_node.last_message
Example #25
0
    def run_test(self):
        p2p0 = self.nodes[0].add_p2p_connection(BaseNode())

        # Build the blockchain
        self.tip = int(self.nodes[0].getbestblockhash(), 16)
        self.block_time = self.nodes[0].getblock(
            self.nodes[0].getbestblockhash())['time'] + 1

        self.blocks = []

        # Get a pubkey for the coinbase TXO
        coinbase_key = ECKey()
        coinbase_key.generate()
        coinbase_pubkey = coinbase_key.get_pubkey().get_bytes()

        # Create the first block with a coinbase output to our key
        height = 1
        block = create_block(self.tip, create_coinbase(height,
                                                       coinbase_pubkey),
                             self.block_time)
        block.nHeight = height
        self.blocks.append(block)
        self.block_time += 1
        prepare_block(block)
        # Save the coinbase for later
        self.block1 = block
        self.tip = block.sha256
        height += 1

        # Bury the block 100 deep so the coinbase output is spendable
        for i in range(100):
            block = create_block(self.tip, create_coinbase(height),
                                 self.block_time)
            block.nHeight = height
            prepare_block(block)
            self.blocks.append(block)
            self.tip = block.sha256
            self.block_time += 1
            height += 1

        # Create a transaction spending the coinbase output with an invalid
        # (null) signature
        tx = CTransaction()
        tx.vin.append(
            CTxIn(COutPoint(self.block1.vtx[0].txid, 1), scriptSig=b""))
        tx.vout.append(CTxOut(int(SUBSIDY * COIN), CScript([OP_TRUE])))
        pad_tx(tx)
        tx.rehash()

        block102 = create_block(self.tip, create_coinbase(height),
                                self.block_time)
        block102.nHeight = height
        self.block_time += 1
        block102.vtx.extend([tx])
        prepare_block(block102)
        self.blocks.append(block102)
        self.tip = block102.sha256
        self.block_time += 1
        height += 1

        # Bury the assumed valid block 3700 deep
        for i in range(3700):
            block = create_block(self.tip, create_coinbase(height),
                                 self.block_time)
            block.nHeight = height
            prepare_block(block)
            self.blocks.append(block)
            self.tip = block.sha256
            self.block_time += 1
            height += 1

        self.nodes[0].disconnect_p2ps()

        # Start node1 and node2 with assumevalid so they accept a block with a
        # bad signature.
        self.start_node(1,
                        extra_args=["-assumevalid=" + hex(block102.sha256)] +
                        required_args)
        self.start_node(2,
                        extra_args=["-assumevalid=" + hex(block102.sha256)] +
                        required_args)

        p2p0 = self.nodes[0].add_p2p_connection(BaseNode())
        p2p1 = self.nodes[1].add_p2p_connection(BaseNode())
        p2p2 = self.nodes[2].add_p2p_connection(BaseNode())

        # send header lists to all three nodes
        p2p0.send_header_for_blocks(self.blocks[0:2000])
        p2p0.send_header_for_blocks(self.blocks[2000:])
        p2p1.send_header_for_blocks(self.blocks[0:2000])
        p2p1.send_header_for_blocks(self.blocks[2000:])
        p2p2.send_header_for_blocks(self.blocks[0:200])

        # Send blocks to node0. Block 102 will be rejected.
        self.send_blocks_until_disconnected(p2p0)
        self.assert_blockchain_height(self.nodes[0], 101)

        # Send all blocks to node1. All blocks will be accepted.
        for i in range(3802):
            p2p1.send_message(msg_block(self.blocks[i]))
        # Syncing 3800 blocks can take a while on slow systems. Give it plenty
        # of time to sync.
        p2p1.sync_with_ping(960)
        assert_equal(
            self.nodes[1].getblock(self.nodes[1].getbestblockhash())['height'],
            3802)

        # Send blocks to node2. Block 102 will be rejected.
        self.send_blocks_until_disconnected(p2p2)
        self.assert_blockchain_height(self.nodes[2], 101)
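
Note: send_blocks_until_disconnected and assert_blockchain_height are helper methods not shown in this snippet. A minimal sketch of the former, assuming it matches the upstream feature_assumevalid.py helper (keep feeding blocks until the peer connection drops), is:

    def send_blocks_until_disconnected(self, p2p_conn):
        """Keep sending blocks to the node until we're disconnected."""
        for i in range(len(self.blocks)):
            if not p2p_conn.is_connected:
                break
            try:
                p2p_conn.send_message(msg_block(self.blocks[i]))
            except IOError:
                assert not p2p_conn.is_connected
                break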
Example #26
0
    def create_test_block_spend_utxos(self, node, txs):
        block = self.create_test_block(txs)
        block.vtx.extend([spend_tx(node, tx, self.nodeaddress) for tx in txs])
        prepare_block(block)
        return block
Example #27
0
    def run_test(self):
        # Setup the p2p connections
        # test_node connects to node0 (not whitelisted)
        test_node = self.nodes[0].add_p2p_connection(P2PInterface())
        # min_work_node connects to node1 (whitelisted)
        min_work_node = self.nodes[1].add_p2p_connection(P2PInterface())

        # 1. Have nodes mine a block (leave IBD)
        [
            n.generatetoaddress(1,
                                n.get_deterministic_priv_key().address)
            for n in self.nodes
        ]
        tips = [int("0x" + n.getbestblockhash(), 0) for n in self.nodes]

        # 2. Send one block that builds on each tip.
        # This should be accepted by node0
        blocks_h2 = []  # the height 2 blocks on each node's chain
        block_time = int(time.time()) + 1
        for i in range(2):
            blocks_h2.append(
                create_block(tips[i], create_coinbase(2), block_time))
            blocks_h2[i].nHeight = 2
            prepare_block(blocks_h2[i])
            block_time += 1
        test_node.send_and_ping(msg_block(blocks_h2[0]))
        min_work_node.send_and_ping(msg_block(blocks_h2[1]))

        assert_equal(self.nodes[0].getblockcount(), 2)
        assert_equal(self.nodes[1].getblockcount(), 1)
        self.log.info(
            "First height 2 block accepted by node0; correctly rejected by node1"
        )

        # 3. Send another block that builds on genesis.
        block_h1f = create_block(int("0x" + self.nodes[0].getblockhash(0), 0),
                                 create_coinbase(1), block_time)
        block_h1f.nHeight = 1
        prepare_block(block_h1f)
        block_time += 1
        test_node.send_and_ping(msg_block(block_h1f))

        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_h1f.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert tip_entry_found
        assert_raises_rpc_error(-1, "Block not found on disk",
                                self.nodes[0].getblock, block_h1f.hash)

        # 4. Send another two blocks that build on the fork.
        block_h2f = create_block(block_h1f.sha256, create_coinbase(2),
                                 block_time)
        block_h2f.nHeight = 2
        prepare_block(block_h2f)
        block_time += 1
        test_node.send_and_ping(msg_block(block_h2f))

        # Since the earlier block was not processed by the node, the new block
        # can't be fully validated.
        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_h2f.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert tip_entry_found

        # But this block should be accepted by the node since it has equal work.
        self.nodes[0].getblock(block_h2f.hash)
        self.log.info("Second height 2 block accepted, but not reorg'ed to")

        # 4b. Now send another block that builds on the forking chain.
        block_h3 = create_block(block_h2f.sha256, create_coinbase(3),
                                block_h2f.nTime + 1)
        block_h3.nHeight = 3
        prepare_block(block_h3)
        test_node.send_and_ping(msg_block(block_h3))

        # Since the earlier block was not processed by the node, the new block
        # can't be fully validated.
        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_h3.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert tip_entry_found
        self.nodes[0].getblock(block_h3.hash)

        # But this block should be accepted by the node since it has more work.
        self.nodes[0].getblock(block_h3.hash)
        self.log.info("Unrequested more-work block accepted")

        # 4c. Now mine 288 more blocks and deliver; all should be processed by
        # the node but the last (height-too-high), as long as it is not missing
        # any headers.
        tip = block_h3
        all_blocks = []
        for i in range(288):
            height = i + 4
            next_block = create_block(tip.sha256, create_coinbase(i + 4),
                                      tip.nTime + 1)
            next_block.nHeight = height
            prepare_block(next_block)
            all_blocks.append(next_block)
            tip = next_block

        # Now send the block at height 5 and check that it wasn't accepted
        # (missing header)
        test_node.send_and_ping(msg_block(all_blocks[1]))
        assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblock,
                                all_blocks[1].hash)
        assert_raises_rpc_error(-5, "Block not found",
                                self.nodes[0].getblockheader,
                                all_blocks[1].hash)

        # The block at height 5 should be accepted if we provide the missing
        # header, though
        headers_message = msg_headers()
        headers_message.headers.append(CBlockHeader(all_blocks[0]))
        test_node.send_message(headers_message)
        test_node.send_and_ping(msg_block(all_blocks[1]))
        self.nodes[0].getblock(all_blocks[1].hash)

        # Now send the blocks in all_blocks
        for i in range(288):
            test_node.send_message(msg_block(all_blocks[i]))
        test_node.sync_with_ping()

        # Blocks 1-287 should be accepted, block 288 should be ignored because
        # it's too far ahead
        for x in all_blocks[:-1]:
            self.nodes[0].getblock(x.hash)
        assert_raises_rpc_error(-1, "Block not found on disk",
                                self.nodes[0].getblock, all_blocks[-1].hash)

        # 5. Test handling of unrequested block on the node that didn't process
        # Should still not be processed (even though it has a child that has more
        # work).

        # The node should have requested the blocks at some point, so
        # disconnect/reconnect first
        self.nodes[0].disconnect_p2ps()
        self.nodes[1].disconnect_p2ps()

        test_node = self.nodes[0].add_p2p_connection(P2PInterface())

        test_node.send_and_ping(msg_block(block_h1f))
        assert_equal(self.nodes[0].getblockcount(), 2)
        self.log.info(
            "Unrequested block that would complete more-work chain was ignored"
        )

        # 6. Try to get node to request the missing block.
        # Poke the node with an inv for block at height 3 and see if that
        # triggers a getdata on block 2 (it should if block 2 is missing).
        with mininode_lock:
            # Clear state so we can check the getdata request
            test_node.last_message.pop("getdata", None)
            test_node.send_message(msg_inv([CInv(MSG_BLOCK, block_h3.sha256)]))

        test_node.sync_with_ping()
        with mininode_lock:
            getdata = test_node.last_message["getdata"]

        # Check that the getdata includes the right block
        assert_equal(getdata.inv[0].hash, block_h1f.sha256)
        self.log.info("Inv at tip triggered getdata for unprocessed block")

        # 7. Send the missing block for the third time (now it is requested)
        test_node.send_and_ping(msg_block(block_h1f))
        assert_equal(self.nodes[0].getblockcount(), 290)
        self.nodes[0].getblock(all_blocks[286].hash)
        assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash)
        assert_raises_rpc_error(-1, "Block not found on disk",
                                self.nodes[0].getblock, all_blocks[287].hash)
        self.log.info(
            "Successfully reorged to longer chain from non-whitelisted peer")

        # 8. Create a chain which is invalid at a height longer than the
        # current chain, but which has more blocks on top of that
        block_289f = create_block(all_blocks[284].sha256, create_coinbase(289),
                                  all_blocks[284].nTime + 1)
        block_289f.nHeight = 289
        prepare_block(block_289f)

        block_290f = create_block(block_289f.sha256, create_coinbase(290),
                                  block_289f.nTime + 1)
        block_290f.nHeight = 290
        prepare_block(block_290f)

        block_291 = create_block(block_290f.sha256, create_coinbase(291),
                                 block_290f.nTime + 1)
        block_291.nHeight = 291
        # block_291 spends a coinbase below maturity!
        block_291.vtx.append(
            create_tx_with_script(block_290f.vtx[0],
                                  0,
                                  script_sig=b"42",
                                  amount=1))
        prepare_block(block_291)

        block_292 = create_block(block_291.sha256, create_coinbase(292),
                                 block_291.nTime + 1)
        block_292.nHeight = 292
        prepare_block(block_292)

        # Now send all the headers on the chain and enough blocks to trigger
        # reorg
        headers_message = msg_headers()
        headers_message.headers.append(CBlockHeader(block_289f))
        headers_message.headers.append(CBlockHeader(block_290f))
        headers_message.headers.append(CBlockHeader(block_291))
        headers_message.headers.append(CBlockHeader(block_292))
        test_node.send_and_ping(headers_message)

        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_292.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert tip_entry_found
        assert_raises_rpc_error(-1, "Block not found on disk",
                                self.nodes[0].getblock, block_292.hash)

        test_node.send_message(msg_block(block_289f))
        test_node.send_and_ping(msg_block(block_290f))

        self.nodes[0].getblock(block_289f.hash)
        self.nodes[0].getblock(block_290f.hash)

        test_node.send_message(msg_block(block_291))

        # At this point we've sent an obviously-bogus block, wait for full processing
        # without assuming whether we will be disconnected or not
        try:
            # Only wait a short while so the test doesn't take forever if we do get
            # disconnected
            test_node.sync_with_ping(timeout=1)
        except AssertionError:
            test_node.wait_for_disconnect()

            self.nodes[0].disconnect_p2ps()
            test_node = self.nodes[0].add_p2p_connection(P2PInterface())

        # We should have failed reorg and switched back to 290 (but have block
        # 291)
        assert_equal(self.nodes[0].getblockcount(), 290)
        assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash)
        assert_equal(self.nodes[0].getblock(block_291.hash)["confirmations"],
                     -1)

        # Now send a new header on the invalid chain, indicating we're forked
        # off, and expect to get disconnected
        block_293 = create_block(block_292.sha256, create_coinbase(293),
                                 block_292.nTime + 1)
        block_293.nHeight = 293
        prepare_block(block_293)
        headers_message = msg_headers()
        headers_message.headers.append(CBlockHeader(block_293))
        test_node.send_message(headers_message)
        test_node.wait_for_disconnect()

        # 9. Connect node1 to node0 and ensure it is able to sync
        connect_nodes(self.nodes[0], self.nodes[1])
        self.sync_blocks([self.nodes[0], self.nodes[1]])
        self.log.info("Successfully synced nodes 1 and 0")
Example #28
0
    def run_test(self):
        # Add p2p connection to node0
        node = self.nodes[0]  # convenience reference to the node
        node.add_p2p_connection(P2PDataStore())

        best_block = node.getblock(node.getbestblockhash())
        tip = int(node.getbestblockhash(), 16)
        height = best_block["height"] + 1
        block_time = best_block["time"] + 1

        self.log.info("Create a new block with an anyone-can-spend coinbase")

        height = 1
        block = create_block(tip, create_coinbase(height), block_time)
        block.nHeight = height
        prepare_block(block)
        # Save the coinbase for later
        block1 = block
        tip = block.sha256
        node.p2p.send_blocks_and_test([block1], node, success=True)

        self.log.info("Mature the block.")
        node.generatetoaddress(100, node.get_deterministic_priv_key().address)

        best_block = node.getblock(node.getbestblockhash())
        tip = int(node.getbestblockhash(), 16)
        height = best_block["height"] + 1
        block_time = best_block["time"] + 1

        # Use merkle-root malleability to try to generate an invalid block with
        # the same blockheader (CVE-2012-2459).
        # Manufacture a block with 3 transactions (coinbase, spend of prior
        # coinbase, spend of that spend). Under the original Bitcoin merkle
        # algorithm, duplicating the 3rd transaction leaves the merkle root and
        # blockheader unchanged while invalidating the block; Lotus fixed this,
        # so the mutation below yields a different merkle root.
        # For more information on merkle-root malleability see
        # src/consensus/merkle.cpp.
        self.log.info("Test merkle root malleability.")

        block2 = create_block(tip, create_coinbase(height), block_time)
        block2.nHeight = height
        block_time += 1

        # b'\x51' is OP_TRUE
        tx1 = create_tx_with_script(
            block1.vtx[0], 1, script_sig=b'', amount=int(SUBSIDY * COIN))
        tx2 = create_tx_with_script(
            tx1, 0, script_sig=b'\x51', amount=int(SUBSIDY * COIN))

        block2.vtx.extend([tx1, tx2])
        block2.vtx = [block2.vtx[0]] + \
            sorted(block2.vtx[1:], key=lambda tx: tx.get_id())
        prepare_block(block2)
        orig_hash = block2.sha256
        block2_orig = copy.deepcopy(block2)

        # Mutate block 2
        # Lotus fixed CVE-2012-2459, therefore mutating results in a different merkle root
        block2.vtx.append(block2.vtx[2])
        assert block2.hashMerkleRoot != block2.calc_merkle_root()
        assert block2_orig.vtx != block2.vtx
        block2.hashMerkleRoot = block2.calc_merkle_root()
        block2.nSize = len(block2.serialize())
        block2.solve()
        assert orig_hash != block2.rehash()

        node.p2p.send_blocks_and_test(
            [block2], node, success=False, reject_reason='tx-duplicate')

        # Check transactions for duplicate inputs (CVE-2018-17144)
        self.log.info("Test duplicate input block.")

        block2_dup = copy.deepcopy(block2_orig)
        block2_dup.vtx[2].vin.append(block2_dup.vtx[2].vin[0])
        block2_dup.vtx[2].rehash()
        prepare_block(block2_dup)
        node.p2p.send_blocks_and_test(
            [block2_dup], node, success=False,
            reject_reason='bad-txns-inputs-duplicate')

        self.log.info("Test very broken block.")

        block3 = create_block(tip, create_coinbase(height), block_time)
        block3.nHeight = height
        block_time += 1
        block3.vtx[0].vout[0].nValue = int(2 * SUBSIDY * COIN)  # Too high!
        block3.vtx[0].rehash()
        prepare_block(block3)

        node.p2p.send_blocks_and_test(
            [block3], node, success=False, reject_reason='bad-cb-amount')

        # Complete testing of CVE-2012-2459 by sending the original block.
        # It should be accepted even though it has the same hash as the mutated
        # one.

        self.log.info("Test accepting original block after rejecting its"
                      " mutated version.")
        node.p2p.send_blocks_and_test([block2_orig], node, success=True,
                                      timeout=5)

        # Update tip info
        height += 1
        block_time += 1
        tip = int(block2_orig.hash, 16)

        # Complete testing of CVE-2018-17144, by checking for the inflation bug.
        # Create a block that spends the output of a tx in a previous block.
        block4 = create_block(tip, create_coinbase(height), block_time)
        block4.nHeight = height
        tx3 = create_tx_with_script(tx2, 0, script_sig=b'\x51',
                                    amount=int(SUBSIDY * COIN))

        # Duplicates input
        tx3.vin.append(tx3.vin[0])
        tx3.rehash()
        block4.vtx.append(tx3)
        prepare_block(block4)
        self.log.info("Test inflation by duplicating input")
        node.p2p.send_blocks_and_test([block4], node, success=False,
                                      reject_reason='bad-txns-inputs-duplicate')