Example #1
 def get_tx(self):
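     # b'\x64' is OP_NOTIF; a scriptSig made of nothing but OP_NOTIF fails script validation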
     return create_tx_with_script(
         self.spend_tx, 0, script_sig=b'\x64' * 35,
         amount=(self.spend_avail // 2))
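All of these snippets call the create_tx_with_script helper from the Bitcoin Core functional test framework (test_framework/blocktools.py). For orientation, a minimal sketch of roughly what that helper does follows; the exact signature, defaults, and import paths are assumptions and vary between versions and forks (some forks, for example, key transactions by malfixsha256 instead of sha256):

# Sketch only: approximates test_framework.blocktools.create_tx_with_script.
# The exact signature and defaults are assumed and may differ by version or fork.
from test_framework.messages import COutPoint, CTransaction, CTxIn, CTxOut
from test_framework.script import CScript

def create_tx_with_script(prevtx, n, script_sig=b"", *, amount, script_pub_key=CScript()):
    """Return a 1-input, 1-output transaction spending prevtx's n-th output for `amount`."""
    tx = CTransaction()
    assert n < len(prevtx.vout)
    tx.vin.append(CTxIn(COutPoint(prevtx.sha256, n), script_sig, 0xffffffff))
    tx.vout.append(CTxOut(amount, script_pub_key))
    tx.calc_sha256()
    return tx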
Example #2
 def get_tx(self):
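     # The output is one satoshi more than the value being spent, i.e. a negative fee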
     return create_tx_with_script(
         self.spend_tx, 0, script_pub_key=basic_p2sh, amount=(self.spend_avail + 1))
Example #3
 def get_tx(self):
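     # A negative output value is never valid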
     return create_tx_with_script(self.spend_tx, 0, amount=-1)
Example #4
 def get_tx(self):
     return create_tx_with_script(self.spend_tx,
                                  0,
                                  script_pub_key=basic_p2sh,
                                  amount=(self.spend_avail + 1))
Example #5
 def get_tx(self):
     return create_tx_with_script(self.spend_tx, 0, amount=-1)
Example #6
    def run_test(self):
        # Add p2p connection to node0
        node = self.nodes[0]  # convenience reference to the node
        node.add_p2p_connection(P2PDataStore())

        best_block = node.getblock(node.getbestblockhash())
        tip = int(node.getbestblockhash(), 16)
        height = best_block["height"] + 1
        block_time = best_block["time"] + 1

        self.log.info("Create a new block with an anyone-can-spend coinbase")

        height = 1
        block = create_block(tip, create_coinbase(height), block_time)
        block.solve()
        # Save the coinbase for later
        block1 = block
        tip = block.sha256
        node.p2p.send_blocks_and_test([block1], node, success=True)

        self.log.info("Mature the block.")
        node.generate(100)

        best_block = node.getblock(node.getbestblockhash())
        tip = int(node.getbestblockhash(), 16)
        height = best_block["height"] + 1
        block_time = best_block["time"] + 1

        # Use merkle-root malleability to generate an invalid block with
        # same blockheader.
        # Manufacture a block with 3 transactions (coinbase, spend of prior
        # coinbase, spend of that spend).  Duplicate the 3rd transaction to
        # leave merkle root and blockheader unchanged but invalidate the block.
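        # (Duplicating the last transaction works because the merkle tree hashes an odd
        # level's final entry twice, so the root stays the same; this is CVE-2012-2459.)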
        self.log.info("Test merkle root malleability.")

        block2 = create_block(tip, create_coinbase(height), block_time)
        block_time += 1

        # b'\x51' is OP_TRUE
        tx1 = create_tx_with_script(block1.vtx[0], 0, script_sig=b'\x51', amount=50 * COIN)
        tx2 = create_tx_with_script(tx1, 0, script_sig=b'\x51', amount=50 * COIN)

        block2.vtx.extend([tx1, tx2])
        block2.hashMerkleRoot = block2.calc_merkle_root()
        block2.rehash()
        block2.solve()
        orig_hash = block2.sha256
        block2_orig = copy.deepcopy(block2)

        # Mutate block 2
        block2.vtx.append(tx2)
        assert_equal(block2.hashMerkleRoot, block2.calc_merkle_root())
        assert_equal(orig_hash, block2.rehash())
        assert block2_orig.vtx != block2.vtx

        node.p2p.send_blocks_and_test([block2], node, success=False, reject_code=16, reject_reason=b'bad-txns-duplicate')

        # Check transactions for duplicate inputs
        self.log.info("Test duplicate input block.")

        block2_orig.vtx[2].vin.append(block2_orig.vtx[2].vin[0])
        block2_orig.vtx[2].rehash()
        block2_orig.hashMerkleRoot = block2_orig.calc_merkle_root()
        block2_orig.rehash()
        block2_orig.solve()
        node.p2p.send_blocks_and_test([block2_orig], node, success=False, reject_reason=b'bad-txns-inputs-duplicate')

        self.log.info("Test very broken block.")

        block3 = create_block(tip, create_coinbase(height), block_time)
        block_time += 1
        block3.vtx[0].vout[0].nValue = 100 * COIN  # Too high!
        block3.vtx[0].sha256 = None
        block3.vtx[0].calc_sha256()
        block3.hashMerkleRoot = block3.calc_merkle_root()
        block3.rehash()
        block3.solve()

        node.p2p.send_blocks_and_test([block3], node, success=False, reject_code=16, reject_reason=b'bad-cb-amount')
Example #7
    def run_test(self):
        node = self.nodes[0]  # convenience reference to the node

        self.bootstrap_p2p()  # Add one p2p connection to the node

        best_block = self.nodes[0].getbestblockhash()
        tip = int(best_block, 16)
        best_block_time = self.nodes[0].getblock(best_block)['time']
        block_time = best_block_time + 1

        privkey = b"aa3680d5d48a8283413f7a108367c7299ca73f553735860a87b08f39395618b7"
        key = CECKey()
        key.set_secretbytes(privkey)
        key.set_compressed(True)
        pubkey = CPubKey(key.get_pubkey())
        pubkeyhash = hash160(pubkey)
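        # Standard P2PKH locking script: OP_DUP OP_HASH160 <pubkeyhash> OP_EQUALVERIFY OP_CHECKSIG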
        SCRIPT_PUB_KEY = CScript([
            CScriptOp(OP_DUP),
            CScriptOp(OP_HASH160), pubkeyhash,
            CScriptOp(OP_EQUALVERIFY),
            CScriptOp(OP_CHECKSIG)
        ])

        self.log.info("Create a new block with an anyone-can-spend coinbase.")
        height = 1
        block = create_block(tip, create_coinbase(height, pubkey), block_time)
        block.solve(self.signblockprivkey)
        # Save the coinbase for later
        block1 = block
        tip = block.sha256
        node.p2p.send_blocks_and_test([block], node, success=True)

        # b'\x64' is OP_NOTIF
        # Transaction will be rejected with code 16 (REJECT_INVALID)
        self.log.info('Test a transaction that is rejected')
        tx1 = create_tx_with_script(block1.vtx[0],
                                    0,
                                    script_sig=b'\x64' * 35,
                                    amount=50 * COIN - 12000)
        node.p2p.send_txs_and_test([tx1],
                                   node,
                                   success=False,
                                   expect_disconnect=False)

        # Make two p2p connections to provide the node with orphans
        # * p2ps[0] will send valid orphan txs (one with low fee)
        # * p2ps[1] will send an invalid orphan tx (and is later disconnected for that)
        self.reconnect_p2p(num_connections=2)

        self.log.info('Test orphan transaction handling ... ')
        # Create a root transaction that we withhold until all dependent transactions
        # are sent out and in the orphan cache
        tx_withhold = CTransaction()
        tx_withhold.vin.append(
            CTxIn(outpoint=COutPoint(block1.vtx[0].malfixsha256, 0)))
        tx_withhold.vout.append(
            CTxOut(nValue=50 * COIN - 12000, scriptPubKey=SCRIPT_PUB_KEY))
        tx_withhold.calc_sha256()
        (sighash, err) = SignatureHash(CScript([pubkey, OP_CHECKSIG]),
                                       tx_withhold, 0, SIGHASH_ALL)
        signature = key.sign(sighash) + b'\x01'  # 0x1 is SIGHASH_ALL
        tx_withhold.vin[0].scriptSig = CScript([signature])

        # Our first orphan tx with some outputs to create further orphan txs
        tx_orphan_1 = CTransaction()
        tx_orphan_1.vin.append(
            CTxIn(outpoint=COutPoint(tx_withhold.malfixsha256, 0)))
        tx_orphan_1.vout = [
            CTxOut(nValue=10 * COIN, scriptPubKey=SCRIPT_PUB_KEY)
        ] * 3
        tx_orphan_1.calc_sha256()
        (sighash, err) = SignatureHash(SCRIPT_PUB_KEY, tx_orphan_1, 0,
                                       SIGHASH_ALL)
        signature = key.sign(sighash) + b'\x01'  # 0x1 is SIGHASH_ALL
        tx_orphan_1.vin[0].scriptSig = CScript([signature, pubkey])

        # A valid transaction with low fee
        tx_orphan_2_no_fee = CTransaction()
        tx_orphan_2_no_fee.vin.append(
            CTxIn(outpoint=COutPoint(tx_orphan_1.malfixsha256, 0)))
        tx_orphan_2_no_fee.vout.append(
            CTxOut(nValue=10 * COIN, scriptPubKey=SCRIPT_PUB_KEY))
        (sighash, err) = SignatureHash(SCRIPT_PUB_KEY, tx_orphan_2_no_fee, 0,
                                       SIGHASH_ALL)
        signature = key.sign(sighash) + b'\x01'  # 0x1 is SIGHASH_ALL
        tx_orphan_2_no_fee.vin[0].scriptSig = CScript([signature, pubkey])

        # A valid transaction with sufficient fee
        tx_orphan_2_valid = CTransaction()
        tx_orphan_2_valid.vin.append(
            CTxIn(outpoint=COutPoint(tx_orphan_1.malfixsha256, 1)))
        tx_orphan_2_valid.vout.append(
            CTxOut(nValue=10 * COIN - 12000, scriptPubKey=SCRIPT_PUB_KEY))
        tx_orphan_2_valid.calc_sha256()
        (sighash, err) = SignatureHash(SCRIPT_PUB_KEY, tx_orphan_2_valid, 0,
                                       SIGHASH_ALL)
        signature = key.sign(sighash) + b'\x01'  # 0x1 is SIGHASH_ALL
        tx_orphan_2_valid.vin[0].scriptSig = CScript([signature, pubkey])

        # An invalid transaction with negative fee
        tx_orphan_2_invalid = CTransaction()
        tx_orphan_2_invalid.vin.append(
            CTxIn(outpoint=COutPoint(tx_orphan_1.malfixsha256, 2)))
        tx_orphan_2_invalid.vout.append(
            CTxOut(nValue=11 * COIN, scriptPubKey=SCRIPT_PUB_KEY))
        (sighash, err) = SignatureHash(SCRIPT_PUB_KEY, tx_orphan_2_invalid, 0,
                                       SIGHASH_ALL)
        signature = key.sign(sighash) + b'\x01'  # 0x1 is SIGHASH_ALL
        tx_orphan_2_invalid.vin[0].scriptSig = CScript([signature, pubkey])

        self.log.info('Send the orphans ... ')
        # Send valid orphan txs from p2ps[0]
        node.p2p.send_txs_and_test(
            [tx_orphan_1, tx_orphan_2_no_fee, tx_orphan_2_valid],
            node,
            success=False)
        # Send invalid tx from p2ps[1]
        node.p2ps[1].send_txs_and_test([tx_orphan_2_invalid],
                                       node,
                                       success=False)

        assert_equal(0,
                     node.getmempoolinfo()['size'])  # Mempool should be empty
        assert_equal(2, len(node.getpeerinfo()))  # p2ps[1] is still connected

        self.log.info('Send the withhold tx ... ')
        node.p2p.send_txs_and_test([tx_withhold], node, success=True)

        # Transactions that should end up in the mempool
        expected_mempool = {
            t.hashMalFix
            for t in [
                tx_withhold,  # The transaction that is the root for all orphans
                tx_orphan_1,  # The orphan transaction that splits the coins
                tx_orphan_2_valid,  # The valid transaction (with sufficient fee)
            ]
        }
        # Transactions that do not end up in the mempool
        # tx_orphan_2_no_fee, because its fee is too low (p2ps[0] is not disconnected for relaying that tx)
        # tx_orphan_2_invalid, because it has a negative fee (p2ps[1] is disconnected for relaying that tx)

        wait_until(lambda: 1 == len(node.getpeerinfo()),
                   timeout=12)  # p2ps[1] is no longer connected
        assert_equal(expected_mempool, set(node.getrawmempool()))

        # Restart the node with BIP61 message sending disabled; check that it disconnects without sending a reject message
        self.log.info(
            'Test a transaction that is rejected, with BIP61 disabled')
        self.restart_node(0, ['-enablebip61=0', '-persistmempool=0'])
        self.reconnect_p2p(num_connections=1)
        node.p2p.send_txs_and_test([tx1],
                                   node,
                                   success=False,
                                   expect_disconnect=False)
        # send_txs_and_test will have waited for disconnect, so we can safely check that no reject has been received
        assert_equal(node.p2p.reject_code_received, None)
Example #8
 def create_tx(self, spend_tx, n, value, script=CScript([OP_TRUE])):
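     # Convenience wrapper: spend output n of spend_tx, paying `value` to `script` (OP_TRUE, i.e. anyone-can-spend, by default)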
     tx = create_tx_with_script(spend_tx, n, b"", value, script)
     return tx
Example #9
 def get_tx(self):
     return create_tx_with_script(self.spend_tx,
                                  0,
                                  script_sig=b'\x64' * 35,
                                  amount=(self.spend_avail // 2))
Example #10
 def get_tx(self):
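     # A scriptPubKey stuffed with MAX_BLOCK_SIGOPS CHECKSIG opcodes, intended to exceed the sigop limit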
     lotsa_checksigs = sc.CScript([sc.OP_CHECKSIG] * (MAX_BLOCK_SIGOPS))
     return create_tx_with_script(self.spend_tx,
                                  0,
                                  script_pub_key=lotsa_checksigs,
                                  amount=1)
Example #11
    def run_test(self):
        # Add p2p connection to node0
        node = self.nodes[0]  # convenience reference to the node
        peer = node.add_p2p_connection(P2PDataStore())

        best_block = node.getblock(node.getbestblockhash())
        tip = int(node.getbestblockhash(), 16)
        height = best_block["height"] + 1
        block_time = best_block["time"] + 1

        self.log.info("Create a new block with an anyone-can-spend coinbase")

        height = 1
        block = create_block(tip,
                             create_coinbase(height),
                             block_time,
                             version=0x20000000)
        block.solve()
        # Save the coinbase for later
        block1 = block
        tip = block.sha256
        peer.send_blocks_and_test([block1], node, success=True)

        self.log.info("Mature the block.")
        node.generatetoaddress(100, node.get_deterministic_priv_key().address)

        best_block = node.getblock(node.getbestblockhash())
        tip = int(node.getbestblockhash(), 16)
        height = best_block["height"] + 1
        block_time = best_block["time"] + 1

        # Use merkle-root malleability to generate an invalid block with
        # same blockheader (CVE-2012-2459).
        # Manufacture a block with 3 transactions (coinbase, spend of prior
        # coinbase, spend of that spend).  Duplicate the 3rd transaction to
        # leave merkle root and blockheader unchanged but invalidate the block.
        # For more information on merkle-root malleability see src/consensus/merkle.cpp.
        self.log.info("Test merkle root malleability.")

        block2 = create_block(tip,
                              create_coinbase(height),
                              block_time,
                              version=0x20000000)
        block_time += 1

        # b'\x51' is OP_TRUE
        tx1 = create_tx_with_script(block1.vtx[0],
                                    0,
                                    script_sig=b'\x51',
                                    amount=50 * COIN)
        tx2 = create_tx_with_script(tx1,
                                    0,
                                    script_sig=b'\x51',
                                    amount=50 * COIN)

        block2.vtx.extend([tx1, tx2])
        block2.hashMerkleRoot = block2.calc_merkle_root()
        block2.rehash()
        block2.solve()
        orig_hash = block2.sha256
        block2_orig = copy.deepcopy(block2)

        # Mutate block 2
        block2.vtx.append(tx2)
        assert_equal(block2.hashMerkleRoot, block2.calc_merkle_root())
        assert_equal(orig_hash, block2.rehash())
        assert block2_orig.vtx != block2.vtx

        peer.send_blocks_and_test([block2],
                                  node,
                                  success=False,
                                  reject_reason='bad-txns-duplicate')

        # Check transactions for duplicate inputs (CVE-2018-17144)
        self.log.info("Test duplicate input block.")

        block2_dup = copy.deepcopy(block2_orig)
        block2_dup.vtx[2].vin.append(block2_dup.vtx[2].vin[0])
        block2_dup.vtx[2].rehash()
        block2_dup.hashMerkleRoot = block2_dup.calc_merkle_root()
        block2_dup.rehash()
        block2_dup.solve()
        peer.send_blocks_and_test([block2_dup],
                                  node,
                                  success=False,
                                  reject_reason='bad-txns-inputs-duplicate')

        self.log.info("Test very broken block.")

        block3 = create_block(tip,
                              create_coinbase(height),
                              block_time,
                              version=0x20000000)
        block_time += 1
        block3.vtx[0].vout[0].nValue = 100 * COIN  # Too high!
        block3.vtx[0].sha256 = None
        block3.vtx[0].calc_sha256()
        block3.hashMerkleRoot = block3.calc_merkle_root()
        block3.rehash()
        block3.solve()

        peer.send_blocks_and_test([block3],
                                  node,
                                  success=False,
                                  reject_reason='bad-cb-amount')

        # Complete testing of CVE-2012-2459 by sending the original block.
        # It should be accepted even though it has the same hash as the mutated one.

        self.log.info(
            "Test accepting original block after rejecting its mutated version."
        )
        peer.send_blocks_and_test([block2_orig], node, success=True, timeout=5)

        # Update tip info
        height += 1
        block_time += 1
        tip = int(block2_orig.hash, 16)

        # Complete testing of CVE-2018-17144, by checking for the inflation bug.
        # Create a block that spends the output of a tx in a previous block.
        block4 = create_block(tip,
                              create_coinbase(height),
                              block_time,
                              version=0x20000000)
        tx3 = create_tx_with_script(tx2,
                                    0,
                                    script_sig=b'\x51',
                                    amount=50 * COIN)

        # Duplicates input
        tx3.vin.append(tx3.vin[0])
        tx3.rehash()
        block4.vtx.append(tx3)
        block4.hashMerkleRoot = block4.calc_merkle_root()
        block4.rehash()
        block4.solve()
        self.log.info("Test inflation by duplicating input")
        peer.send_blocks_and_test([block4],
                                  node,
                                  success=False,
                                  reject_reason='bad-txns-inputs-duplicate')
Example #12
    def run_test(self):
        # create a p2p receiver
        dspReceiver = P2PInterface()
        self.nodes[0].add_p2p_connection(dspReceiver)
        # workaround - nodes think they're in IBD unless one block is mined
        self.nodes[0].generate(1)
        self.sync_all()
        # Disconnect the third node, will be used later for triple-spend
        disconnect_nodes(self.nodes[1], self.nodes[2])
        # Put fourth node (the non-dsproof-enabled node) with the connected group
        # (we will check its log at the end to ensure it ignored dsproof inv's)
        non_dsproof_node = self.nodes[3]
        disconnect_nodes(self.nodes[2], non_dsproof_node)
        connect_nodes(self.nodes[1], non_dsproof_node)

        # Create and mine a regular non-coinbase transaction for spending
        fundingtxid = self.nodes[0].getblock(
            self.nodes[0].getblockhash(1))['tx'][0]
        fundingtx = FromHex(CTransaction(),
                            self.nodes[0].getrawtransaction(fundingtxid))

        # Create three conflicting transactions. They are only signed, but not yet submitted to the mempool
        firstDSTx = create_raw_transaction(self.nodes[0], fundingtxid,
                                           self.nodes[0].getnewaddress(),
                                           49.95)
        secondDSTx = create_raw_transaction(self.nodes[0], fundingtxid,
                                            self.nodes[0].getnewaddress(),
                                            49.95)
        thirdDSTx = create_raw_transaction(self.nodes[0], fundingtxid,
                                           self.nodes[0].getnewaddress(),
                                           49.95)

        # Send the two conflicting transactions to the network
        # Submit to two different nodes, because a node would simply reject
        # a double spend submitted through RPC
        firstDSTxId = self.nodes[0].sendrawtransaction(firstDSTx)
        self.nodes[1].sendrawtransaction(secondDSTx)
        wait_until(lambda: dspReceiver.message_count["dsproof-beta"] == 1,
                   lock=mininode_lock,
                   timeout=25)

        # 1. The DSP message is well-formed and contains all fields
        # If the message arrived and was deserialized successfully, then 1. is satisfied
        dsp = dspReceiver.last_message["dsproof-beta"].dsproof
        dsps = set()
        dsps.add(dsp.serialize())

        # Check that it is valid, both spends are signed with the same key
        # NB: pushData is made of the sig + one last byte for hashtype
        pubkey = self.getpubkey()
        sighash1 = getSighashes(dsp.getPrevOutput(), dsp.spender1, fundingtx)
        sighash2 = getSighashes(dsp.getPrevOutput(), dsp.spender2, fundingtx)
        assert (pubkey.verify_ecdsa(dsp.spender1.pushData[0][:-1], sighash1))
        assert (pubkey.verify_ecdsa(dsp.spender2.pushData[0][:-1], sighash2))

        # 2. For P2PKH there is exactly one pushData per spender
        assert_equal(1, len(dsp.spender1.pushData))
        assert_equal(1, len(dsp.spender2.pushData))

        # 3. The two spenders are different; specifically, the signatures (pushData) must differ.
        assert (dsp.spender1.pushData != dsp.spender2.pushData)

        # 4. The first & double spenders are sorted with two hashes as keys.
        assert (dsp.spender1.hashOutputs < dsp.spender2.hashOutputs)

        # 5. The double spent output is still available in the UTXO database,
        #    implying no spending transaction has been mined.
        assert_equal(
            self.nodes[0].gettransaction(firstDSTxId)["confirmations"], 0)

        # The transaction being spent, as reported by the DSP, is the original fundingtx
        assert_equal(hex(dsp.prevTxId)[2:], fundingtxid)
        assert_equal(dsp.prevOutIndex, 0)

        # 6. No other valid proof is known.
        #    i.e. if a valid proof is already known, no new proofs will be constructed
        #    We submit a _triple_ spend transaction to the third node
        connect_nodes(self.nodes[0], self.nodes[2])
        self.nodes[2].sendrawtransaction(thirdDSTx)
        #    Wait for a new dsp to be relayed to the node;
        #    if such a dsp (or the double- or triple-spending tx) arrives, the test fails
        assert_raises(AssertionError,
                      wait_until,
                      lambda: dspReceiver.message_count["dsproof-beta"] == 2 or
                      dspReceiver.message_count["tx"] == 2,
                      lock=mininode_lock,
                      timeout=5)

        # Only P2PKH inputs are protected
        # Check that a non-P2PKH output is not protected
        self.nodes[0].generate(1)
        fundingtxid = self.nodes[0].getblock(
            self.nodes[0].getblockhash(2))['tx'][0]
        fundingtx = FromHex(CTransaction(),
                            self.nodes[0].getrawtransaction(fundingtxid))
        fundingtx.rehash()
        nonP2PKHTx = create_tx_with_script(fundingtx, 0,
                                           b'', int(49.95 * COIN),
                                           CScript([OP_TRUE]))
        signedNonP2PKHTx = self.nodes[0].signrawtransactionwithwallet(
            ToHex(nonP2PKHTx))
        self.nodes[0].sendrawtransaction(signedNonP2PKHTx['hex'])
        self.sync_all()

        tx = FromHex(CTransaction(), signedNonP2PKHTx['hex'])
        tx.rehash()

        firstDSTx = create_tx_with_script(tx, 0, b'', int(49.90 * COIN),
                                          CScript([OP_TRUE]))
        secondDSTx = create_tx_with_script(tx, 0, b'', int(49.90 * COIN),
                                           CScript([OP_FALSE]))

        self.nodes[0].sendrawtransaction(ToHex(firstDSTx))
        self.nodes[1].sendrawtransaction(ToHex(secondDSTx))

        assert_raises(AssertionError,
                      wait_until,
                      lambda: dspReceiver.message_count["dsproof-beta"] == 2,
                      lock=mininode_lock,
                      timeout=5)

        # Check that unconfirmed outputs are also protected
        self.nodes[0].generate(1)
        unconfirmedtx = self.nodes[0].sendtoaddress(
            self.nodes[0].getnewaddress(), 25)
        self.sync_all()

        firstDSTx = create_raw_transaction(self.nodes[0], unconfirmedtx,
                                           self.nodes[0].getnewaddress(), 24.9)
        secondDSTx = create_raw_transaction(self.nodes[0], unconfirmedtx,
                                            self.nodes[0].getnewaddress(),
                                            24.9)

        self.nodes[0].sendrawtransaction(firstDSTx)
        self.nodes[1].sendrawtransaction(secondDSTx)

        wait_until(lambda: dspReceiver.message_count["dsproof-beta"] == 2,
                   lock=mininode_lock,
                   timeout=5)
        dsp2 = dspReceiver.last_message["dsproof-beta"].dsproof
        dsps.add(dsp2.serialize())
        assert (len(dsps) == 2)

        # Check that a double spent tx, which has some non-P2PKH inputs
        # in its ancestor, still results in a dsproof being emitted.
        self.nodes[0].generate(1)
        # Create a 1-of-2 multisig address which will be an in-mempool
        # ancestor to a double-spent tx
        pubkey0 = self.nodes[0].getaddressinfo(
            self.nodes[0].getnewaddress())['pubkey']
        pubkey1 = self.nodes[1].getaddressinfo(
            self.nodes[1].getnewaddress())['pubkey']
        p2sh = self.nodes[0].addmultisigaddress(1, [pubkey0, pubkey1],
                                                "")['address']
        # Fund the p2sh address
        fundingtxid = self.nodes[0].sendtoaddress(p2sh, 49)
        vout = find_output(self.nodes[0], fundingtxid, Decimal('49'))
        self.sync_all()

        # Spend from the P2SH to a P2PKH, which we will double spend from
        # in the next step.
        p2pkh1 = self.nodes[0].getnewaddress()
        rawtx1 = create_raw_transaction(self.nodes[0], fundingtxid, p2pkh1,
                                        48.999, vout)
        signed_tx1 = self.nodes[0].signrawtransactionwithwallet(rawtx1)
        txid1 = self.nodes[0].sendrawtransaction(signed_tx1['hex'])
        vout1 = find_output(self.nodes[0], txid1, Decimal('48.999'))
        self.sync_all()

        # Now double spend the P2PKH which has a P2SH ancestor.
        firstDSTx = create_raw_transaction(self.nodes[0], txid1,
                                           self.nodes[0].getnewaddress(), 48.9,
                                           vout1)
        secondDSTx = create_raw_transaction(self.nodes[0], txid1,
                                            self.nodes[1].getnewaddress(),
                                            48.9, vout1)
        self.nodes[0].sendrawtransaction(firstDSTx)
        self.nodes[1].sendrawtransaction(secondDSTx)

        # We still get a dsproof, showing that not all ancestors have
        # to be P2PKH.
        wait_until(lambda: dspReceiver.message_count["dsproof-beta"] == 3,
                   lock=mininode_lock,
                   timeout=5)
        dsp3 = dspReceiver.last_message["dsproof-beta"].dsproof
        dsps.add(dsp3.serialize())
        assert (len(dsps) == 3)

        # Check that a double spent tx, which has some unconfirmed ANYONECANPAY
        # transactions in its ancestry, still results in a dsproof being emitted.
        self.nodes[0].generate(1)
        fundingtxid = self.nodes[0].getblock(
            self.nodes[0].getblockhash(5))['tx'][0]
        vout1 = find_output(self.nodes[0], fundingtxid, Decimal('50'))
        addr = self.nodes[1].getnewaddress()
        pubkey = self.nodes[1].getaddressinfo(addr)['pubkey']
        inputs = [{
            'txid': fundingtxid,
            'vout': vout1,
            'amount': 49.99,
            'scriptPubKey': pubkey
        }]
        outputs = {addr: 49.99}
        rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
        signed = self.nodes[0].signrawtransactionwithwallet(
            rawtx, None, "NONE|FORKID|ANYONECANPAY")
        assert 'complete' in signed
        assert_equal(signed['complete'], True)
        assert 'errors' not in signed
        txid = self.nodes[0].sendrawtransaction(signed['hex'])
        self.sync_all()
        # The ANYONECANPAY is still unconfirmed, but let's create some
        # double spends from it.
        vout2 = find_output(self.nodes[0], txid, Decimal('49.99'))
        firstDSTx = create_raw_transaction(self.nodes[1], txid,
                                           self.nodes[0].getnewaddress(),
                                           49.98, vout2)
        secondDSTx = create_raw_transaction(self.nodes[1], txid,
                                            self.nodes[1].getnewaddress(),
                                            49.98, vout2)
        self.nodes[0].sendrawtransaction(firstDSTx)
        self.nodes[1].sendrawtransaction(secondDSTx)
        # We get a dsproof.
        wait_until(lambda: dspReceiver.message_count["dsproof-beta"] == 4,
                   lock=mininode_lock,
                   timeout=5)
        dsp4 = dspReceiver.last_message["dsproof-beta"].dsproof
        dsps.add(dsp4.serialize())
        assert (len(dsps) == 4)

        # Create a P2SH to double-spend directly (1-of-1 multisig)
        self.nodes[0].generate(1)
        self.sync_all()
        pubkey2 = self.nodes[0].getaddressinfo(
            self.nodes[0].getnewaddress())['pubkey']
        p2sh = self.nodes[0].addmultisigaddress(1, [
            pubkey2,
        ], "")['address']
        fundingtxid = self.nodes[0].sendtoaddress(p2sh, 49)
        vout = find_output(self.nodes[0], fundingtxid, Decimal('49'))
        self.sync_all()
        # Now double spend it
        firstDSTx = create_raw_transaction(self.nodes[0], fundingtxid,
                                           self.nodes[0].getnewaddress(), 48.9,
                                           vout)
        secondDSTx = create_raw_transaction(self.nodes[0], fundingtxid,
                                            self.nodes[1].getnewaddress(),
                                            48.9, vout)
        self.nodes[0].sendrawtransaction(firstDSTx)
        self.nodes[1].sendrawtransaction(secondDSTx)
        # No dsproof is generated.
        assert_raises(AssertionError,
                      wait_until,
                      lambda: dspReceiver.message_count["dsproof-beta"] == 5,
                      lock=mininode_lock,
                      timeout=5)

        # Check end conditions - still only 4 DSPs
        last_dsp = dspReceiver.last_message["dsproof-beta"].dsproof
        dsps.add(last_dsp.serialize())
        assert (len(dsps) == 4)

        # Next, test that submitting a double-spend via the RPC interface also results in a broadcasted
        # dsproof
        self.nodes[0].generate(1)
        self.sync_all()
        fundingtxid = self.nodes[0].getblock(
            self.nodes[0].getblockhash(6))['tx'][0]
        # Create 2 new double-spends
        firstDSTx = create_raw_transaction(self.nodes[0], fundingtxid,
                                           self.nodes[0].getnewaddress(),
                                           49.95)
        secondDSTx = create_raw_transaction(self.nodes[0], fundingtxid,
                                            self.nodes[0].getnewaddress(),
                                            49.95)

        # Send the two conflicting transactions to the same node via RPC
        assert_equal(dspReceiver.message_count["dsproof-beta"], 4)
        self.nodes[0].sendrawtransaction(firstDSTx)
        # send second tx to same node via RPC
        # -- it's normal for it to reject the tx, but it should still generate a dsproof broadcast
        assert_raises_rpc_error(-26, "txn-mempool-conflict (code 18)",
                                self.nodes[0].sendrawtransaction, secondDSTx)
        wait_until(lambda: dspReceiver.message_count["dsproof-beta"] == 5,
                   lock=mininode_lock,
                   timeout=5)

        # Finally, ensure that the non-dsproof node has the messages we expect in its log
        # (this checks that dsproof was disabled for this node)
        debug_log = os.path.join(non_dsproof_node.datadir, 'regtest',
                                 'debug.log')
        dsp_inv_ctr = 0
        with open(debug_log, encoding='utf-8') as dl:
            for line in dl.readlines():
                if "Got DSProof INV" in line:
                    # Ensure that if this node did see a dsproof inv, it explicitly ignored it
                    assert "(ignored, -doublespendproof=0)" in line
                    dsp_inv_ctr += 1
                else:
                    # Ensure this node is not processing dsproof messages and not requesting them via getdata
                    assert ("received: dsproof-beta" not in line
                            and "Good DSP" not in line
                            and "DSP broadcasting" not in line
                            and "bad-dsproof" not in line)
        # We expect it to have received at least some DSP inv broadcasts
        assert_greater_than(dsp_inv_ctr, 0)
Example #13
    def run_test(self):
        node = self.nodes[0]  # convenience reference to the node

        self.bootstrap_p2p()  # Add one p2p connection to the node

        best_block = self.nodes[0].getbestblockhash()
        tip = int(best_block, 16)
        best_block_time = self.nodes[0].getblock(best_block)['time']
        block_time = best_block_time + 1

        self.log.info("Create a new block with an anyone-can-spend coinbase.")
        height = 1
        block = create_block(tip, create_coinbase(height), block_time)
        block.solve()
        # Save the coinbase for later
        block1 = block
        tip = block.sha256
        node.p2p.send_blocks_and_test([block], node, success=True)

        self.log.info("Mature the block.")
        self.nodes[0].generate(100)

        # b'\x64' is OP_NOTIF
        # Transaction will be rejected with code 16 (REJECT_INVALID)
        # and we get disconnected immediately
        self.log.info('Test a transaction that is rejected')
        tx1 = create_tx_with_script(block1.vtx[0],
                                    0,
                                    script_sig=b'\x64' * 35,
                                    amount=50 * COIN - 12000)
        node.p2p.send_txs_and_test([tx1],
                                   node,
                                   success=False,
                                   expect_disconnect=True)

        # Make two p2p connections to provide the node with orphans
        # * p2ps[0] will send valid orphan txs (one with low fee)
        # * p2ps[1] will send an invalid orphan tx (and is later disconnected for that)
        self.reconnect_p2p(num_connections=2)

        self.log.info('Test orphan transaction handling ... ')
        # Create a root transaction that we withhold until all dependent transactions
        # are sent out and in the orphan cache
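        # b'\x51' is OP_TRUE and b'\x75' is OP_DROP, so this scriptPubKey is trivially spendable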
        SCRIPT_PUB_KEY_OP_TRUE = b'\x51\x75' * 15 + b'\x51'
        tx_withhold = CTransaction()
        tx_withhold.vin.append(
            CTxIn(outpoint=COutPoint(block1.vtx[0].sha256, 0)))
        tx_withhold.vout.append(
            CTxOut(nValue=50 * COIN - 12000,
                   scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
        tx_withhold.calc_sha256()

        # Our first orphan tx with some outputs to create further orphan txs
        tx_orphan_1 = CTransaction()
        tx_orphan_1.vin.append(
            CTxIn(outpoint=COutPoint(tx_withhold.sha256, 0)))
        tx_orphan_1.vout = [
            CTxOut(nValue=10 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE)
        ] * 3
        tx_orphan_1.calc_sha256()

        # A valid transaction with low fee
        tx_orphan_2_no_fee = CTransaction()
        tx_orphan_2_no_fee.vin.append(
            CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 0)))
        tx_orphan_2_no_fee.vout.append(
            CTxOut(nValue=10 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))

        # A valid transaction with sufficient fee
        tx_orphan_2_valid = CTransaction()
        tx_orphan_2_valid.vin.append(
            CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 1)))
        tx_orphan_2_valid.vout.append(
            CTxOut(nValue=10 * COIN - 12000,
                   scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
        tx_orphan_2_valid.calc_sha256()

        # An invalid transaction with negative fee
        tx_orphan_2_invalid = CTransaction()
        tx_orphan_2_invalid.vin.append(
            CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 2)))
        tx_orphan_2_invalid.vout.append(
            CTxOut(nValue=11 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))

        self.log.info('Send the orphans ... ')
        # Send valid orphan txs from p2ps[0]
        node.p2p.send_txs_and_test(
            [tx_orphan_1, tx_orphan_2_no_fee, tx_orphan_2_valid],
            node,
            success=False)
        # Send invalid tx from p2ps[1]
        node.p2ps[1].send_txs_and_test([tx_orphan_2_invalid],
                                       node,
                                       success=False)

        assert_equal(0,
                     node.getmempoolinfo()['size'])  # Mempool should be empty
        assert_equal(2, len(node.getpeerinfo()))  # p2ps[1] is still connected

        self.log.info('Send the withhold tx ... ')
        node.p2p.send_txs_and_test([tx_withhold], node, success=True)

        # Transactions that should end up in the mempool
        expected_mempool = {
            t.hash
            for t in [
                tx_withhold,  # The transaction that is the root for all orphans
                tx_orphan_1,  # The orphan transaction that splits the coins
                tx_orphan_2_valid,  # The valid transaction (with sufficient fee)
            ]
        }
        # Transactions that do not end up in the mempool
        # tx_orphan_2_no_fee, because its fee is too low (p2ps[0] is not disconnected for relaying that tx)
        # tx_orphan_2_invalid, because it has a negative fee (p2ps[1] is disconnected for relaying that tx)

        wait_until(lambda: 1 == len(node.getpeerinfo()),
                   timeout=12)  # p2ps[1] is no longer connected
        assert_equal(expected_mempool, set(node.getrawmempool()))

        # Restart the node with BIP61 message sending disabled; check that it disconnects without sending a reject message
        self.log.info(
            'Test a transaction that is rejected, with BIP61 disabled')
        self.restart_node(0, ['-enablebip61=0', '-persistmempool=0'])
        self.reconnect_p2p(num_connections=1)
        node.p2p.send_txs_and_test([tx1],
                                   node,
                                   success=False,
                                   expect_disconnect=True)
        # send_txs_and_test will have waited for disconnect, so we can safely check that no reject has been received
        assert_equal(node.p2p.reject_code_received, None)
Example #14
 def get_tx(self):
     tx = create_tx_with_script(self.spend_tx, 0, amount=MAX_MONEY)
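     # Duplicating the MAX_MONEY output makes the summed output value exceed MAX_MONEY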
     tx.vout = [tx.vout[0]] * 2
     tx.calc_sha256()
     return tx
Example #15
 def get_tx(self):
     lotsa_checksigs = sc.CScript([sc.OP_CHECKSIG] * (MAX_BLOCK_SIGOPS))
     return create_tx_with_script(
         self.spend_tx, 0,
         script_pub_key=lotsa_checksigs,
         amount=1)
Example #16
    def run_test(self):
        # Setup the p2p connections
        # test_node connects to node0 (not whitelisted)
        test_node = self.nodes[0].add_p2p_connection(P2PInterface())
        # min_work_node connects to node1 (whitelisted)
        min_work_node = self.nodes[1].add_p2p_connection(P2PInterface())

        # 1. Have nodes mine a block (leave IBD)
        [n.generatetoaddress(1, n.get_deterministic_priv_key().address) for n in self.nodes]
        tips = [int("0x" + n.getbestblockhash(), 0) for n in self.nodes]

        # 2. Send one block that builds on each tip.
        # This should be accepted by node0
        blocks_h2 = []  # the height 2 blocks on each node's chain
        block_time = int(time.time()) + 1
        for i in range(2):
            blocks_h2.append(create_block(tips[i], create_coinbase(2), block_time))
            blocks_h2[i].solve()
            block_time += 1
        test_node.send_message(msg_block(blocks_h2[0]))
        min_work_node.send_message(msg_block(blocks_h2[1]))

        for x in [test_node, min_work_node]:
            x.sync_with_ping()
        assert_equal(self.nodes[0].getblockcount(), 2)
        assert_equal(self.nodes[1].getblockcount(), 1)
        self.log.info("First height 2 block accepted by node0; correctly rejected by node1")

        # 3. Send another block that builds on genesis.
        block_h1f = create_block(int("0x" + self.nodes[0].getblockhash(0), 0), create_coinbase(1), block_time)
        block_time += 1
        block_h1f.solve()
        test_node.send_message(msg_block(block_h1f))

        test_node.sync_with_ping()
        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_h1f.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert tip_entry_found
        assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, block_h1f.hash)

        # 4. Send another two blocks that build on the fork.
        block_h2f = create_block(block_h1f.sha256, create_coinbase(2), block_time)
        block_time += 1
        block_h2f.solve()
        test_node.send_message(msg_block(block_h2f))

        test_node.sync_with_ping()
        # Since the earlier block was not processed by node, the new block
        # can't be fully validated.
        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_h2f.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert tip_entry_found

        # But this block should be accepted by node since it has equal work.
        self.nodes[0].getblock(block_h2f.hash)
        self.log.info("Second height 2 block accepted, but not reorg'ed to")

        # 4b. Now send another block that builds on the forking chain.
        block_h3 = create_block(block_h2f.sha256, create_coinbase(3), block_h2f.nTime+1)
        block_h3.solve()
        test_node.send_message(msg_block(block_h3))

        test_node.sync_with_ping()
        # Since the earlier block was not processed by node, the new block
        # can't be fully validated.
        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_h3.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert tip_entry_found
        self.nodes[0].getblock(block_h3.hash)

        # But this block should be accepted by node since it has more work.
        self.nodes[0].getblock(block_h3.hash)
        self.log.info("Unrequested more-work block accepted")

        # 4c. Now mine 288 more blocks and deliver; all should be processed but
        # the last (height-too-high) on node (as long as it is not missing any headers)
        tip = block_h3
        all_blocks = []
        for i in range(288):
            next_block = create_block(tip.sha256, create_coinbase(i + 4), tip.nTime+1)
            next_block.solve()
            all_blocks.append(next_block)
            tip = next_block

        # Now send the block at height 5 and check that it wasn't accepted (missing header)
        test_node.send_message(msg_block(all_blocks[1]))
        test_node.sync_with_ping()
        assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblock, all_blocks[1].hash)
        assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblockheader, all_blocks[1].hash)

        # The block at height 5 should be accepted if we provide the missing header, though
        headers_message = msg_headers()
        headers_message.headers.append(CBlockHeader(all_blocks[0]))
        test_node.send_message(headers_message)
        test_node.send_message(msg_block(all_blocks[1]))
        test_node.sync_with_ping()
        self.nodes[0].getblock(all_blocks[1].hash)

        # Now send the blocks in all_blocks
        for i in range(288):
            test_node.send_message(msg_block(all_blocks[i]))
        test_node.sync_with_ping()

        # Blocks 1-287 should be accepted, block 288 should be ignored because it's too far ahead
        for x in all_blocks[:-1]:
            self.nodes[0].getblock(x.hash)
        assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, all_blocks[-1].hash)

        # 5. Test handling of unrequested block on the node that didn't process
        # Should still not be processed (even though it has a child that has more
        # work).

        # The node should have requested the blocks at some point, so
        # disconnect/reconnect first

        self.nodes[0].disconnect_p2ps()
        self.nodes[1].disconnect_p2ps()

        test_node = self.nodes[0].add_p2p_connection(P2PInterface())

        test_node.send_message(msg_block(block_h1f))

        test_node.sync_with_ping()
        assert_equal(self.nodes[0].getblockcount(), 2)
        self.log.info("Unrequested block that would complete more-work chain was ignored")

        # 6. Try to get node to request the missing block.
        # Poke the node with an inv for block at height 3 and see if that
        # triggers a getdata on block 2 (it should if block 2 is missing).
        with mininode_lock:
            # Clear state so we can check the getdata request
            test_node.last_message.pop("getdata", None)
            test_node.send_message(msg_inv([CInv(2, block_h3.sha256)]))

        test_node.sync_with_ping()
        with mininode_lock:
            getdata = test_node.last_message["getdata"]

        # Check that the getdata includes the right block
        assert_equal(getdata.inv[0].hash, block_h1f.sha256)
        self.log.info("Inv at tip triggered getdata for unprocessed block")

        # 7. Send the missing block for the third time (now it is requested)
        test_node.send_message(msg_block(block_h1f))

        test_node.sync_with_ping()
        assert_equal(self.nodes[0].getblockcount(), 290)
        self.nodes[0].getblock(all_blocks[286].hash)
        assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash)
        assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, all_blocks[287].hash)
        self.log.info("Successfully reorged to longer chain from non-whitelisted peer")

        # 8. Create a chain which is invalid at a height longer than the
        # current chain, but which has more blocks on top of that
        block_289f = create_block(all_blocks[284].sha256, create_coinbase(289), all_blocks[284].nTime+1)
        block_289f.solve()
        block_290f = create_block(block_289f.sha256, create_coinbase(290), block_289f.nTime+1)
        block_290f.solve()
        block_291 = create_block(block_290f.sha256, create_coinbase(291), block_290f.nTime+1)
        # block_291 spends a coinbase below maturity!
        block_291.vtx.append(create_tx_with_script(block_290f.vtx[0], 0, script_sig=b"42", amount=1))
        block_291.hashMerkleRoot = block_291.calc_merkle_root()
        block_291.solve()
        block_292 = create_block(block_291.sha256, create_coinbase(292), block_291.nTime+1)
        block_292.solve()

        # Now send all the headers on the chain and enough blocks to trigger reorg
        headers_message = msg_headers()
        headers_message.headers.append(CBlockHeader(block_289f))
        headers_message.headers.append(CBlockHeader(block_290f))
        headers_message.headers.append(CBlockHeader(block_291))
        headers_message.headers.append(CBlockHeader(block_292))
        test_node.send_message(headers_message)

        test_node.sync_with_ping()
        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_292.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert tip_entry_found
        assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, block_292.hash)

        test_node.send_message(msg_block(block_289f))
        test_node.send_message(msg_block(block_290f))

        test_node.sync_with_ping()
        self.nodes[0].getblock(block_289f.hash)
        self.nodes[0].getblock(block_290f.hash)

        test_node.send_message(msg_block(block_291))

        # At this point we've sent an obviously-bogus block, wait for full processing
        # without assuming whether we will be disconnected or not
        try:
            # Only wait a short while so the test doesn't take forever if we do get
            # disconnected
            test_node.sync_with_ping(timeout=1)
        except AssertionError:
            test_node.wait_for_disconnect()

            self.nodes[0].disconnect_p2ps()
            test_node = self.nodes[0].add_p2p_connection(P2PInterface())

        # We should have failed reorg and switched back to 290 (but have block 291)
        assert_equal(self.nodes[0].getblockcount(), 290)
        assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash)
        assert_equal(self.nodes[0].getblock(block_291.hash)["confirmations"], -1)

        # Now send a new header on the invalid chain, indicating we're forked off, and expect to get disconnected
        block_293 = create_block(block_292.sha256, create_coinbase(293), block_292.nTime+1)
        block_293.solve()
        headers_message = msg_headers()
        headers_message.headers.append(CBlockHeader(block_293))
        test_node.send_message(headers_message)
        test_node.wait_for_disconnect()

        # 9. Connect node1 to node0 and ensure it is able to sync
        connect_nodes(self.nodes[0], 1)
        self.sync_blocks([self.nodes[0], self.nodes[1]])
        self.log.info("Successfully synced nodes 1 and 0")
Example #17
    def run_test(self):
        node = self.nodes[0]  # convenience reference to the node

        self.bootstrap_p2p()  # Add one p2p connection to the node

        best_block = self.nodes[0].getbestblockhash()
        tip = int(best_block, 16)
        best_block_time = self.nodes[0].getblock(best_block)['time']
        block_time = best_block_time + 1

        self.log.info("Create a new block with an anyone-can-spend coinbase.")
        height = 1
        block = create_block(tip, create_coinbase(height), block_time)
        block.nVersion = 0x20000000
        block.solve()
        # Save the coinbase for later
        block1 = block
        tip = block.sha256
        node.p2p.send_blocks_and_test([block], node, success=True)

        self.log.info("Mature the block.")
        self.nodes[0].generate(100)

        # b'\x64' is OP_NOTIF
        # Transaction will be rejected with code 16 (REJECT_INVALID)
        # and we get disconnected immediately
        self.log.info('Test a transaction that is rejected')
        tx1 = create_tx_with_script(block1.vtx[0], 0, script_sig=b'\x64' * 35, amount=50 * COIN - 12000)
        node.p2p.send_txs_and_test([tx1], node, success=False, expect_disconnect=True)

        # Make two p2p connections to provide the node with orphans
        # * p2ps[0] will send valid orphan txs (one with low fee)
        # * p2ps[1] will send an invalid orphan tx (and is later disconnected for that)
        self.reconnect_p2p(num_connections=2)

        self.log.info('Test orphan transaction handling ... ')
        # Create a root transaction that we withhold until all dependent transactions
        # are sent out and in the orphan cache
        SCRIPT_PUB_KEY_OP_TRUE = b'\x51\x75' * 15 + b'\x51'
        tx_withhold = CTransaction()
        tx_withhold.vin.append(CTxIn(outpoint=COutPoint(block1.vtx[0].sha256, 0)))
        tx_withhold.vout.append(CTxOut(nValue=50 * COIN - 12000, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
        tx_withhold.calc_sha256()

        # Our first orphan tx with some outputs to create further orphan txs
        tx_orphan_1 = CTransaction()
        tx_orphan_1.vin.append(CTxIn(outpoint=COutPoint(tx_withhold.sha256, 0)))
        tx_orphan_1.vout = [CTxOut(nValue=10 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE)] * 3
        tx_orphan_1.calc_sha256()

        # A valid transaction with low fee
        tx_orphan_2_no_fee = CTransaction()
        tx_orphan_2_no_fee.vin.append(CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 0)))
        tx_orphan_2_no_fee.vout.append(CTxOut(nValue=10 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))

        # A valid transaction with sufficient fee
        tx_orphan_2_valid = CTransaction()
        tx_orphan_2_valid.vin.append(CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 1)))
        tx_orphan_2_valid.vout.append(CTxOut(nValue=10 * COIN - 12000, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
        tx_orphan_2_valid.calc_sha256()

        # An invalid transaction with negative fee
        tx_orphan_2_invalid = CTransaction()
        tx_orphan_2_invalid.vin.append(CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 2)))
        tx_orphan_2_invalid.vout.append(CTxOut(nValue=11 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))

        self.log.info('Send the orphans ... ')
        # Send valid orphan txs from p2ps[0]
        node.p2p.send_txs_and_test([tx_orphan_1, tx_orphan_2_no_fee, tx_orphan_2_valid], node, success=False)
        # Send invalid tx from p2ps[1]
        node.p2ps[1].send_txs_and_test([tx_orphan_2_invalid], node, success=False)

        assert_equal(0, node.getmempoolinfo()['size'])  # Mempool should be empty
        assert_equal(2, len(node.getpeerinfo()))  # p2ps[1] is still connected

        self.log.info('Send the withhold tx ... ')
        node.p2p.send_txs_and_test([tx_withhold], node, success=True)

        # Transactions that should end up in the mempool
        expected_mempool = {
            t.hash
            for t in [
                tx_withhold,  # The transaction that is the root for all orphans
                tx_orphan_1,  # The orphan transaction that splits the coins
                tx_orphan_2_valid,  # The valid transaction (with sufficient fee)
            ]
        }
        # Transactions that do not end up in the mempool
        # tx_orphan_2_no_fee, because its fee is too low (p2ps[0] is not disconnected for relaying that tx)
        # tx_orphan_2_invalid, because it has a negative fee (p2ps[1] is disconnected for relaying that tx)

        wait_until(lambda: 1 == len(node.getpeerinfo()), timeout=12)  # p2ps[1] is no longer connected
        assert_equal(expected_mempool, set(node.getrawmempool()))

        # Restart the node with BIP61 message sending disabled; check that it disconnects without sending a reject message
        self.log.info('Test a transaction that is rejected, with BIP61 disabled')
        self.restart_node(0, ['-enablebip61=0', '-persistmempool=0'])
        self.reconnect_p2p(num_connections=1)
        with node.assert_debug_log(expected_msgs=[
                "{} from peer=0 was not accepted: mandatory-script-verify-flag-failed (Invalid OP_IF construction) (code 16)".format(tx1.hash),
                "disconnecting peer=0",
        ]):
            node.p2p.send_txs_and_test([tx1], node, success=False, expect_disconnect=True)
        # send_txs_and_test will have waited for disconnect, so we can safely check that no reject has been received
        assert_equal(node.p2p.reject_code_received, None)
Example #18
    def run_test(self):
        # Add p2p connection to node0
        node = self.nodes[0]  # convenience reference to the node
        node.add_p2p_connection(P2PDataStore())

        best_block = node.getblock(node.getbestblockhash())
        tip = int(node.getbestblockhash(), 16)
        height = best_block["height"] + 1
        block_time = best_block["time"] + 1

        self.log.info("Create a new block with an anyone-can-spend coinbase")

        height = 1
        block = create_block(tip, create_coinbase(height), block_time)
        block.solve()
        # Save the coinbase for later
        block1 = block
        tip = block.sha256
        node.p2p.send_blocks_and_test([block1], node, True)

        self.log.info("Mature the block.")
        node.generate(100)

        best_block = node.getblock(node.getbestblockhash())
        tip = int(node.getbestblockhash(), 16)
        height = best_block["height"] + 1
        block_time = best_block["time"] + 1

        # Use merkle-root malleability to generate an invalid block with
        # same blockheader.
        # Manufacture a block with 3 transactions (coinbase, spend of prior
        # coinbase, spend of that spend).  Duplicate the 3rd transaction to
        # leave merkle root and blockheader unchanged but invalidate the block.
        self.log.info("Test merkle root malleability.")

        block2 = create_block(tip, create_coinbase(height), block_time)
        block_time += 1

        # b'\x51' is OP_TRUE
        tx1 = create_tx_with_script(block1.vtx[0],
                                    0,
                                    script_sig=b'\x51',
                                    amount=50 * COIN)
        tx2 = create_tx_with_script(tx1,
                                    0,
                                    script_sig=b'\x51',
                                    amount=50 * COIN)

        block2.vtx.extend([tx1, tx2])
        block2.hashMerkleRoot = block2.calc_merkle_root()
        block2.rehash()
        block2.solve()
        orig_hash = block2.sha256
        block2_orig = copy.deepcopy(block2)

        # Mutate block 2
        block2.vtx.append(tx2)
        assert_equal(block2.hashMerkleRoot, block2.calc_merkle_root())
        assert_equal(orig_hash, block2.rehash())
        assert block2_orig.vtx != block2.vtx

        node.p2p.send_blocks_and_test([block2], node, success=False,
                                      request_block=False, reject_code=16,
                                      reject_reason=b'bad-txns-duplicate')

        # Check transactions for duplicate inputs
        self.log.info("Test duplicate input block.")

        block2_orig.vtx[2].vin.append(block2_orig.vtx[2].vin[0])
        block2_orig.vtx[2].rehash()
        block2_orig.hashMerkleRoot = block2_orig.calc_merkle_root()
        block2_orig.rehash()
        block2_orig.solve()
        node.p2p.send_blocks_and_test(
            [block2_orig],
            node,
            success=False,
            request_block=False,
            reject_reason=b'bad-txns-inputs-duplicate')

        self.log.info("Test very broken block.")

        block3 = create_block(tip, create_coinbase(height), block_time)
        block_time += 1
        block3.vtx[0].vout[0].nValue = 100 * COIN  # Too high!
        block3.vtx[0].sha256 = None
        block3.vtx[0].calc_sha256()
        block3.hashMerkleRoot = block3.calc_merkle_root()
        block3.rehash()
        block3.solve()

        node.p2p.send_blocks_and_test([block3], node, success=False,
                                      request_block=False, reject_code=16,
                                      reject_reason=b'bad-cb-amount')
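
The duplicate-transaction trick in the merkle-malleability test above (CVE-2012-2459) works because Bitcoin's merkle construction duplicates the last hash of any odd-length level, so appending a copy of the final transaction leaves the root unchanged. A minimal standalone sketch, using arbitrary toy txids rather than real transactions:

import hashlib

def dsha256(data):
    return hashlib.sha256(hashlib.sha256(data).digest()).digest()

def merkle_root(hashes):
    hashes = list(hashes)
    while len(hashes) > 1:
        if len(hashes) % 2:
            hashes.append(hashes[-1])  # duplicate the last hash of an odd-length level
        hashes = [dsha256(hashes[i] + hashes[i + 1])
                  for i in range(0, len(hashes), 2)]
    return hashes[0]

txids = [dsha256(bytes([i])) for i in range(3)]  # stand-ins for coinbase, tx1, tx2
assert merkle_root(txids) == merkle_root(txids + [txids[-1]])  # same root after duplicating the last tx
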
    def run_test(self):
        # Setup the p2p connections
        # test_node connects to node0 (not whitelisted)
        test_node = self.nodes[0].add_p2p_connection(P2PInterface())
        # min_work_node connects to node1 (whitelisted)
        min_work_node = self.nodes[1].add_p2p_connection(P2PInterface())

        # 1. Have nodes mine a block (leave IBD)
        [n.generate(1) for n in self.nodes]
        tips = [int("0x" + n.getbestblockhash(), 0) for n in self.nodes]

        # 2. Send one block that builds on each tip.
        # This should be accepted by node0
        blocks_h2 = []  # the height 2 blocks on each node's chain
        block_time = int(time.time()) + 1
        for i in range(2):
            blocks_h2.append(create_block(tips[i], create_coinbase(2), block_time))
            blocks_h2[i].solve()
            block_time += 1
        test_node.send_message(msg_block(blocks_h2[0]))
        min_work_node.send_message(msg_block(blocks_h2[1]))

        for x in [test_node, min_work_node]:
            x.sync_with_ping()
        assert_equal(self.nodes[0].getblockcount(), 2)
        assert_equal(self.nodes[1].getblockcount(), 1)
        self.log.info("First height 2 block accepted by node0; correctly rejected by node1")

        # 3. Send another block that builds on genesis.
        block_h1f = create_block(int("0x" + self.nodes[0].getblockhash(0), 0), create_coinbase(1), block_time)
        block_time += 1
        block_h1f.solve()
        test_node.send_message(msg_block(block_h1f))

        test_node.sync_with_ping()
        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_h1f.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert tip_entry_found
        assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, block_h1f.hash)

        # 4. Send another two blocks that build on the fork.
        block_h2f = create_block(block_h1f.sha256, create_coinbase(2), block_time)
        block_time += 1
        block_h2f.solve()
        test_node.send_message(msg_block(block_h2f))

        test_node.sync_with_ping()
        # Since the earlier block was not processed by the node, the new block
        # can't be fully validated.
        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_h2f.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert tip_entry_found

        # But this block should be accepted by the node since it has equal work.
        self.nodes[0].getblock(block_h2f.hash)
        self.log.info("Second height 2 block accepted, but not reorg'ed to")

        # 4b. Now send another block that builds on the forking chain.
        block_h3 = create_block(block_h2f.sha256, create_coinbase(3), block_h2f.nTime+1)
        block_h3.solve()
        test_node.send_message(msg_block(block_h3))

        test_node.sync_with_ping()
        # Since the earlier block was not processed by the node, the new block
        # can't be fully validated.
        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_h3.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert tip_entry_found
        self.nodes[0].getblock(block_h3.hash)

        # But this block should be accepted by the node since it has more work.
        self.nodes[0].getblock(block_h3.hash)
        self.log.info("Unrequested more-work block accepted")

        # 4c. Now mine 288 more blocks and deliver; all but the last (height-too-high)
        # should be processed by the node (as long as it is not missing any headers)
        tip = block_h3
        all_blocks = []
        for i in range(288):
            next_block = create_block(tip.sha256, create_coinbase(i + 4), tip.nTime+1)
            next_block.solve()
            all_blocks.append(next_block)
            tip = next_block

        # Now send the block at height 5 and check that it wasn't accepted (missing header)
        test_node.send_message(msg_block(all_blocks[1]))
        test_node.sync_with_ping()
        assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblock, all_blocks[1].hash)
        assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblockheader, all_blocks[1].hash)

        # The block at height 5 should be accepted if we provide the missing header, though
        headers_message = msg_headers()
        headers_message.headers.append(CBlockHeader(all_blocks[0]))
        test_node.send_message(headers_message)
        test_node.send_message(msg_block(all_blocks[1]))
        test_node.sync_with_ping()
        self.nodes[0].getblock(all_blocks[1].hash)

        # Now send the blocks in all_blocks
        for i in range(288):
            test_node.send_message(msg_block(all_blocks[i]))
        test_node.sync_with_ping()

        # Blocks 1-287 should be accepted, block 288 should be ignored because it's too far ahead
        for x in all_blocks[:-1]:
            self.nodes[0].getblock(x.hash)
        assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, all_blocks[-1].hash)

        # 5. Test handling of the unrequested block on the node that didn't process it.
        # It should still not be processed (even though it has a child with more work).

        # The node should have requested the blocks at some point, so
        # disconnect/reconnect first

        self.nodes[0].disconnect_p2ps()
        self.nodes[1].disconnect_p2ps()

        test_node = self.nodes[0].add_p2p_connection(P2PInterface())

        test_node.send_message(msg_block(block_h1f))

        test_node.sync_with_ping()
        assert_equal(self.nodes[0].getblockcount(), 2)
        self.log.info("Unrequested block that would complete more-work chain was ignored")

        # 6. Try to get node to request the missing block.
        # Poke the node with an inv for block at height 3 and see if that
        # triggers a getdata on block 2 (it should if block 2 is missing).
        with mininode_lock:
            # Clear state so we can check the getdata request
            test_node.last_message.pop("getdata", None)
            test_node.send_message(msg_inv([CInv(2, block_h3.sha256)]))

        test_node.sync_with_ping()
        with mininode_lock:
            getdata = test_node.last_message["getdata"]

        # Check that the getdata includes the right block
        assert_equal(getdata.inv[0].hash, block_h1f.sha256)
        self.log.info("Inv at tip triggered getdata for unprocessed block")

        # 7. Send the missing block for the third time (now it is requested)
        test_node.send_message(msg_block(block_h1f))

        test_node.sync_with_ping()
        assert_equal(self.nodes[0].getblockcount(), 290)
        self.nodes[0].getblock(all_blocks[286].hash)
        assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash)
        assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, all_blocks[287].hash)
        self.log.info("Successfully reorged to longer chain from non-whitelisted peer")

        # 8. Create a chain which is invalid at a height longer than the
        # current chain, but which has more blocks on top of that
        block_289f = create_block(all_blocks[284].sha256, create_coinbase(289), all_blocks[284].nTime+1)
        block_289f.solve()
        block_290f = create_block(block_289f.sha256, create_coinbase(290), block_289f.nTime+1)
        block_290f.solve()
        block_291 = create_block(block_290f.sha256, create_coinbase(291), block_290f.nTime+1)
        # block_291 spends a coinbase below maturity!
        block_291.vtx.append(create_tx_with_script(block_290f.vtx[0], 0, script_sig=b"42", amount=1))
        block_291.hashMerkleRoot = block_291.calc_merkle_root()
        block_291.solve()
        block_292 = create_block(block_291.sha256, create_coinbase(292), block_291.nTime+1)
        block_292.solve()

        # Now send all the headers on the chain and enough blocks to trigger reorg
        headers_message = msg_headers()
        headers_message.headers.append(CBlockHeader(block_289f))
        headers_message.headers.append(CBlockHeader(block_290f))
        headers_message.headers.append(CBlockHeader(block_291))
        headers_message.headers.append(CBlockHeader(block_292))
        test_node.send_message(headers_message)

        test_node.sync_with_ping()
        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_292.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert tip_entry_found
        assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, block_292.hash)

        test_node.send_message(msg_block(block_289f))
        test_node.send_message(msg_block(block_290f))

        test_node.sync_with_ping()
        self.nodes[0].getblock(block_289f.hash)
        self.nodes[0].getblock(block_290f.hash)

        test_node.send_message(msg_block(block_291))

        # At this point we've sent an obviously-bogus block; wait for full processing
        # without assuming whether we will be disconnected or not
        try:
            # Only wait a short while so the test doesn't take forever if we do get
            # disconnected
            test_node.sync_with_ping(timeout=1)
        except AssertionError:
            test_node.wait_for_disconnect()

            self.nodes[0].disconnect_p2ps()
            test_node = self.nodes[0].add_p2p_connection(P2PInterface())

        # We should have failed reorg and switched back to 290 (but have block 291)
        assert_equal(self.nodes[0].getblockcount(), 290)
        assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash)
        assert_equal(self.nodes[0].getblock(block_291.hash)["confirmations"], -1)

        # Now send a new header on the invalid chain, indicating we're forked off, and expect to get disconnected
        block_293 = create_block(block_292.sha256, create_coinbase(293), block_292.nTime+1)
        block_293.solve()
        headers_message = msg_headers()
        headers_message.headers.append(CBlockHeader(block_293))
        test_node.send_message(headers_message)
        test_node.wait_for_disconnect()

        # 9. Connect node1 to node0 and ensure it is able to sync
        connect_nodes(self.nodes[0], 1)
        sync_blocks([self.nodes[0], self.nodes[1]])
        self.log.info("Successfully synced nodes 1 and 0")
 def create_always_valid_chained_tx(spend):
     tx = create_tx_with_script(spend.tx, spend.n, script_sig=b'',
                                amount=spend.tx.vout[0].nValue - 1000,
                                script_pub_key=CScript([OP_TRUE]))
     tx.rehash()
     return tx, PreviousSpendableOutput(tx, 0)
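
A hypothetical way to call create_always_valid_chained_tx, assuming PreviousSpendableOutput(tx, n) simply records a spendable output and that parent_tx (a made-up name) carries an anyone-can-spend output 0; each chained transaction pays a 1000 satoshi fee out of output 0 of its parent:

spend = PreviousSpendableOutput(parent_tx, 0)
tx_a, spend = create_always_valid_chained_tx(spend)  # spends parent_tx output 0
tx_b, spend = create_always_valid_chained_tx(spend)  # spends tx_a output 0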