def activateCSV(self):
    # activation should happen at block height 432 (3 periods)
    # getblockchaininfo will show CSV as active at block 431 (144 * 3 - 1) since it's returning whether CSV is active for the next block.
    min_activation_height = 432
    height = self.nodes[0].getblockcount()
    assert_greater_than(min_activation_height - height, 2)
    self.nodes[0].generate(min_activation_height - height - 2)
    assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], "locked_in")
    self.nodes[0].generate(1)
    assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], "active")
    sync_blocks(self.nodes)
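
# A minimal sketch (an assumption, not part of these snippets) of the test-framework helper
# get_bip9_status() used throughout: it returns the BIP9 deployment entry for the given
# softfork key from getblockchaininfo(), i.e. the state that applies to the next block.
def get_bip9_status(node, key):
    info = node.getblockchaininfo()
    return info['bip9_softforks'][key]
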
    def run_test(self):

        # First, quick check that CSV is ACTIVE at genesis
        assert_equal(self.nodes[0].getblockcount(), 0)
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'active')

        self.nodes[0].add_p2p_connection(P2PInterface())

        self.nodeaddress = self.nodes[0].getnewaddress()

        self.log.info("Test that blocks past the genesis block must be at least version 4")

        # Create a v3 block
        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(1), block_time)
        block.nVersion = 3
        block.solve()

        # The best block should not have changed, because...
        assert_equal(self.nodes[0].getbestblockhash(), tip)

        # ... we rejected it because it is v3
        with self.nodes[0].assert_debug_log(expected_msgs=['{}, bad-version(0x00000003)'.format(block.hash)]):
            # Send it to the node
            self.nodes[0].p2p.send_and_ping(msg_block(block))

        self.log.info("Test that a version 4 block with a valid-according-to-CLTV transaction is accepted")

        # Generate 100 blocks so that first coinbase matures
        generated_blocks = self.nodes[0].generate(100)
        spendable_coinbase_txid = self.nodes[0].getblock(generated_blocks[0])['tx'][0]
        coinbase_value = self.nodes[0].decoderawtransaction(self.nodes[0].gettransaction(spendable_coinbase_txid)["hex"])["vout"][0]["value"]
        tip = generated_blocks[-1]

        # Construct a v4 block
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(len(generated_blocks) + 1), block_time)
        block.nVersion = 4

        # Create a CLTV transaction
        spendtx = create_transaction(self.nodes[0], spendable_coinbase_txid,
                self.nodeaddress, amount=1.0, fee=coinbase_value-1)
        spendtx = cltv_validate(self.nodes[0], spendtx, 1)
        spendtx.rehash()

        # Add the CLTV transaction and prepare for sending
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

        # Send block and check that it becomes new best block
        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
# Example #3
    def run_test(self):
        # Setup the p2p connections
        self.segwit_node = self.nodes[0].add_p2p_connection(TestP2PConn(cmpct_version=2))
        self.old_node = self.nodes[0].add_p2p_connection(TestP2PConn(cmpct_version=1), services=NODE_NETWORK)
        self.additional_segwit_node = self.nodes[0].add_p2p_connection(TestP2PConn(cmpct_version=2))

        # We will need UTXOs to construct transactions in later tests.
        self.make_utxos()

        assert_equal(get_bip9_status(self.nodes[0], "segwit")["status"], 'active')

        self.log.info("Testing SENDCMPCT p2p message... ")
        self.test_sendcmpct(self.segwit_node, old_node=self.old_node)
        self.test_sendcmpct(self.additional_segwit_node)

        self.log.info("Testing compactblock construction...")
        self.test_compactblock_construction(self.old_node)
        self.test_compactblock_construction(self.segwit_node)

        self.log.info("Testing compactblock requests (segwit node)... ")
        self.test_compactblock_requests(self.segwit_node)

        self.log.info("Testing getblocktxn requests (segwit node)...")
        self.test_getblocktxn_requests(self.segwit_node)

        self.log.info("Testing getblocktxn handler (segwit node should return witnesses)...")
        self.test_getblocktxn_handler(self.segwit_node)
        self.test_getblocktxn_handler(self.old_node)

        self.log.info("Testing compactblock requests/announcements not at chain tip...")
        self.test_compactblocks_not_at_tip(self.segwit_node)
        self.test_compactblocks_not_at_tip(self.old_node)

        self.log.info("Testing handling of incorrect blocktxn responses...")
        self.test_incorrect_blocktxn_response(self.segwit_node)

        self.log.info("Testing reconstructing compact blocks from all peers...")
        self.test_compactblock_reconstruction_multiple_peers(self.segwit_node, self.additional_segwit_node)

        # Test that if we submitblock to node1, we'll get a compact block
        # announcement to all peers.
        # (Post-segwit activation, blocks won't propagate from node0 to node1
        # automatically, so don't bother testing a block announced to node0.)
        self.log.info("Testing end-to-end block relay...")
        self.request_cb_announcements(self.old_node)
        self.request_cb_announcements(self.segwit_node)
        self.test_end_to_end_block_relay([self.segwit_node, self.old_node])

        self.log.info("Testing handling of invalid compact blocks...")
        self.test_invalid_tx_in_compactblock(self.segwit_node)
        self.test_invalid_tx_in_compactblock(self.old_node)

        self.log.info("Testing invalid index in cmpctblock message...")
        self.test_invalid_cmpctblock_message()
    def test_bip68_not_consensus(self):
        assert get_bip9_status(self.nodes[0], 'csv')['status'] != 'active'
        txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 2)

        tx1 = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txid))
        tx1.rehash()

        # Make an anyone-can-spend transaction
        tx2 = CTransaction()
        tx2.nVersion = 1
        tx2.vin = [CTxIn(COutPoint(tx1.sha256, 0), nSequence=0)]
        tx2.vout = [CTxOut(int(tx1.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))]

        # sign tx2
        tx2_raw = self.nodes[0].signrawtransactionwithwallet(ToHex(tx2))["hex"]
        tx2 = FromHex(tx2, tx2_raw)
        tx2.rehash()

        self.nodes[0].sendrawtransaction(ToHex(tx2))

        # Now make an invalid spend of tx2 according to BIP68
        sequence_value = 100 # 100 block relative locktime
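        # BIP68 encoding sketch (illustration only, not part of the original test): bit 31 of
        # nSequence is the disable flag, bit 22 selects time-based (512s units) vs height-based
        # locks, and the low 16 bits hold the lock value, so 100 here is an enforced
        # 100-block relative lock.
        assert sequence_value & (1 << 31) == 0  # disable flag unset -> lock is enforced
        assert sequence_value & (1 << 22) == 0  # type flag unset -> height-based lock
        assert sequence_value & 0x0000FFFF == 100  # 100-block relative locktime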

        tx3 = CTransaction()
        tx3.nVersion = 2
        tx3.vin = [CTxIn(COutPoint(tx2.sha256, 0), nSequence=sequence_value)]
        tx3.vout = [CTxOut(int(tx2.vout[0].nValue - self.relayfee * COIN), CScript([b'a' * 35]))]
        tx3.rehash()

        assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, ToHex(tx3))

        # make a block that violates bip68; ensure that the tip updates
        tip = int(self.nodes[0].getbestblockhash(), 16)
        block = create_block(tip, create_coinbase(self.nodes[0].getblockcount()+1))
        block.nVersion = 3
        block.vtx.extend([tx1, tx2, tx3])
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        add_witness_commitment(block)
        block.solve()

        self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True)))
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)
    def run_test(self):
        self.nodes[0].add_p2p_connection(P2PDataStore())

        self.log.info("Generate blocks in the past for coinbase outputs.")
        long_past_time = int(time.time()) - 600 * 1000  # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future
        self.nodes[0].setmocktime(long_past_time - 100)  # enough so that the generated blocks will still all be before long_past_time
        self.coinbase_blocks = self.nodes[0].generate(1 + 16 + 2 * 32 + 1)  # 82 blocks generated for inputs
        self.nodes[0].setmocktime(0)  # set time back to present so yielded blocks aren't in the future as we advance last_block_time
        self.tipheight = 82  # height of the next block to build
        self.last_block_time = long_past_time
        self.tip = int(self.nodes[0].getbestblockhash(), 16)
        self.nodeaddress = self.nodes[0].getnewaddress()

        self.log.info("Test that the csv softfork is DEFINED")
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'defined')
        test_blocks = self.generate_blocks(61, 4)
        self.sync_blocks(test_blocks)

        self.log.info("Advance from DEFINED to STARTED, height = 143")
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')

        self.log.info("Fail to achieve LOCKED_IN")
        # 100 out of 144 signal bit 0. Use a variety of bits to simulate multiple parallel softforks

        test_blocks = self.generate_blocks(50, 536870913)  # 0x20000001 (signalling ready)
        test_blocks = self.generate_blocks(20, 4, test_blocks)  # 0x00000004 (signalling not)
        test_blocks = self.generate_blocks(50, 536871169, test_blocks)  # 0x20000101 (signalling ready)
        test_blocks = self.generate_blocks(24, 536936448, test_blocks)  # 0x20010000 (signalling not)
        self.sync_blocks(test_blocks)
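
        # Sanity-check sketch (illustration only, not part of the original test): a block
        # version signals a BIP9 deployment bit when its top three bits are 001 and that
        # bit is set, which is why 0x20000001 counts towards bit 0 while plain nVersion=4
        # and 0x20010000 do not.
        def signals_bit(version, bit):
            return (version & 0xE0000000) == 0x20000000 and bool(version & (1 << bit))
        assert signals_bit(0x20000001, 0) and signals_bit(0x20000101, 0)
        assert not signals_bit(0x00000004, 0) and not signals_bit(0x20010000, 0)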

        self.log.info("Failed to advance past STARTED, height = 287")
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')

        self.log.info("Generate blocks to achieve LOCK-IN")
        # 108 out of 144 signal bit 0 to achieve lock-in
        # using a variety of bits to simulate multiple parallel softforks
        test_blocks = self.generate_blocks(58, 536870913)  # 0x20000001 (signalling ready)
        test_blocks = self.generate_blocks(26, 4, test_blocks)  # 0x00000004 (signalling not)
        test_blocks = self.generate_blocks(50, 536871169, test_blocks)  # 0x20000101 (signalling ready)
        test_blocks = self.generate_blocks(10, 536936448, test_blocks)  # 0x20010000 (signalling not)
        self.sync_blocks(test_blocks)

        self.log.info("Advanced from STARTED to LOCKED_IN, height = 431")
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')

        # Generate 140 more version 4 blocks
        test_blocks = self.generate_blocks(140, 4)
        self.sync_blocks(test_blocks)

        # Inputs at height = 572
        #
        # Put inputs for all tests in the chain at height 572 (tip now = 571) (time increases by 600s per block)
        # Note we reuse inputs for v1 and v2 txs so must test these separately
        # 16 normal inputs
        bip68inputs = []
        for i in range(16):
            bip68inputs.append(send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress))

        # 2 sets of 16 inputs with 10 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112basicinputs = []
        for j in range(2):
            inputs = []
            for i in range(16):
                inputs.append(send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress))
            bip112basicinputs.append(inputs)

        # 2 sets of 16 varied inputs with (relative_lock_time) OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112diverseinputs = []
        for j in range(2):
            inputs = []
            for i in range(16):
                inputs.append(send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress))
            bip112diverseinputs.append(inputs)

        # 1 special input with -1 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112specialinput = send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress)

        # 1 normal input
        bip113input = send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress)

        self.nodes[0].setmocktime(self.last_block_time + 600)
        inputblockhash = self.nodes[0].generate(1)[0]  # 1 block generated for inputs to be in chain at height 572
        self.nodes[0].setmocktime(0)
        self.tip = int(inputblockhash, 16)
        self.tipheight += 1
        self.last_block_time += 600
        assert_equal(len(self.nodes[0].getblock(inputblockhash, True)["tx"]), 82 + 1)

        # 2 more version 4 blocks
        test_blocks = self.generate_blocks(2, 4)
        self.sync_blocks(test_blocks)

        self.log.info("Not yet advanced to ACTIVE, height = 574 (will activate for block 576, not 575)")
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')

        # Test both version 1 and version 2 transactions for all tests
        # BIP113 test transaction will be modified before each use to put in appropriate block time
        bip113tx_v1 = create_transaction(self.nodes[0], bip113input, self.nodeaddress, amount=Decimal("49.98"))
        bip113tx_v1.vin[0].nSequence = 0xFFFFFFFE
        bip113tx_v1.nVersion = 1
        bip113tx_v2 = create_transaction(self.nodes[0], bip113input, self.nodeaddress, amount=Decimal("49.98"))
        bip113tx_v2.vin[0].nSequence = 0xFFFFFFFE
        bip113tx_v2.nVersion = 2
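        # nSequence is set just below 0xFFFFFFFF so that nLockTime is not disabled; under
        # BIP113 the lock time is then checked against the median time past of the previous
        # block rather than the block's own timestamp.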

        # For BIP68 test all 16 relative sequence locktimes
        bip68txs_v1 = create_bip68txs(self.nodes[0], bip68inputs, 1, self.nodeaddress)
        bip68txs_v2 = create_bip68txs(self.nodes[0], bip68inputs, 2, self.nodeaddress)

        # For BIP112 test:
        # 16 relative sequence locktimes of 10 against 10 OP_CSV OP_DROP inputs
        bip112txs_vary_nSequence_v1 = create_bip112txs(self.nodes[0], bip112basicinputs[0], False, 1, self.nodeaddress)
        bip112txs_vary_nSequence_v2 = create_bip112txs(self.nodes[0], bip112basicinputs[0], False, 2, self.nodeaddress)
        # 16 relative sequence locktimes of 9 against 10 OP_CSV OP_DROP inputs
        bip112txs_vary_nSequence_9_v1 = create_bip112txs(self.nodes[0], bip112basicinputs[1], False, 1, self.nodeaddress, -1)
        bip112txs_vary_nSequence_9_v2 = create_bip112txs(self.nodes[0], bip112basicinputs[1], False, 2, self.nodeaddress, -1)
        # sequence lock time of 10 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
        bip112txs_vary_OP_CSV_v1 = create_bip112txs(self.nodes[0], bip112diverseinputs[0], True, 1, self.nodeaddress)
        bip112txs_vary_OP_CSV_v2 = create_bip112txs(self.nodes[0], bip112diverseinputs[0], True, 2, self.nodeaddress)
        # sequence lock time of 9 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
        bip112txs_vary_OP_CSV_9_v1 = create_bip112txs(self.nodes[0], bip112diverseinputs[1], True, 1, self.nodeaddress, -1)
        bip112txs_vary_OP_CSV_9_v2 = create_bip112txs(self.nodes[0], bip112diverseinputs[1], True, 2, self.nodeaddress, -1)
        # -1 OP_CSV OP_DROP input
        bip112tx_special_v1 = create_bip112special(self.nodes[0], bip112specialinput, 1, self.nodeaddress)
        bip112tx_special_v2 = create_bip112special(self.nodes[0], bip112specialinput, 2, self.nodeaddress)

        self.log.info("TESTING")

        self.log.info("Pre-Soft Fork Tests. All txs should pass.")
        self.log.info("Test version 1 txs")

        success_txs = []
        # add BIP113 tx and -1 CSV tx
        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
        success_txs.append(bip113signed1)
        success_txs.append(bip112tx_special_v1)
        # add BIP 68 txs
        success_txs.extend(all_rlt_txs(bip68txs_v1))
        # add BIP 112 with seq=10 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v1))
        # try BIP 112 with seq=9 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1))
        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        self.log.info("Test version 2 txs")

        success_txs = []
        # add BIP113 tx and -1 CSV tx
        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
        success_txs.append(bip113signed2)
        success_txs.append(bip112tx_special_v2)
        # add BIP 68 txs
        success_txs.extend(all_rlt_txs(bip68txs_v2))
        # add BIP 112 with seq=10 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v2))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v2))
        # try BIP 112 with seq=9 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2))
        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # 1 more version 4 block to get us to height 575 so the fork should now be active for the next block
        test_blocks = self.generate_blocks(1, 4)
        self.sync_blocks(test_blocks)
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'active')

        self.log.info("Post-Soft Fork Tests.")

        self.log.info("BIP 113 tests")
        # BIP 113 tests should now fail regardless of version number if nLockTime isn't satisfied by new rules
        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
        for bip113tx in [bip113signed1, bip113signed2]:
            self.sync_blocks([self.create_test_block([bip113tx])], success=False)
        # BIP 113 tests should now pass if the locktime is < MTP
        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1  # < MTP of prior block
        bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1  # < MTP of prior block
        bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
        for bip113tx in [bip113signed1, bip113signed2]:
            self.sync_blocks([self.create_test_block([bip113tx])])
            self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # Next block height = 580 after 4 blocks of random version
        test_blocks = self.generate_blocks(4, 1234)
        self.sync_blocks(test_blocks)

        self.log.info("BIP 68 tests")
        self.log.info("Test version 1 txs - all should still pass")

        success_txs = []
        success_txs.extend(all_rlt_txs(bip68txs_v1))
        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        self.log.info("Test version 2 txs")

        # All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass
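        # In the dicts built by create_bip68txs()/create_bip112txs(), 'sdf' is assumed to flag
        # SEQUENCE_LOCKTIME_DISABLE_FLAG set in nSequence and 'stf' SEQUENCE_LOCKTIME_TYPE_FLAG
        # set (time-based rather than height-based relative lock).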
        bip68success_txs = [tx['tx'] for tx in bip68txs_v2 if tx['sdf']]
        self.sync_blocks([self.create_test_block(bip68success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # All txs without flag fail as we are at delta height = 8 < 10 and delta time = 8 * 600 < 10 * 512
        bip68timetxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and tx['stf']]
        for tx in bip68timetxs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        bip68heighttxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and not tx['stf']]
        for tx in bip68heighttxs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        # Advance one block to 581
        test_blocks = self.generate_blocks(1, 1234)
        self.sync_blocks(test_blocks)

        # Height txs should fail and time txs should now pass 9 * 600 > 10 * 512
        bip68success_txs.extend(bip68timetxs)
        self.sync_blocks([self.create_test_block(bip68success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        for tx in bip68heighttxs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        # Advance one block to 582
        test_blocks = self.generate_blocks(1, 1234)
        self.sync_blocks(test_blocks)

        # All BIP 68 txs should pass
        bip68success_txs.extend(bip68heighttxs)
        self.sync_blocks([self.create_test_block(bip68success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        self.log.info("BIP 112 tests")
        self.log.info("Test version 1 txs")

        # -1 OP_CSV tx should fail
        self.sync_blocks([self.create_test_block([bip112tx_special_v1])], success=False)
        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass

        success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if tx['sdf']]
        success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if tx['sdf']]
        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, version 1 txs should now fail
        fail_txs = all_rlt_txs(bip112txs_vary_nSequence_v1)
        fail_txs += all_rlt_txs(bip112txs_vary_nSequence_9_v1)
        fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if not tx['sdf']]
        fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if not tx['sdf']]
        for tx in fail_txs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        self.log.info("Test version 2 txs")

        # -1 OP_CSV tx should fail
        self.sync_blocks([self.create_test_block([bip112tx_special_v2])], success=False)

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met)
        success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if tx['sdf']]
        success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if tx['sdf']]

        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs ##

        # All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check
        fail_txs = all_rlt_txs(bip112txs_vary_nSequence_9_v2)
        fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if not tx['sdf']]
        for tx in fail_txs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail
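        # (Per BIP112, the OP_CSV operand here has the disable flag unset, so the opcode
        # requires an enforceable lock in the input's nSequence; setting the disable flag
        # there makes the script check fail.)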
        fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if tx['sdf']]
        for tx in fail_txs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        # If sequencelock types mismatch, tx should fail
        fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and tx['stf']]
        fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']]
        for tx in fail_txs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        # Remaining txs should pass, just test masking works properly
        success_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and not tx['stf']]
        success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and not tx['stf']]
        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # Additional test, of checking that comparison of two time types works properly
        time_txs = []
        for tx in [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']]:
            tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME | SEQ_TYPE_FLAG
            signtx = sign_transaction(self.nodes[0], tx)
            time_txs.append(signtx)

        self.sync_blocks([self.create_test_block(time_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
# Example #6
def activate_segwit(self, node):
    node.generate(144 * 3)
    assert_equal(get_bip9_status(node, "segwit")["status"], 'active')
    def run_test(self):
        self.nodes[0].add_p2p_connection(P2PDataStore())

        self.log.info("Generate blocks in the past for coinbase outputs.")
        long_past_time = int(
            time.time()
        ) - 600 * 1000  # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future
        self.nodes[0].setmocktime(
            long_past_time - 100
        )  # enough so that the generated blocks will still all be before long_past_time
        self.coinbase_blocks = self.nodes[0].generate(
            1 + 16 + 2 * 32 + 1)  # 82 blocks generated for inputs
        self.nodes[0].setmocktime(
            0
        )  # set time back to present so yielded blocks aren't in the future as we advance last_block_time
        self.tipheight = 82  # height of the next block to build
        self.last_block_time = long_past_time
        self.tip = int(self.nodes[0].getbestblockhash(), 16)
        self.nodeaddress = self.nodes[0].getnewaddress()

        self.log.info("Test that the csv softfork is DEFINED")
        assert_equal(
            get_bip9_status(self.nodes[0], 'csv')['status'], 'defined')
        test_blocks = self.generate_blocks(61, 0x20000000)
        self.sync_blocks(test_blocks)
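        # (0x20000000 is the bare BIP9 version: top bits 001 with no deployment bits set,
        # so these blocks do not signal for the csv deployment.)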

        self.log.info("Advance from DEFINED to STARTED, height = 143")
        assert_equal(
            get_bip9_status(self.nodes[0], 'csv')['status'], 'started')

        self.log.info("Fail to achieve LOCKED_IN")
        # 100 out of 144 signal bit 0. Use a variety of bits to simulate multiple parallel softforks

        test_blocks = self.generate_blocks(
            50, 536870913)  # 0x20000001 (signalling ready)
        test_blocks = self.generate_blocks(
            20, 0x20000000, test_blocks)  # 0x20000000 (signalling not)
        test_blocks = self.generate_blocks(
            50, 536871169, test_blocks)  # 0x20000101 (signalling ready)
        test_blocks = self.generate_blocks(
            24, 536936448, test_blocks)  # 0x20010000 (signalling not)
        self.sync_blocks(test_blocks)

        self.log.info("Failed to advance past STARTED, height = 287")
        assert_equal(
            get_bip9_status(self.nodes[0], 'csv')['status'], 'started')

        self.log.info("Generate blocks to achieve LOCK-IN")
        # 108 out of 144 signal bit 0 to achieve lock-in
        # using a variety of bits to simulate multiple parallel softforks
        test_blocks = self.generate_blocks(
            58, 536870913)  # 0x20000001 (signalling ready)
        test_blocks = self.generate_blocks(
            26, 0x20000000, test_blocks)  # 0x20000000 (signalling not)
        test_blocks = self.generate_blocks(
            50, 536871169, test_blocks)  # 0x20000101 (signalling ready)
        test_blocks = self.generate_blocks(
            10, 536936448, test_blocks)  # 0x20010000 (signalling not)
        self.sync_blocks(test_blocks)

        self.log.info("Advanced from STARTED to LOCKED_IN, height = 431")
        assert_equal(
            get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')

        # Generate 140 more non-signalling (0x20000000) blocks
        test_blocks = self.generate_blocks(140, 0x20000000)
        self.sync_blocks(test_blocks)

        # Inputs at height = 572
        #
        # Put inputs for all tests in the chain at height 572 (tip now = 571) (time increases by 600s per block)
        # Note we reuse inputs for v1 and v2 txs so must test these separately
        # 16 normal inputs
        bip68inputs = []
        for i in range(16):
            bip68inputs.append(
                send_generic_input_tx(self.nodes[0], self.coinbase_blocks,
                                      self.nodeaddress))

        # 2 sets of 16 inputs with 10 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112basicinputs = []
        for j in range(2):
            inputs = []
            for i in range(16):
                inputs.append(
                    send_generic_input_tx(self.nodes[0], self.coinbase_blocks,
                                          self.nodeaddress))
            bip112basicinputs.append(inputs)

        # 2 sets of 16 varied inputs with (relative_lock_time) OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112diverseinputs = []
        for j in range(2):
            inputs = []
            for i in range(16):
                inputs.append(
                    send_generic_input_tx(self.nodes[0], self.coinbase_blocks,
                                          self.nodeaddress))
            bip112diverseinputs.append(inputs)

        # 1 special input with -1 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112specialinput = send_generic_input_tx(self.nodes[0],
                                                   self.coinbase_blocks,
                                                   self.nodeaddress)

        # 1 normal input
        bip113input = send_generic_input_tx(self.nodes[0],
                                            self.coinbase_blocks,
                                            self.nodeaddress)

        self.nodes[0].setmocktime(self.last_block_time + 600)
        inputblockhash = self.nodes[0].generate(1)[
            0]  # 1 block generated for inputs to be in chain at height 572
        self.nodes[0].setmocktime(0)
        self.tip = int(inputblockhash, 16)
        self.tipheight += 1
        self.last_block_time += 600
        assert_equal(len(self.nodes[0].getblock(inputblockhash, True)["tx"]),
                     82 + 1)

        # 2 more non-signalling blocks
        test_blocks = self.generate_blocks(2, 0x20000000)
        self.sync_blocks(test_blocks)

        self.log.info(
            "Not yet advanced to ACTIVE, height = 574 (will activate for block 576, not 575)"
        )
        assert_equal(
            get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')

        # Test both version 1 and version 2 transactions for all tests
        # BIP113 test transaction will be modified before each use to put in appropriate block time
        bip113tx_v1 = create_transaction(self.nodes[0],
                                         bip113input,
                                         self.nodeaddress,
                                         amount=Decimal("49.98"))
        bip113tx_v1.vin[0].nSequence = 0xFFFFFFFE
        bip113tx_v1.nVersion = 1
        bip113tx_v2 = create_transaction(self.nodes[0],
                                         bip113input,
                                         self.nodeaddress,
                                         amount=Decimal("49.98"))
        bip113tx_v2.vin[0].nSequence = 0xFFFFFFFE
        bip113tx_v2.nVersion = 2

        # For BIP68 test all 16 relative sequence locktimes
        bip68txs_v1 = create_bip68txs(self.nodes[0], bip68inputs, 1,
                                      self.nodeaddress)
        bip68txs_v2 = create_bip68txs(self.nodes[0], bip68inputs, 2,
                                      self.nodeaddress)

        # For BIP112 test:
        # 16 relative sequence locktimes of 10 against 10 OP_CSV OP_DROP inputs
        bip112txs_vary_nSequence_v1 = create_bip112txs(self.nodes[0],
                                                       bip112basicinputs[0],
                                                       False, 1,
                                                       self.nodeaddress)
        bip112txs_vary_nSequence_v2 = create_bip112txs(self.nodes[0],
                                                       bip112basicinputs[0],
                                                       False, 2,
                                                       self.nodeaddress)
        # 16 relative sequence locktimes of 9 against 10 OP_CSV OP_DROP inputs
        bip112txs_vary_nSequence_9_v1 = create_bip112txs(
            self.nodes[0], bip112basicinputs[1], False, 1, self.nodeaddress,
            -1)
        bip112txs_vary_nSequence_9_v2 = create_bip112txs(
            self.nodes[0], bip112basicinputs[1], False, 2, self.nodeaddress,
            -1)
        # sequence lock time of 10 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
        bip112txs_vary_OP_CSV_v1 = create_bip112txs(self.nodes[0],
                                                    bip112diverseinputs[0],
                                                    True, 1, self.nodeaddress)
        bip112txs_vary_OP_CSV_v2 = create_bip112txs(self.nodes[0],
                                                    bip112diverseinputs[0],
                                                    True, 2, self.nodeaddress)
        # sequence lock time of 9 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
        bip112txs_vary_OP_CSV_9_v1 = create_bip112txs(self.nodes[0],
                                                      bip112diverseinputs[1],
                                                      True, 1,
                                                      self.nodeaddress, -1)
        bip112txs_vary_OP_CSV_9_v2 = create_bip112txs(self.nodes[0],
                                                      bip112diverseinputs[1],
                                                      True, 2,
                                                      self.nodeaddress, -1)
        # -1 OP_CSV OP_DROP input
        bip112tx_special_v1 = create_bip112special(self.nodes[0],
                                                   bip112specialinput, 1,
                                                   self.nodeaddress)
        bip112tx_special_v2 = create_bip112special(self.nodes[0],
                                                   bip112specialinput, 2,
                                                   self.nodeaddress)

        self.log.info("TESTING")

        self.log.info("Pre-Soft Fork Tests. All txs should pass.")
        self.log.info("Test version 1 txs")

        success_txs = []
        # add BIP113 tx and -1 CSV tx
        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
        success_txs.append(bip113signed1)
        success_txs.append(bip112tx_special_v1)
        # add BIP 68 txs
        success_txs.extend(all_rlt_txs(bip68txs_v1))
        # add BIP 112 with seq=10 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v1))
        # try BIP 112 with seq=9 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1))
        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        self.log.info("Test version 2 txs")

        success_txs = []
        # add BIP113 tx and -1 CSV tx
        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
        success_txs.append(bip113signed2)
        success_txs.append(bip112tx_special_v2)
        # add BIP 68 txs
        success_txs.extend(all_rlt_txs(bip68txs_v2))
        # add BIP 112 with seq=10 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v2))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v2))
        # try BIP 112 with seq=9 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2))
        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # 1 more non-signalling block to get us to height 575 so the fork should now be active for the next block
        test_blocks = self.generate_blocks(1, 0x20000000)
        self.sync_blocks(test_blocks)
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'active')

        self.log.info("Post-Soft Fork Tests.")

        self.log.info("BIP 113 tests")
        # BIP 113 tests should now fail regardless of version number if nLockTime isn't satisfied by new rules
        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
        for bip113tx in [bip113signed1, bip113signed2]:
            self.sync_blocks([self.create_test_block([bip113tx])],
                             success=False)
        # BIP 113 tests should now pass if the locktime is < MTP
        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1  # < MTP of prior block
        bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1  # < MTP of prior block
        bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
        for bip113tx in [bip113signed1, bip113signed2]:
            self.sync_blocks([self.create_test_block([bip113tx])])
            self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # Next block height = 580 after 4 more non-signalling blocks
        test_blocks = self.generate_blocks(4, 0x20000000)
        self.sync_blocks(test_blocks)

        self.log.info("BIP 68 tests")
        self.log.info("Test version 1 txs - all should still pass")

        success_txs = []
        success_txs.extend(all_rlt_txs(bip68txs_v1))
        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        self.log.info("Test version 2 txs")

        # All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass
        bip68success_txs = [tx['tx'] for tx in bip68txs_v2 if tx['sdf']]
        self.sync_blocks([self.create_test_block(bip68success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # All txs without flag fail as we are at delta height = 8 < 10 and delta time = 8 * 600 < 10 * 512
        bip68timetxs = [
            tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and tx['stf']
        ]
        for tx in bip68timetxs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        bip68heighttxs = [
            tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and not tx['stf']
        ]
        for tx in bip68heighttxs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        # Advance one block to 581
        test_blocks = self.generate_blocks(1, 0x20000000)
        self.sync_blocks(test_blocks)

        # Height txs should fail and time txs should now pass 9 * 600 > 10 * 512
        bip68success_txs.extend(bip68timetxs)
        self.sync_blocks([self.create_test_block(bip68success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        for tx in bip68heighttxs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        # Advance one block to 582
        test_blocks = self.generate_blocks(1, 0x20000000)
        self.sync_blocks(test_blocks)

        # All BIP 68 txs should pass
        bip68success_txs.extend(bip68heighttxs)
        self.sync_blocks([self.create_test_block(bip68success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        self.log.info("BIP 112 tests")
        self.log.info("Test version 1 txs")

        # -1 OP_CSV tx should fail
        self.sync_blocks([self.create_test_block([bip112tx_special_v1])],
                         success=False)
        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass

        success_txs = [
            tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if tx['sdf']
        ]
        success_txs += [
            tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if tx['sdf']
        ]
        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, version 1 txs should now fail
        fail_txs = all_rlt_txs(bip112txs_vary_nSequence_v1)
        fail_txs += all_rlt_txs(bip112txs_vary_nSequence_9_v1)
        fail_txs += [
            tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if not tx['sdf']
        ]
        fail_txs += [
            tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if not tx['sdf']
        ]
        for tx in fail_txs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        self.log.info("Test version 2 txs")

        # -1 OP_CSV tx should fail
        self.sync_blocks([self.create_test_block([bip112tx_special_v2])],
                         success=False)

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met)
        success_txs = [
            tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if tx['sdf']
        ]
        success_txs += [
            tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if tx['sdf']
        ]

        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs ##

        # All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check
        fail_txs = all_rlt_txs(bip112txs_vary_nSequence_9_v2)
        fail_txs += [
            tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if not tx['sdf']
        ]
        for tx in fail_txs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail
        fail_txs = [
            tx['tx'] for tx in bip112txs_vary_nSequence_v2 if tx['sdf']
        ]
        for tx in fail_txs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        # If sequencelock types mismatch, tx should fail
        fail_txs = [
            tx['tx'] for tx in bip112txs_vary_nSequence_v2
            if not tx['sdf'] and tx['stf']
        ]
        fail_txs += [
            tx['tx'] for tx in bip112txs_vary_OP_CSV_v2
            if not tx['sdf'] and tx['stf']
        ]
        for tx in fail_txs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        # Remaining txs should pass, just test masking works properly
        success_txs = [
            tx['tx'] for tx in bip112txs_vary_nSequence_v2
            if not tx['sdf'] and not tx['stf']
        ]
        success_txs += [
            tx['tx'] for tx in bip112txs_vary_OP_CSV_v2
            if not tx['sdf'] and not tx['stf']
        ]
        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # Additional test, of checking that comparison of two time types works properly
        time_txs = []
        for tx in [
                tx['tx'] for tx in bip112txs_vary_OP_CSV_v2
                if not tx['sdf'] and tx['stf']
        ]:
            tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME | SEQ_TYPE_FLAG
            signtx = sign_transaction(self.nodes[0], tx)
            time_txs.append(signtx)

        self.sync_blocks([self.create_test_block(time_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
# Example #8
def activate_segwit(self, node):
    node.generate(144 * 3)
    assert_equal(get_bip9_status(node, "segwit")["status"], 'active')
    def run_test(self):
        self.log.info("Wait for DIP3 to activate")
        while get_bip9_status(self.nodes[0], 'dip0003')['status'] != 'active':
            self.bump_mocktime(10)
            self.nodes[0].generate(10)

        self.nodes[0].add_p2p_connection(P2PDataStore())
        network_thread_start()
        self.nodes[0].p2p.wait_for_verack()

        self.log.info("Mine all but one remaining block in the window")
        bi = self.nodes[0].getblockchaininfo()
        for i in range(498 - bi['blocks']):
            self.bump_mocktime(1)
            self.nodes[0].generate(1)

        self.log.info("Initial state is DEFINED")
        bi = self.nodes[0].getblockchaininfo()
        assert_equal(bi['blocks'], 498)
        assert_equal(bi['bip9_softforks']['realloc']['status'], 'defined')

        self.log.info("Advance from DEFINED to STARTED at height = 499")
        self.nodes[0].generate(1)
        bi = self.nodes[0].getblockchaininfo()
        assert_equal(bi['blocks'], 499)
        assert_equal(bi['bip9_softforks']['realloc']['status'], 'started')
        assert_equal(
            bi['bip9_softforks']['realloc']['statistics']['threshold'],
            self.threshold(0))
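        # self.threshold(i) is assumed to be this test's helper returning the signalling
        # threshold for the i-th 500-block window; the checks below rely on it decreasing
        # each missed window until it bottoms out at 300.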

        self.signal(399, False)  # 1 block short

        self.log.info(
            "Still STARTED but new threshold should be lower at height = 999")
        bi = self.nodes[0].getblockchaininfo()
        assert_equal(bi['blocks'], 999)
        assert_equal(
            bi['bip9_softforks']['realloc']['statistics']['threshold'],
            self.threshold(1))

        self.signal(398, False)  # 1 block short again

        self.log.info(
            "Still STARTED but new threshold should be even lower at height = 1499"
        )
        bi = self.nodes[0].getblockchaininfo()
        assert_equal(bi['blocks'], 1499)
        assert_equal(
            bi['bip9_softforks']['realloc']['statistics']['threshold'],
            self.threshold(2))
        pre_locked_in_blockhash = bi['bestblockhash']

        self.signal(396, True)  # just enough to lock in
        self.log.info("Advanced to LOCKED_IN at height = 1999")

        for i in range(49):
            self.bump_mocktime(10)
            self.nodes[0].generate(10)
        self.nodes[0].generate(9)

        self.log.info("Still LOCKED_IN at height = 2498")
        bi = self.nodes[0].getblockchaininfo()
        assert_equal(bi['blocks'], 2498)
        assert_equal(bi['bip9_softforks']['realloc']['status'], 'locked_in')

        self.log.info("Advance from LOCKED_IN to ACTIVE at height = 2499")
        self.nodes[0].generate(1)  # activation
        bi = self.nodes[0].getblockchaininfo()
        assert_equal(bi['blocks'], 2499)
        assert_equal(bi['bip9_softforks']['realloc']['status'], 'active')
        assert_equal(bi['bip9_softforks']['realloc']['since'], 2500)
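        # 'since' reports the first block height to which the ACTIVE state applies,
        # hence 2500 while the current tip is still 2499.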

        self.log.info(
            "Reward split should stay ~50/50 before the first superblock after activation"
        )
        # This applies even if reallocation was activated right at superblock height like it does here
        bt = self.nodes[0].getblocktemplate()
        assert_equal(bt['height'], 2500)
        assert_equal(
            bt['masternode'][0]['amount'],
            get_masternode_payment(bt['height'], bt['coinbasevalue'], 2500))
        self.nodes[0].generate(9)
        bt = self.nodes[0].getblocktemplate()
        assert_equal(
            bt['masternode'][0]['amount'],
            get_masternode_payment(bt['height'], bt['coinbasevalue'], 2500))
        assert_equal(bt['coinbasevalue'], 13748571607)
        assert_equal(bt['masternode'][0]['amount'], 6874285801)  # 0.4999999998

        self.log.info(
            "Reallocation should kick in with the superblock mined at height = 2510"
        )
        for period in range(
                19):  # there will be 19 adjustments, 3 superblocks long each
            for i in range(3):
                self.bump_mocktime(10)
                self.nodes[0].generate(10)
                bt = self.nodes[0].getblocktemplate()
                assert_equal(
                    bt['masternode'][0]['amount'],
                    get_masternode_payment(bt['height'], bt['coinbasevalue'],
                                           2500))

        self.log.info(
            "Reward split should reach ~60/40 after reallocation is done")
        assert_equal(bt['coinbasevalue'], 10221599170)
        assert_equal(bt['masternode'][0]['amount'], 6132959502)  # 0.6

        self.log.info(
            "Reward split should stay ~60/40 after reallocation is done")
        for period in range(10):  # check 10 next superblocks
            self.bump_mocktime(10)
            self.nodes[0].generate(10)
            bt = self.nodes[0].getblocktemplate()
            assert_equal(
                bt['masternode'][0]['amount'],
                get_masternode_payment(bt['height'], bt['coinbasevalue'],
                                       2500))
        assert_equal(bt['coinbasevalue'], 9491484944)
        assert_equal(bt['masternode'][0]['amount'], 5694890966)  # 0.6

        # make sure all nodes are still synced
        self.sync_all()

        self.log.info("Rollback the chain back to the STARTED state")
        self.mocktime = self.nodes[0].getblock(pre_locked_in_blockhash,
                                               1)['time']
        for node in self.nodes:
            node.invalidateblock(pre_locked_in_blockhash)
        # create and send non-signalling block
        test_block = self.create_test_block()
        self.nodes[0].submitblock(ToHex(test_block))
        bi = self.nodes[0].getblockchaininfo()
        assert_equal(bi['blocks'], 1499)
        assert_equal(bi['bip9_softforks']['realloc']['status'], 'started')
        assert_equal(
            bi['bip9_softforks']['realloc']['statistics']['threshold'],
            self.threshold(2))

        self.log.info("Check thresholds reach min level and stay there")
        for i in range(
                8
        ):  # 7 to reach min level and 1 more to check it doesn't go lower than that
            self.signal(0, False)  # no need to signal
            bi = self.nodes[0].getblockchaininfo()
            assert_equal(bi['blocks'], 1999 + i * 500)
            assert_equal(bi['bip9_softforks']['realloc']['status'], 'started')
            assert_equal(
                bi['bip9_softforks']['realloc']['statistics']['threshold'],
                self.threshold(i + 3))
        assert_equal(
            bi['bip9_softforks']['realloc']['statistics']['threshold'], 300)
# Example #10
    def create_block(self, node, vtx=[]):
        bt = node.getblocktemplate()
        height = bt['height']
        tip_hash = bt['previousblockhash']

        coinbasevalue = bt['coinbasevalue']
        miner_address = node.getnewaddress()
        mn_payee = bt['masternode'][0]['payee']

        # Calculate the fees that the block template included (we'll have to remove them
        # from the coinbase, since we won't include the template's transactions)
        bt_fees = 0
        for tx in bt['transactions']:
            bt_fees += tx['fee']

        new_fees = 0
        for tx in vtx:
            in_value = 0
            out_value = 0
            for txin in tx.vin:
                txout = node.gettxout("%064x" % txin.prevout.hash,
                                      txin.prevout.n, False)
                in_value += int(txout['value'] * COIN)
            for txout in tx.vout:
                out_value += txout.nValue
            new_fees += in_value - out_value

        # fix fees
        coinbasevalue -= bt_fees
        coinbasevalue += new_fees
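        # The template's coinbasevalue = block subsidy + bt_fees; since we drop the
        # template's transactions and add our own, swap in our fees instead.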

        realloc_info = get_bip9_status(self.nodes[0], 'realloc')
        realloc_height = 99999999
        if realloc_info['status'] == 'active':
            realloc_height = realloc_info['since']
        mn_amount = get_masternode_payment(height, coinbasevalue,
                                           realloc_height)
        miner_amount = coinbasevalue - mn_amount

        outputs = {miner_address: str(Decimal(miner_amount) / COIN)}
        if mn_amount > 0:
            outputs[mn_payee] = str(Decimal(mn_amount) / COIN)

        coinbase = FromHex(CTransaction(),
                           node.createrawtransaction([], outputs))
        coinbase.vin = create_coinbase(height).vin

        # Reuse the template's coinbase payload; building our own would result in invalid
        # merkle roots for masternode lists
        if len(bt['coinbase_payload']) != 0:
            cbtx = FromHex(CCbTx(version=1), bt['coinbase_payload'])
            coinbase.nVersion = 3
            coinbase.nType = 5  # CbTx
            coinbase.vExtraPayload = cbtx.serialize()

        coinbase.calc_sha256()

        block = create_block(int(tip_hash, 16), coinbase, nTime=bt['curtime'])
        block.vtx += vtx

        # Add quorum commitments from template
        for tx in bt['transactions']:
            tx2 = FromHex(CTransaction(), tx['data'])
            if tx2.nType == 6:
                block.vtx.append(tx2)

        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()
        return block
# Example #11
    def run_test(self):
        # Setup the p2p connections
        self.segwit_node = self.nodes[0].add_p2p_connection(
            TestP2PConn(cmpct_version=2))
        self.old_node = self.nodes[0].add_p2p_connection(
            TestP2PConn(cmpct_version=1), services=NODE_NETWORK)
        self.additional_segwit_node = self.nodes[0].add_p2p_connection(
            TestP2PConn(cmpct_version=2))

        # We will need UTXOs to construct transactions in later tests.
        self.make_utxos()

        assert_equal(
            get_bip9_status(self.nodes[0], "segwit")["status"], 'active')

        self.log.info("Testing SENDCMPCT p2p message... ")
        self.test_sendcmpct(self.segwit_node, old_node=self.old_node)
        self.test_sendcmpct(self.additional_segwit_node)

        self.log.info("Testing compactblock construction...")
        self.test_compactblock_construction(self.old_node)
        self.test_compactblock_construction(self.segwit_node)

        self.log.info("Testing compactblock requests (segwit node)... ")
        self.test_compactblock_requests(self.segwit_node)

        self.log.info("Testing getblocktxn requests (segwit node)...")
        self.test_getblocktxn_requests(self.segwit_node)

        self.log.info(
            "Testing getblocktxn handler (segwit node should return witnesses)..."
        )
        self.test_getblocktxn_handler(self.segwit_node)
        self.test_getblocktxn_handler(self.old_node)

        self.log.info(
            "Testing compactblock requests/announcements not at chain tip...")
        self.test_compactblocks_not_at_tip(self.segwit_node)
        self.test_compactblocks_not_at_tip(self.old_node)

        self.log.info("Testing handling of incorrect blocktxn responses...")
        self.test_incorrect_blocktxn_response(self.segwit_node)

        self.log.info(
            "Testing reconstructing compact blocks from all peers...")
        self.test_compactblock_reconstruction_multiple_peers(
            self.segwit_node, self.additional_segwit_node)

        # Test that if we submitblock to node1, we'll get a compact block
        # announcement to all peers.
        # (Post-segwit activation, blocks won't propagate from node0 to node1
        # automatically, so don't bother testing a block announced to node0.)
        self.log.info("Testing end-to-end block relay...")
        self.request_cb_announcements(self.old_node)
        self.request_cb_announcements(self.segwit_node)
        self.test_end_to_end_block_relay([self.segwit_node, self.old_node])

        self.log.info("Testing handling of invalid compact blocks...")
        self.test_invalid_tx_in_compactblock(self.segwit_node)
        self.test_invalid_tx_in_compactblock(self.old_node)

        self.log.info("Testing invalid index in cmpctblock message...")
        self.test_invalid_cmpctblock_message()
    def get_tests(self):
        long_past_time = int(time.time()) - 600 * 1000 # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future
        self.nodes[0].setmocktime(long_past_time - 100) # enough so that the generated blocks will still all be before long_past_time
        self.coinbase_blocks = self.nodes[0].generate(1 + 16 + 2*32 + 1) # 82 blocks generated for inputs
        self.nodes[0].setmocktime(0) # set time back to present so yielded blocks aren't in the future as we advance last_block_time
        self.tipheight = 82 # height of the next block to build
        self.last_block_time = long_past_time
        self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
        self.nodeaddress = self.nodes[0].getnewaddress()

        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'defined')
        test_blocks = self.generate_blocks(61, 4)
        yield TestInstance(test_blocks, sync_every_block=False) # 1
        # Advanced from DEFINED to STARTED, height = 143
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')

        # Fail to achieve LOCKED_IN 100 out of 144 signal bit 0
        # using a variety of bits to simulate multiple parallel softforks
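        # On regtest the BIP9 lock-in threshold is 108 of the 144 blocks in a period (75%),
        # so the 100 signalling blocks below are not enough.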
        test_blocks = self.generate_blocks(50, 536870913) # 0x20000001 (signalling ready)
        test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not)
        test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
        test_blocks = self.generate_blocks(24, 536936448, test_blocks) # 0x20010000 (signalling not)
        yield TestInstance(test_blocks, sync_every_block=False) # 2
        # Failed to advance past STARTED, height = 287
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')

        # 108 out of 144 signal bit 0 to achieve lock-in
        # using a variety of bits to simulate multiple parallel softforks
        test_blocks = self.generate_blocks(58, 536870913) # 0x20000001 (signalling ready)
        test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not)
        test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
        test_blocks = self.generate_blocks(10, 536936448, test_blocks) # 0x20010000 (signalling not)
        yield TestInstance(test_blocks, sync_every_block=False) # 3
        # Advanced from STARTED to LOCKED_IN, height = 431
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')

        # 140 more version 4 blocks
        test_blocks = self.generate_blocks(140, 4)
        yield TestInstance(test_blocks, sync_every_block=False) # 4

        ### Inputs at height = 572
        # Put inputs for all tests in the chain at height 572 (tip now = 571) (time increases by 600s per block)
        # Note we reuse inputs for v1 and v2 txs so must test these separately
        # 16 normal inputs
        bip68inputs = []
        for _ in range(16):
            bip68inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks))
        # 2 sets of 16 inputs with 10 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112basicinputs = []
        for _ in range(2):
            inputs = []
            for _ in range(16):
                inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks))
            bip112basicinputs.append(inputs)
        # 2 sets of 16 varied inputs with (relative_lock_time) OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112diverseinputs = []
        for _ in range(2):
            inputs = []
            for _ in range(16):
                inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks))
            bip112diverseinputs.append(inputs)
        # 1 special input with -1 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112specialinput = self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks)
        # 1 normal input
        bip113input = self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks)

        self.nodes[0].setmocktime(self.last_block_time + 600)
        inputblockhash = self.nodes[0].generate(1)[0] # 1 block generated for inputs to be in chain at height 572
        self.nodes[0].setmocktime(0)
        self.tip = int("0x" + inputblockhash, 0)
        self.tipheight += 1
        self.last_block_time += 600
        assert_equal(len(self.nodes[0].getblock(inputblockhash, True)["tx"]), 82 + 1)

        # 2 more version 4 blocks
        test_blocks = self.generate_blocks(2, 4)
        yield TestInstance(test_blocks, sync_every_block=False) # 5
        # Not yet advanced to ACTIVE, height = 574 (will activate for block 576, not 575)
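        # BIP9 state only changes on period boundaries (multiples of 144), hence ACTIVE at 576, not 575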
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')

        # Test both version 1 and version 2 transactions for all tests
        # BIP113 test transaction will be modified before each use to put in appropriate block time
        bip113tx_v1 = self.create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98"))
        bip113tx_v1.vin[0].nSequence = 0xFFFFFFFE
        bip113tx_v1.nVersion = 1
        bip113tx_v2 = self.create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98"))
        bip113tx_v2.vin[0].nSequence = 0xFFFFFFFE
        bip113tx_v2.nVersion = 2

        # For BIP68 test all 16 relative sequence locktimes
        bip68txs_v1 = self.create_bip68txs(bip68inputs, 1)
        bip68txs_v2 = self.create_bip68txs(bip68inputs, 2)
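        # The returned txs are indexed as [disable_flag][bit25][bit22][bit18] of nSequence,
        # matching how the create_bip68txs/create_bip112txs helpers earlier in this test build
        # the 16 variants (bit 22 is the type flag selecting time- vs height-based locks)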

        # For BIP112 test:
        # 16 relative sequence locktimes of 10 against 10 OP_CSV OP_DROP inputs
        bip112txs_vary_nSequence_v1 = self.create_bip112txs(bip112basicinputs[0], False, 1)
        bip112txs_vary_nSequence_v2 = self.create_bip112txs(bip112basicinputs[0], False, 2)
        # 16 relative sequence locktimes of 9 against 10 OP_CSV OP_DROP inputs
        bip112txs_vary_nSequence_9_v1 = self.create_bip112txs(bip112basicinputs[1], False, 1, -1)
        bip112txs_vary_nSequence_9_v2 = self.create_bip112txs(bip112basicinputs[1], False, 2, -1)
        # sequence lock time of 10 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
        bip112txs_vary_OP_CSV_v1 = self.create_bip112txs(bip112diverseinputs[0], True, 1)
        bip112txs_vary_OP_CSV_v2 = self.create_bip112txs(bip112diverseinputs[0], True, 2)
        # sequence lock time of 9 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
        bip112txs_vary_OP_CSV_9_v1 = self.create_bip112txs(bip112diverseinputs[1], True, 1, -1)
        bip112txs_vary_OP_CSV_9_v2 = self.create_bip112txs(bip112diverseinputs[1], True, 2, -1)
        # -1 OP_CSV OP_DROP input
        bip112tx_special_v1 = self.create_bip112special(bip112specialinput, 1)
        bip112tx_special_v2 = self.create_bip112special(bip112specialinput, 2)


        ### TESTING ###
        ##################################
        ### Before Soft Forks Activate ###
        ##################################
        # All txs should pass
        ### Version 1 txs ###
        success_txs = []
        # add BIP113 tx and -1 CSV tx
        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
        bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
        success_txs.append(bip113signed1)
        success_txs.append(bip112tx_special_v1)
        # add BIP 68 txs
        success_txs.extend(all_rlt_txs(bip68txs_v1))
        # add BIP 112 with seq=10 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v1))
        # try BIP 112 with seq=9 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1))
        yield TestInstance([[self.create_test_block(success_txs), True]]) # 6
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        ### Version 2 txs ###
        success_txs = []
        # add BIP113 tx and -1 CSV tx
        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
        bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
        success_txs.append(bip113signed2)
        success_txs.append(bip112tx_special_v2)
        # add BIP 68 txs
        success_txs.extend(all_rlt_txs(bip68txs_v2))
        # add BIP 112 with seq=10 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v2))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v2))
        # try BIP 112 with seq=9 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2))
        yield TestInstance([[self.create_test_block(success_txs), True]]) # 7
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())


        # 1 more version 4 block to get us to height 575 so the fork should now be active for the next block
        test_blocks = self.generate_blocks(1, 4)
        yield TestInstance(test_blocks, sync_every_block=False) # 8
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'active')


        #################################
        ### After Soft Forks Activate ###
        #################################
        ### BIP 113 ###
        # BIP 113 tests should now fail regardless of version number if nLockTime isn't satisfied by new rules
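        # (BIP113 compares nLockTime against the median time past of the previous 11 blocks
        # instead of the block's own timestamp)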
        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
        bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
        bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
        for bip113tx in [bip113signed1, bip113signed2]:
            yield TestInstance([[self.create_test_block([bip113tx]), False]]) # 9,10
        # BIP 113 tests should now pass if the locktime is < MTP
        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
        bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
        bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
        for bip113tx in [bip113signed1, bip113signed2]:
            yield TestInstance([[self.create_test_block([bip113tx]), True]]) # 11,12
            self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # Next block height = 580 after 4 blocks of random version
        test_blocks = self.generate_blocks(4, 1234)
        yield TestInstance(test_blocks, sync_every_block=False) # 13

        ### BIP 68 ###
        ### Version 1 txs ###
        # All still pass
        success_txs = []
        success_txs.extend(all_rlt_txs(bip68txs_v1))
        yield TestInstance([[self.create_test_block(success_txs), True]]) # 14
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        ### Version 2 txs ###
        bip68success_txs = []
        # All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass
        for b25 in range(2):
            for b22 in range(2):
                for b18 in range(2):
                    bip68success_txs.append(bip68txs_v2[1][b25][b22][b18])
        yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 15
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        # All txs without flag fail as we are at delta height = 8 < 10 and delta time = 8 * 600 < 10 * 512
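        # (time-based relative locks use 512-second granularity, so a lock of 10 means 10 * 512 seconds)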
        bip68timetxs = []
        for b25 in range(2):
            for b18 in range(2):
                bip68timetxs.append(bip68txs_v2[0][b25][1][b18])
        for tx in bip68timetxs:
            yield TestInstance([[self.create_test_block([tx]), False]]) # 16 - 19
        bip68heighttxs = []
        for b25 in range(2):
            for b18 in range(2):
                bip68heighttxs.append(bip68txs_v2[0][b25][0][b18])
        for tx in bip68heighttxs:
            yield TestInstance([[self.create_test_block([tx]), False]]) # 20 - 23

        # Advance one block to 581
        test_blocks = self.generate_blocks(1, 1234)
        yield TestInstance(test_blocks, sync_every_block=False) # 24

        # Height txs should fail and time txs should now pass 9 * 600 > 10 * 512
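        # (height-based locks still fail because the input is only 9 blocks old, < 10)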
        bip68success_txs.extend(bip68timetxs)
        yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 25
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        for tx in bip68heighttxs:
            yield TestInstance([[self.create_test_block([tx]), False]]) # 26 - 29

        # Advance one block to 582
        test_blocks = self.generate_blocks(1, 1234)
        yield TestInstance(test_blocks, sync_every_block=False) # 30

        # All BIP 68 txs should pass
        bip68success_txs.extend(bip68heighttxs)
        yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 31
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())


        ### BIP 112 ###
        ### Version 1 txs ###
        # -1 OP_CSV tx should fail
        yield TestInstance([[self.create_test_block([bip112tx_special_v1]), False]]) #32
        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass
        success_txs = []
        for b25 in range(2):
            for b22 in range(2):
                for b18 in range(2):
                    success_txs.append(bip112txs_vary_OP_CSV_v1[1][b25][b22][b18])
                    success_txs.append(bip112txs_vary_OP_CSV_9_v1[1][b25][b22][b18])
        yield TestInstance([[self.create_test_block(success_txs), True]]) # 33
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, version 1 txs should now fail
        fail_txs = []
        fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1))
        fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
        for b25 in range(2):
            for b22 in range(2):
                for b18 in range(2):
                    fail_txs.append(bip112txs_vary_OP_CSV_v1[0][b25][b22][b18])
                    fail_txs.append(bip112txs_vary_OP_CSV_9_v1[0][b25][b22][b18])

        for tx in fail_txs:
            yield TestInstance([[self.create_test_block([tx]), False]]) # 34 - 81

        ### Version 2 txs ###
        # -1 OP_CSV tx should fail
        yield TestInstance([[self.create_test_block([bip112tx_special_v2]), False]]) #82

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met)
        success_txs = []
        for b25 in range(2):
            for b22 in range(2):
                for b18 in range(2):
                    success_txs.append(bip112txs_vary_OP_CSV_v2[1][b25][b22][b18]) # 8/16 of vary_OP_CSV
                    success_txs.append(bip112txs_vary_OP_CSV_9_v2[1][b25][b22][b18]) # 8/16 of vary_OP_CSV_9

        yield TestInstance([[self.create_test_block(success_txs), True]]) # 83
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        ## SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs ##
        # All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check
        fail_txs = []
        fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2)) # 16/16 of vary_nSequence_9
        for b25 in range(2):
            for b22 in range(2):
                for b18 in range(2):
                    fail_txs.append(bip112txs_vary_OP_CSV_9_v2[0][b25][b22][b18]) # 16/16 of vary_OP_CSV_9

        for tx in fail_txs:
            yield TestInstance([[self.create_test_block([tx]), False]]) # 84 - 107

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail
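        # The OP_CSV operand in these inputs has the disable flag unset, so BIP112 requires the
        # spending input's nSequence to have it unset as well; setting it makes the script check fail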
        fail_txs = []
        for b25 in range(2):
            for b22 in range(2):
                for b18 in range(2):
                    fail_txs.append(bip112txs_vary_nSequence_v2[1][b25][b22][b18]) # 8/16 of vary_nSequence
        for tx in fail_txs:
            yield TestInstance([[self.create_test_block([tx]), False]]) # 108-115

        # If sequencelock types mismatch, tx should fail
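        # Here the type flag (bit 22) differs between nSequence and the OP_CSV operand
        # (time-based vs height-based), so CHECKSEQUENCEVERIFY fails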
        fail_txs = []
        for b25 in range(2):
            for b18 in range(2):
                fail_txs.append(bip112txs_vary_nSequence_v2[0][b25][1][b18]) # 12/16 of vary_nSequence
                fail_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][1][b18]) # 12/16 of vary_OP_CSV
        for tx in fail_txs:
            yield TestInstance([[self.create_test_block([tx]), False]]) # 116-123

        # Remaining txs should pass, just test masking works properly
        success_txs = []
        for b25 in range(2):
            for b18 in range(2):
                success_txs.append(bip112txs_vary_nSequence_v2[0][b25][0][b18]) # 16/16 of vary_nSequence
                success_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][0][b18]) # 16/16 of vary_OP_CSV
        yield TestInstance([[self.create_test_block(success_txs), True]]) # 124
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # Additional test: check that comparison of two time-based lock types works properly
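        # The selected inputs have a time-based OP_CSV operand (bit 22 set); forcing the spending
        # nSequence to the same time type below means both locks are time-based and already satisfied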
        time_txs = []
        for b25 in range(2):
            for b18 in range(2):
                tx = bip112txs_vary_OP_CSV_v2[0][b25][1][b18]
                tx.vin[0].nSequence = base_relative_locktime | seq_type_flag
                signtx = self.sign_transaction(self.nodes[0], tx)
                time_txs.append(signtx)
        yield TestInstance([[self.create_test_block(time_txs), True]]) # 125
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
Example #13
    def run_test(self):

        # First, quick check that CSV is ACTIVE at genesis
        assert_equal(self.nodes[0].getblockcount(), 0)
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'active')

        self.nodes[0].add_p2p_connection(P2PInterface())

        self.nodeaddress = self.nodes[0].getnewaddress()

        self.log.info(
            "Test that blocks past the genesis block must be at least version 4"
        )

        # Create a v3 block
        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(1), block_time)
        block.nVersion = 3
        block.solve()

        # The best block should not have changed, because...
        assert_equal(self.nodes[0].getbestblockhash(), tip)

        # ... we rejected it because it is v3
        with self.nodes[0].assert_debug_log(expected_msgs=[
                '{}, bad-version(0x00000003)'.format(block.hash)
        ]):
            # Send it to the node
            self.nodes[0].p2p.send_and_ping(msg_block(block))

        self.log.info(
            "Test that a version 4 block with a valid-according-to-CLTV transaction is accepted"
        )

        # Generate 100 blocks so that first coinbase matures
        generated_blocks = self.nodes[0].generate(100)
        spendable_coinbase_txid = self.nodes[0].getblock(
            generated_blocks[0])['tx'][0]
        coinbase_value = self.nodes[0].decoderawtransaction(
            self.nodes[0].gettransaction(
                spendable_coinbase_txid)["hex"])["vout"][0]["value"]
        tip = generated_blocks[-1]

        # Construct a v4 block
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16),
                             create_coinbase(len(generated_blocks) + 1),
                             block_time)
        block.nVersion = 4

        # Create a CLTV transaction
        spendtx = create_transaction(self.nodes[0],
                                     spendable_coinbase_txid,
                                     self.nodeaddress,
                                     amount=1.0,
                                     fee=coinbase_value - 1)
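        # cltv_validate (a helper from this test's framework) rewrites the input's scriptSig to
        # prepend "<height> OP_CHECKLOCKTIMEVERIFY OP_DROP" and sets nLockTime so the CLTV check passes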
        spendtx = cltv_validate(self.nodes[0], spendtx, 1)
        spendtx.rehash()

        # Add the CLTV transaction and prepare for sending
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

        # Send block and check that it becomes new best block
        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)