Example #1
 def sign_transaction(self, node, unsignedtx):
     rawtx = ToHex(unsignedtx)
     signresult = node.signrawtransaction(rawtx)
     tx = CTransaction()
     f = BytesIO(unhexlify(signresult["hex"]))
     tx.deserialize(f)
     return tx
Example #2
 def sign_transaction(self, node, unsignedtx):
     rawtx = ToHex(unsignedtx)
     signresult = node.signrawtransaction(rawtx)
     tx = CTransaction()
     f = cStringIO.StringIO(unhexlify(signresult['hex']))
     tx.deserialize(f)
     return tx
Example #3
 def sign_transaction(self, node, unsignedtx):
     rawtx = ToHex(unsignedtx)
     signresult = node.signrawtransaction(rawtx)
     tx = CTransaction()
     f = BytesIO(hex_str_to_bytes(signresult['hex']))
     tx.deserialize(f)
     return tx
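Examples #1 through #3 are the same signing helper written against different vintages of the Bitcoin test framework: Example #2 is the Python 2 variant (cStringIO.StringIO plus unhexlify), while #1 and #3 use io.BytesIO fed by unhexlify or the framework's hex_str_to_bytes respectively. A minimal sketch of the shared hex-to-CTransaction round trip, assuming a modern tree where CTransaction is exported from test_framework.messages (older trees use test_framework.mininode) and with bytes.fromhex / bytes.hex standing in for the helper functions:

from io import BytesIO
from test_framework.messages import CTransaction  # import path assumed; older trees: test_framework.mininode

def tx_from_hex(hexstring):
    # hex string -> raw bytes -> file-like buffer -> deserialized CTransaction
    tx = CTransaction()
    tx.deserialize(BytesIO(bytes.fromhex(hexstring)))
    return tx

def tx_to_hex(tx):
    # inverse direction, e.g. to hand a modified tx back to signrawtransaction
    return tx.serialize().hex()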
Example #4
 def create_transaction(self, node, txid, to_address, amount):
     inputs = [{ "txid" : txid, "vout" : 0}]
     outputs = { to_address : amount }
     rawtx = node.createrawtransaction(inputs, outputs)
     tx = CTransaction()
     f = cStringIO.StringIO(unhexlify(rawtx))
     tx.deserialize(f)
     return tx
Example #5
 def create_transaction(self, node, txid, to_address, amount):
     inputs = [{ "txid" : txid, "vout" : 0}]
     outputs = { to_address : amount }
     rawtx = node.createrawtransaction(inputs, outputs)
     tx = CTransaction()
     f = BytesIO(hex_str_to_bytes(rawtx))
     tx.deserialize(f)
     return tx
Example #6
 def create_transaction(self, node, txid, to_address, amount):
     inputs = [{"txid": txid, "vout": 0}]
     outputs = {to_address: amount}
     rawtx = node.createrawtransaction(inputs, outputs)
     signresult = node.signrawtransaction(rawtx, None, None, "ALL|FORKID")
     tx = CTransaction()
     f = BytesIO(hex_str_to_bytes(signresult['hex']))
     tx.deserialize(f)
     return tx
Example #7
 def create_transaction(self, node, coinbase, to_address, amount):
     from_txid = node.getblock(coinbase)['tx'][0]
     inputs = [{ "txid" : from_txid, "vout" : 0}]
     outputs = { to_address : amount }
     rawtx = node.createrawtransaction(inputs, outputs)
     tx = CTransaction()
     f = cStringIO.StringIO(unhexlify(rawtx))
     tx.deserialize(f)
     tx.nVersion = 2
     return tx
Example #8
 def create_transaction(self, node, coinbase, to_address, amount):
     from_txid = node.getblock(coinbase)["tx"][0]
     inputs = [{"txid": from_txid, "vout": 0}]
     outputs = {to_address: amount}
     rawtx = node.createrawtransaction(inputs, outputs)
     signresult = node.signrawtransaction(rawtx)
     tx = CTransaction()
     f = BytesIO(hex_str_to_bytes(signresult["hex"]))
     tx.deserialize(f)
     return tx
Example #9
def submit_block_with_tx(node, tx):
    ctx = CTransaction()
    ctx.deserialize(io.BytesIO(hex_str_to_bytes(tx)))

    tip = node.getbestblockhash()
    height = node.getblockcount() + 1
    block_time = node.getblockheader(tip)["mediantime"] + 1
    block = blocktools.create_block(int(tip, 16), blocktools.create_coinbase(height), block_time)
    block.vtx.append(ctx)
    block.rehash()
    block.hashMerkleRoot = block.calc_merkle_root()
    block.solve()
    node.submitblock(bytes_to_hex_str(block.serialize(True)), '', True)
    return block
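Example #9 builds and mines a block around an already-serialized transaction hex. A hedged usage sketch of how such a helper is typically driven, assuming node is a test-framework RPC proxy for a regtest node with a funded wallet (the driver code below is illustrative, not from the original):

# hypothetical driver code for submit_block_with_tx
utxo = node.listunspent()[0]
inputs = [{"txid": utxo["txid"], "vout": utxo["vout"]}]
outputs = {node.getnewaddress(): float(utxo["amount"]) - 0.0001}  # leave a small fee
raw = node.createrawtransaction(inputs, outputs)
signed_hex = node.signrawtransaction(raw)["hex"]
block = submit_block_with_tx(node, signed_hex)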
Example #10
 def create_and_mine_tx_from_txids(self, txids, success = True):
     tx = CTransaction()
     for i in txids:
         txtmp = CTransaction()
         txraw = self.nodes[0].getrawtransaction(i)
         f = BytesIO(hex_str_to_bytes(txraw))
         txtmp.deserialize(f)
         for j in range(len(txtmp.vout)):
             tx.vin.append(CTxIn(COutPoint(int('0x'+i,0), j)))
     tx.vout.append(CTxOut(0, CScript()))
     tx.rehash()
     signresults = self.nodes[0].signrawtransactionwithwallet(bytes_to_hex_str(tx.serialize_without_witness()))['hex']
     self.nodes[0].sendrawtransaction(signresults, True)
     self.nodes[0].generate(1)
     sync_blocks(self.nodes)
Example #11
    def create_transaction(self, node, coinbase, to_address, amount, txModifier=None):
        from_txid = node.getblock(coinbase)['tx'][0]
        inputs = [{ "txid" : from_txid, "vout" : 0}]
        outputs = { to_address : amount }
        rawtx = node.createrawtransaction(inputs, outputs)
        tx = CTransaction()

        if txModifier:
            f = cStringIO.StringIO(unhexlify(rawtx))
            tx.deserialize(f)
            txModifier(tx)
            rawtx = hexlify(tx.serialize())

        signresult = node.signrawtransaction(rawtx)
        f = cStringIO.StringIO(unhexlify(signresult['hex']))
        tx.deserialize(f)
        return tx
Example #12
def create_transaction(node, coinbase, to_address, amount, expiry_height):
    from_txid = node.getblock(coinbase)['tx'][0]
    inputs = [{"txid": from_txid, "vout": 0}]
    outputs = {to_address: amount}
    rawtx = node.createrawtransaction(inputs, outputs)
    tx = CTransaction()

    # Set the expiry height
    f = cStringIO.StringIO(unhexlify(rawtx))
    tx.deserialize(f)
    tx.nExpiryHeight = expiry_height
    rawtx = hexlify(tx.serialize())

    signresult = node.signrawtransaction(rawtx)
    f = cStringIO.StringIO(unhexlify(signresult['hex']))
    tx.deserialize(f)
    tx.rehash()
    return tx
Example #13
    def _zmq_test(self):
        num_blocks = 5
        self.log.info("Generate %(n)d blocks (and %(n)d coinbase txes)" % {"n": num_blocks})
        genhashes = self.nodes[0].generate(num_blocks)
        self.sync_all()

        for x in range(num_blocks):
            # Should receive the coinbase txid.
            txid = self.hashtx.receive()

            # Should receive the coinbase raw transaction.
            hex = self.rawtx.receive()
            tx = CTransaction()
            tx.deserialize(BytesIO(hex))
            tx.calc_sha256()
            assert_equal(tx.hash, bytes_to_hex_str(txid))

            # Should receive the generated block hash.
            hash = bytes_to_hex_str(self.hashblock.receive())
            assert_equal(genhashes[x], hash)
            # The block should only have the coinbase txid.
            assert_equal([bytes_to_hex_str(txid)], self.nodes[1].getblock(hash)["tx"])

            # Should receive the generated raw block.
            block = self.rawblock.receive()
            assert_equal(genhashes[x], bytes_to_hex_str(hash256(block[:80])))

        self.log.info("Wait for tx from second node")
        payment_txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.0)
        self.sync_all()

        # Should receive the broadcasted txid.
        txid = self.hashtx.receive()
        assert_equal(payment_txid, bytes_to_hex_str(txid))

        # Should receive the broadcasted raw transaction.
        hex = self.rawtx.receive()
        assert_equal(payment_txid, bytes_to_hex_str(hash256(hex)))
Example #14
def txFromHex(hexstring):
    tx = CTransaction()
    f = cStringIO.StringIO(binascii.unhexlify(hexstring))
    tx.deserialize(f)
    return tx
Example #15
    def run_test(self):
        blocks = []
        self.bl_count = 0

        blocks.append(self.nodes[0].getblockhash(0))

        small_target_h = 3

        self.mark_logs("Node0 generates %d blocks" %
                       (CBH_DELTA_HEIGHT + small_target_h - 1))
        blocks.extend(self.nodes[0].generate(CBH_DELTA_HEIGHT +
                                             small_target_h - 1))
        self.sync_all()

        amount_node1 = Decimal("1002.0")
        self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(),
                                    amount_node1)
        self.sync_all()

        self.mark_logs("Node0 generates 1 blocks")
        blocks.extend(self.nodes[0].generate(1))
        self.sync_all()

        self.mark_logs(
            "Trying to send a tx with a scriptPubKey referencing a block too recent..."
        )

        # Create a tx having in its scriptPubKey a custom referenced block in the CHECKBLOCKATHEIGHT part
        # select necessary utxos for doing the PAYMENT
        usp = self.nodes[0].listunspent()

        payment = Decimal('1.0')
        fee = Decimal('0.00005')

        amount = Decimal('0')
        inputs = []
        for x in usp:
            amount += Decimal(x['amount'])
            inputs.append({"txid": x['txid'], "vout": x['vout']})
            if amount >= payment + fee:
                break

        outputs = {
            self.nodes[0].getnewaddress(): (Decimal(amount) - payment - fee),
            self.nodes[2].getnewaddress(): payment
        }
        rawTx = self.nodes[0].createrawtransaction(inputs, outputs)

        # build an object from the raw tx in order to be able to modify it
        tx_01 = CTransaction()
        f = cStringIO.StringIO(unhexlify(rawTx))
        tx_01.deserialize(f)

        decodedScriptOrig = self.nodes[0].decodescript(
            binascii.hexlify(tx_01.vout[1].scriptPubKey))

        scriptOrigAsm = decodedScriptOrig['asm']

        # store the hashed script, it is reused
        params = scriptOrigAsm.split()
        hash160 = hex_str_to_bytes(params[2])

        # new referenced block height
        modTargetHeigth = CBH_DELTA_HEIGHT + small_target_h - FINALITY_MIN_AGE + 5

        # new referenced block hash
        modTargetHash = hex_str_to_bytes(swap_bytes(blocks[modTargetHeigth]))

        # build modified script
        modScriptPubKey = CScript([
            OP_DUP, OP_HASH160, hash160, OP_EQUALVERIFY, OP_CHECKSIG,
            modTargetHash, modTargetHeigth, OP_CHECKBLOCKATHEIGHT
        ])

        tx_01.vout[1].scriptPubKey = modScriptPubKey
        tx_01.rehash()

        decodedScriptMod = self.nodes[0].decodescript(
            binascii.hexlify(tx_01.vout[1].scriptPubKey))
        print "  Modified scriptPubKey in tx 1: ", decodedScriptMod['asm']

        signedRawTx = self.nodes[0].signrawtransaction(ToHex(tx_01))

        h = self.nodes[0].getblockcount()
        assert_greater_than(FINALITY_MIN_AGE, h - modTargetHeigth)

        print "  Node0 sends %f coins to Node2" % payment

        try:
            txid = self.nodes[0].sendrawtransaction(signedRawTx['hex'])
            print "  Tx sent: ", txid
            # should fail, therefore force test failure
            assert_equal(True, False)

        except JSONRPCException, e:
            print "  ==> tx has been rejected as expected:"
            print "      referenced block height=%d, chainActive.height=%d, minimumAge=%d" % (
                modTargetHeigth, h, FINALITY_MIN_AGE)
            print
Example #16
def txFromHex(hexstring):
    tx = CTransaction()
    f = BytesIO(hex_str_to_bytes(hexstring))
    tx.deserialize(f)
    return tx
Example #17
    def decoderawtransaction_asm_sighashtype(self):
        """Tests decoding scripts via RPC command "decoderawtransaction".

        This test is in with the "decodescript" tests because they are testing the same "asm" script decodes.
        """

        # this test case uses a random plain vanilla mainnet transaction with a single P2PKH input and output
        tx = '0100000001696a20784a2c70143f634e95227dbdfdf0ecd51647052e70854512235f5986ca010000008a47304402207174775824bec6c2700023309a168231ec80b82c6069282f5133e6f11cbb04460220570edc55c7c5da2ca687ebd0372d3546ebc3f810516a002350cac72dfe192dfb014104d3f898e6487787910a690410b7a917ef198905c27fb9d3b0a42da12aceae0544fc7088d239d9a48f2828a15a09e84043001f27cc80d162cb95404e1210161536ffffffff0100e1f505000000001976a914eb6c6e0cdb2d256a32d97b8df1fc75d1920d9bca88ac00000000'
        rpc_result = self.nodes[0].decoderawtransaction(tx)
        assert_equal('304402207174775824bec6c2700023309a168231ec80b82c6069282f5133e6f11cbb04460220570edc55c7c5da2ca687ebd0372d3546ebc3f810516a002350cac72dfe192dfb[ALL] 04d3f898e6487787910a690410b7a917ef198905c27fb9d3b0a42da12aceae0544fc7088d239d9a48f2828a15a09e84043001f27cc80d162cb95404e1210161536', rpc_result['vin'][0]['scriptSig']['asm'])

        # this test case uses a mainnet transaction that has a P2SH input and both P2PKH and P2SH outputs.
        # it's from James D'Angelo's awesome introductory videos about multisig: https://www.youtube.com/watch?v=zIbUSaZBJgU and https://www.youtube.com/watch?v=OSA1pwlaypc
        # verify that we have not altered scriptPubKey decoding.
        tx = '01000000018d1f5635abd06e2c7e2ddf58dc85b3de111e4ad6e0ab51bb0dcf5e84126d927300000000fdfe0000483045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea01483045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75014c695221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53aeffffffff02611e0000000000001976a914dc863734a218bfe83ef770ee9d41a27f824a6e5688acee2a02000000000017a9142a5edea39971049a540474c6a99edf0aa4074c588700000000'
        rpc_result = self.nodes[0].decoderawtransaction(tx)
        assert_equal('8e3730608c3b0bb5df54f09076e196bc292a8e39a78e73b44b6ba08c78f5cbb0', rpc_result['txid'])
        assert_equal('0 3045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea[ALL] 3045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75[ALL] 5221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53ae', rpc_result['vin'][0]['scriptSig']['asm'])
        assert_equal('OP_DUP OP_HASH160 dc863734a218bfe83ef770ee9d41a27f824a6e56 OP_EQUALVERIFY OP_CHECKSIG', rpc_result['vout'][0]['scriptPubKey']['asm'])
        assert_equal('OP_HASH160 2a5edea39971049a540474c6a99edf0aa4074c58 OP_EQUAL', rpc_result['vout'][1]['scriptPubKey']['asm'])
        txSave = CTransaction()
        txSave.deserialize(StringIO(unhexlify(tx)))

        # make sure that a specifically crafted op_return value will not pass all the IsDERSignature checks and then get decoded as a sighash type
        tx = '01000000015ded05872fdbda629c7d3d02b194763ce3b9b1535ea884e3c8e765d42e316724020000006b48304502204c10d4064885c42638cbff3585915b322de33762598321145ba033fc796971e2022100bb153ad3baa8b757e30a2175bd32852d2e1cb9080f84d7e32fcdfd667934ef1b012103163c0ff73511ea1743fb5b98384a2ff09dd06949488028fd819f4d83f56264efffffffff0200000000000000000b6a0930060201000201000180380100000000001976a9141cabd296e753837c086da7a45a6c2fe0d49d7b7b88ac00000000'
        rpc_result = self.nodes[0].decoderawtransaction(tx)
        assert_equal('OP_RETURN 300602010002010001', rpc_result['vout'][0]['scriptPubKey']['asm'])

        # verify that we have not altered scriptPubKey processing even of a specially crafted P2PKH pubkeyhash and P2SH redeem script hash that is made to pass the der signature checks
        tx = '01000000018d1f5635abd06e2c7e2ddf58dc85b3de111e4ad6e0ab51bb0dcf5e84126d927300000000fdfe0000483045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea01483045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75014c695221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53aeffffffff02611e0000000000001976a914301102070101010101010102060101010101010188acee2a02000000000017a91430110207010101010101010206010101010101018700000000'
        rpc_result = self.nodes[0].decoderawtransaction(tx)
        assert_equal('OP_DUP OP_HASH160 3011020701010101010101020601010101010101 OP_EQUALVERIFY OP_CHECKSIG', rpc_result['vout'][0]['scriptPubKey']['asm'])
        assert_equal('OP_HASH160 3011020701010101010101020601010101010101 OP_EQUAL', rpc_result['vout'][1]['scriptPubKey']['asm'])

        # some more full transaction tests of varying specific scriptSigs. used instead of
        # tests in decodescript_script_sig because the decodescript RPC is specifically
        # for working on scriptPubKeys (argh!).
        push_signature = hexlify(txSave.vin[0].scriptSig)[2:(0x48*2+4)]
        signature = push_signature[2:]
        der_signature = signature[:-2]
        signature_sighash_decoded = der_signature + '[ALL]'
        signature_2 = der_signature + '82'
        push_signature_2 = '48' + signature_2
        signature_2_sighash_decoded = der_signature + '[NONE|ANYONECANPAY]'

        # 1) P2PK scriptSig
        txSave.vin[0].scriptSig = unhexlify(push_signature)
        rpc_result = self.nodes[0].decoderawtransaction(hexlify(txSave.serialize()))
        assert_equal(signature_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])

        # make sure that the sighash decodes come out correctly for a more complex / lesser used case.
        txSave.vin[0].scriptSig = unhexlify(push_signature_2)
        rpc_result = self.nodes[0].decoderawtransaction(hexlify(txSave.serialize()))
        assert_equal(signature_2_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])

        # 2) multisig scriptSig
        txSave.vin[0].scriptSig = unhexlify('00' + push_signature + push_signature_2)
        rpc_result = self.nodes[0].decoderawtransaction(hexlify(txSave.serialize()))
        assert_equal('0 ' + signature_sighash_decoded + ' ' + signature_2_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])

        # 3) test a scriptSig that contains more than push operations.
        # in fact, it contains an OP_RETURN with data specially crafted to cause improper decode if the code does not catch it.
        txSave.vin[0].scriptSig = unhexlify('6a143011020701010101010101020601010101010101')
        rpc_result = self.nodes[0].decoderawtransaction(hexlify(txSave.serialize()))
        print(hexlify('636174'))
        assert_equal('OP_RETURN 3011020701010101010101020601010101010101', rpc_result['vin'][0]['scriptSig']['asm'])
Example #18
 def sign_transaction(self, node, tx):
     signresult = node.signrawtransaction(hexlify(tx.serialize()))
     tx = CTransaction()
     f = cStringIO.StringIO(unhexlify(signresult['hex']))
     tx.deserialize(f)
     return tx
Example #19
 def sign_transaction(self, node, tx):
     signresult = node.signrawtransaction(hexlify(tx.serialize()))
     tx = CTransaction()
     f = cStringIO.StringIO(unhexlify(signresult['hex']))
     tx.deserialize(f)
     return tx
Example #20
 def sign_transaction(self, node, tx):
     signresult = node.signrawtransaction(bytes_to_hex_str(tx.serialize()))
     tx = CTransaction()
     f = BytesIO(hex_str_to_bytes(signresult['hex']))
     tx.deserialize(f)
     return tx
Example #21
    def get_tests(self):
        start_time = 1490247077 + 600 * 1000 + 101
        long_past_time = start_time - 600 * 1000  # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future
        self.nodes[0].setmocktime(
            long_past_time - 100
        )  # enough so that the generated blocks will still all be before long_past_time
        self.coinbase_blocks = self.nodes[0].generate(
            1 + 16 + 2 * 32 + 1)  # 82 blocks generated for inputs
        self.nodes[0].setmocktime(
            start_time
        )  # set time back to present so yielded blocks aren't in the future as we advance last_block_time
        self.tipheight = 82  # height of the next block to build
        self.last_block_time = long_past_time
        self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
        self.nodeaddress = self.nodes[0].getnewaddress()

        assert_equal(
            get_bip9_status(self.nodes[0], 'csv')['status'], 'defined')
        test_blocks = self.generate_blocks(61, 4)

        # Fail to achieve LOCKED_IN 100 out of 144 signal bit 0
        # using a variety of bits to simulate multiple parallel softforks
        test_blocks = self.generate_blocks(
            50, 536870913, test_blocks)  # 0x20000001 (signalling ready)
        test_blocks = self.generate_blocks(
            20, 4, test_blocks)  # 0x00000004 (signalling not)
        test_blocks = self.generate_blocks(
            50, 536871169, test_blocks)  # 0x20000101 (signalling ready)
        test_blocks = self.generate_blocks(
            24, 536936448, test_blocks)  # 0x20010000 (signalling not)

        # 108 out of 144 signal bit 0 to achieve lock-in
        # using a variety of bits to simulate multiple parallel softforks
        test_blocks = self.generate_blocks(
            58, 536870913, test_blocks)  # 0x20000001 (signalling ready)
        test_blocks = self.generate_blocks(
            26, 4, test_blocks)  # 0x00000004 (signalling not)
        test_blocks = self.generate_blocks(
            50, 536871169, test_blocks)  # 0x20000101 (signalling ready)
        test_blocks = self.generate_blocks(
            10, 536936448, test_blocks)  # 0x20010000 (signalling not)

        # 140 more version 4 blocks
        test_blocks = self.generate_blocks(130, 4, test_blocks)

        extend_txs = []
        # split 50 coinbases into 2 unspents so we have enough unspent txs
        for coinbase_block in self.coinbase_blocks[0:50]:
            amount = (INITIAL_BLOCK_REWARD - 0.01) / 2.0
            addr_a = self.nodes[0].getnewaddress()
            addr_b = self.nodes[0].getnewaddress()
            inputs = [{
                'txid': self.nodes[0].getblock(coinbase_block)['tx'][0],
                'vout': 0
            }]
            outputs = {addr_a: amount, addr_b: amount}
            rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
            res = self.nodes[0].signrawtransaction(rawtx)
            rawtx = res['hex']
            tx = CTransaction()
            f = BytesIO(hex_str_to_bytes(rawtx))
            tx.deserialize(f)
            extend_txs.append(tx)
        test_blocks = self.generate_blocks(10,
                                           4,
                                           test_blocks,
                                           extend_txs=extend_txs)

        yield TestInstance(test_blocks[0:61], sync_every_block=True)  # 1
        # Advanced from DEFINED to STARTED, height = 143
        assert_equal(
            get_bip9_status(self.nodes[0], 'csv')['status'], 'started')

        yield TestInstance(test_blocks[61:61 + 144],
                           sync_every_block=True)  # 2
        # Failed to advance past STARTED, height = 287
        assert_equal(
            get_bip9_status(self.nodes[0], 'csv')['status'], 'started')

        yield TestInstance(test_blocks[61 + 144:61 + 144 + 144],
                           sync_every_block=True)  # 3
        # Advanced from STARTED to LOCKED_IN, height = 431
        assert_equal(
            get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')

        yield TestInstance(test_blocks[61 + 144 + 144:61 + 144 + 144 + 130],
                           sync_every_block=True)  # 4

        yield TestInstance(test_blocks[61 + 144 + 144 + 130:61 + 144 + 144 +
                                       130 + 10],
                           sync_every_block=True)  # 4

        self.nodes[0].generate(1)
        self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
        self.tipheight += 1
        self.last_block_time += 600

        self.unspents = []
        for unspent in self.nodes[0].listunspent():
            if unspent['spendable']:
                self.unspents.append(
                    (unspent['txid'], unspent['vout'], unspent['amount']))

        ### Inputs at height = 572
        # Put inputs for all tests in the chain at height 572 (tip now = 571) (time increases by 600s per block)
        # Note we reuse inputs for v1 and v2 txs so must test these separately
        # 16 normal inputs
        bip68inputs = []
        for i in range(16):
            bip68inputs.append(
                self.send_generic_unspent_input_tx(self.nodes[0],
                                                   self.unspents.pop()))
        # 2 sets of 16 inputs with 10 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112basicinputs = []
        for j in range(2):
            inputs = []
            for i in range(16):
                inputs.append(
                    self.send_generic_unspent_input_tx(self.nodes[0],
                                                       self.unspents.pop()))
            bip112basicinputs.append(inputs)
        # 2 sets of 16 varied inputs with (relative_lock_time) OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112diverseinputs = []
        for j in range(2):
            inputs = []
            for i in range(16):
                inputs.append(
                    self.send_generic_unspent_input_tx(self.nodes[0],
                                                       self.unspents.pop()))
            bip112diverseinputs.append(inputs)
        # 1 special input with -1 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112specialinput = self.send_generic_unspent_input_tx(
            self.nodes[0], self.unspents.pop())
        # 1 normal input
        bip113input = self.send_generic_unspent_input_tx(
            self.nodes[0], self.unspents.pop())

        self.nodes[0].setmocktime(self.last_block_time + 600)
        inputblockhash = self.nodes[0].generate(1)[
            0]  # 1 block generated for inputs to be in chain at height 572
        self.nodes[0].setmocktime(0)
        self.tip = int("0x" + inputblockhash, 0)
        self.tipheight += 1
        self.last_block_time += 600
        assert_equal(len(self.nodes[0].getblock(inputblockhash, True)["tx"]),
                     82 + 1)

        # 2 more version 4 blocks
        test_blocks = self.generate_blocks(1, 4)
        yield TestInstance(test_blocks, sync_every_block=False)  # 5
        # Not yet advanced to ACTIVE, height = 574 (will activate for block 576, not 575)
        assert_equal(
            get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')

        # Test both version 1 and version 2 transactions for all tests
        # BIP113 test transaction will be modified before each use to put in appropriate block time
        bip113tx_v1 = self.create_transaction(self.nodes[0],
                                              bip113input, self.nodeaddress,
                                              Decimal("49.98"))
        bip113tx_v1.vin[0].nSequence = 0xFFFFFFFE
        bip113tx_v1.nVersion = 1
        bip113tx_v2 = self.create_transaction(self.nodes[0],
                                              bip113input, self.nodeaddress,
                                              Decimal("49.98"))
        bip113tx_v2.vin[0].nSequence = 0xFFFFFFFE
        bip113tx_v2.nVersion = 2

        # For BIP68 test all 16 relative sequence locktimes
        bip68txs_v1 = self.create_bip68txs(bip68inputs, 1)
        bip68txs_v2 = self.create_bip68txs(bip68inputs, 2)

        # For BIP112 test:
        # 16 relative sequence locktimes of 10 against 10 OP_CSV OP_DROP inputs
        bip112txs_vary_nSequence_v1 = self.create_bip112txs(
            bip112basicinputs[0], False, 1)
        bip112txs_vary_nSequence_v2 = self.create_bip112txs(
            bip112basicinputs[0], False, 2)
        # 16 relative sequence locktimes of 9 against 10 OP_CSV OP_DROP inputs
        bip112txs_vary_nSequence_9_v1 = self.create_bip112txs(
            bip112basicinputs[1], False, 1, -1)
        bip112txs_vary_nSequence_9_v2 = self.create_bip112txs(
            bip112basicinputs[1], False, 2, -1)
        # sequence lock time of 10 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
        bip112txs_vary_OP_CSV_v1 = self.create_bip112txs(
            bip112diverseinputs[0], True, 1)
        bip112txs_vary_OP_CSV_v2 = self.create_bip112txs(
            bip112diverseinputs[0], True, 2)
        # sequence lock time of 9 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
        bip112txs_vary_OP_CSV_9_v1 = self.create_bip112txs(
            bip112diverseinputs[1], True, 1, -1)
        bip112txs_vary_OP_CSV_9_v2 = self.create_bip112txs(
            bip112diverseinputs[1], True, 2, -1)
        # -1 OP_CSV OP_DROP input
        bip112tx_special_v1 = self.create_bip112special(bip112specialinput, 1)
        bip112tx_special_v2 = self.create_bip112special(bip112specialinput, 2)

        ### TESTING ###
        ##################################
        ### Before Soft Forks Activate ###
        ##################################
        # All txs should pass
        ### Version 1 txs ###
        success_txs = []
        # add BIP113 tx and -1 CSV tx
        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
        success_txs.append(bip113signed1)
        success_txs.append(bip112tx_special_v1)
        # add BIP 68 txs
        success_txs.extend(all_rlt_txs(bip68txs_v1))
        # add BIP 112 with seq=10 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v1))
        # try BIP 112 with seq=9 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1))
        yield TestInstance([[self.create_test_block(success_txs), True]])  # 6
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        ### Version 2 txs ###
        success_txs = []
        # add BIP113 tx and -1 CSV tx
        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
        success_txs.append(bip113signed2)
        success_txs.append(bip112tx_special_v2)
        # add BIP 68 txs
        success_txs.extend(all_rlt_txs(bip68txs_v2))
        # add BIP 112 with seq=10 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v2))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v2))
        # try BIP 112 with seq=9 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2))
        yield TestInstance([[self.create_test_block(success_txs), True]])  # 7
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # 1 more version 4 block to get us to height 575 so the fork should now be active for the next block
        test_blocks = self.generate_blocks(1, 4)
        yield TestInstance(test_blocks, sync_every_block=False)  # 8
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'active')

        #################################
        ### After Soft Forks Activate ###
        #################################
        ### BIP 113 ###
        # BIP 113 tests should now fail regardless of version number if nLockTime isn't satisfied by new rules
        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
        for bip113tx in [bip113signed1, bip113signed2]:
            yield TestInstance([[self.create_test_block([bip113tx]),
                                 False]])  # 9,10
        # BIP 113 tests should now pass if the locktime is < MTP
        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1  # < MTP of prior block
        bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1  # < MTP of prior block
        bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
        for bip113tx in [bip113signed1, bip113signed2]:
            yield TestInstance([[self.create_test_block([bip113tx]),
                                 True]])  # 11,12
            self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # Next block height = 580 after 4 blocks of random version
        test_blocks = self.generate_blocks(4, 1234)
        yield TestInstance(test_blocks, sync_every_block=False)  # 13

        ### BIP 68 ###
        ### Version 1 txs ###
        # All still pass
        success_txs = []
        success_txs.extend(all_rlt_txs(bip68txs_v1))
        yield TestInstance([[self.create_test_block(success_txs), True]])  # 14
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        ### Version 2 txs ###
        bip68success_txs = []
        # All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass
        for b25 in range(2):
            for b22 in range(2):
                for b18 in range(2):
                    bip68success_txs.append(bip68txs_v2[1][b25][b22][b18])
        yield TestInstance([[self.create_test_block(bip68success_txs),
                             True]])  # 15
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        # All txs without flag fail as we are at delta height = 8 < 10 and delta time = 8 * 600 < 10 * 512
        bip68timetxs = []
        for b25 in range(2):
            for b18 in range(2):
                bip68timetxs.append(bip68txs_v2[0][b25][1][b18])
        for tx in bip68timetxs:
            yield TestInstance([[self.create_test_block([tx]),
                                 False]])  # 16 - 19
        bip68heighttxs = []
        for b25 in range(2):
            for b18 in range(2):
                bip68heighttxs.append(bip68txs_v2[0][b25][0][b18])
        for tx in bip68heighttxs:
            yield TestInstance([[self.create_test_block([tx]),
                                 False]])  # 20 - 23

        # Advance one block to 581
        test_blocks = self.generate_blocks(1, 1234)
        yield TestInstance(test_blocks, sync_every_block=False)  # 24

        # Height txs should fail and time txs should now pass 9 * 600 > 10 * 512
        bip68success_txs.extend(bip68timetxs)
        yield TestInstance([[self.create_test_block(bip68success_txs),
                             True]])  # 25
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        for tx in bip68heighttxs:
            yield TestInstance([[self.create_test_block([tx]),
                                 False]])  # 26 - 29

        # Advance one block to 583
        test_blocks = self.generate_blocks(2, 1234)
        yield TestInstance(test_blocks, sync_every_block=False)  # 30

        # All BIP 68 txs should pass
        bip68success_txs.extend(bip68heighttxs)
        yield TestInstance([[self.create_test_block(bip68success_txs),
                             True]])  # 31
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        ### BIP 112 ###
        ### Version 1 txs ###
        # -1 OP_CSV tx should fail
        yield TestInstance(
            [[self.create_test_block([bip112tx_special_v1]), False]])  #32
        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass
        success_txs = []
        for b25 in range(2):
            for b22 in range(2):
                for b18 in range(2):
                    success_txs.append(
                        bip112txs_vary_OP_CSV_v1[1][b25][b22][b18])
                    success_txs.append(
                        bip112txs_vary_OP_CSV_9_v1[1][b25][b22][b18])
        yield TestInstance([[self.create_test_block(success_txs), True]])  # 33
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, version 1 txs should now fail
        fail_txs = []
        fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1))
        fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
        for b25 in range(2):
            for b22 in range(2):
                for b18 in range(2):
                    fail_txs.append(bip112txs_vary_OP_CSV_v1[0][b25][b22][b18])
                    fail_txs.append(
                        bip112txs_vary_OP_CSV_9_v1[0][b25][b22][b18])

        for tx in fail_txs:
            yield TestInstance([[self.create_test_block([tx]),
                                 False]])  # 34 - 81

        ### Version 2 txs ###
        # -1 OP_CSV tx should fail
        yield TestInstance(
            [[self.create_test_block([bip112tx_special_v2]), False]])  #82

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met)
        success_txs = []
        for b25 in range(2):
            for b22 in range(2):
                for b18 in range(2):
                    success_txs.append(bip112txs_vary_OP_CSV_v2[1][b25][b22]
                                       [b18])  # 8/16 of vary_OP_CSV
                    success_txs.append(bip112txs_vary_OP_CSV_9_v2[1][b25][b22]
                                       [b18])  # 8/16 of vary_OP_CSV_9

        yield TestInstance([[self.create_test_block(success_txs), True]])  # 83
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        ## SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs ##
        # All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check
        fail_txs = []
        fail_txs.extend(all_rlt_txs(
            bip112txs_vary_nSequence_9_v2))  # 16/16 of vary_nSequence_9
        for b25 in range(2):
            for b22 in range(2):
                for b18 in range(2):
                    fail_txs.append(bip112txs_vary_OP_CSV_9_v2[0][b25][b22]
                                    [b18])  # 16/16 of vary_OP_CSV_9

        for tx in fail_txs:
            yield TestInstance([[self.create_test_block([tx]),
                                 False]])  # 84 - 107

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail
        fail_txs = []
        for b25 in range(2):
            for b22 in range(2):
                for b18 in range(2):
                    fail_txs.append(bip112txs_vary_nSequence_v2[1][b25][b22]
                                    [b18])  # 8/16 of vary_nSequence
        for tx in fail_txs:
            yield TestInstance([[self.create_test_block([tx]),
                                 False]])  # 108-115

        # If sequencelock types mismatch, tx should fail
        fail_txs = []
        for b25 in range(2):
            for b18 in range(2):
                fail_txs.append(bip112txs_vary_nSequence_v2[0][b25][1]
                                [b18])  # 12/16 of vary_nSequence
                fail_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][1]
                                [b18])  # 12/16 of vary_OP_CSV
        for tx in fail_txs:
            yield TestInstance([[self.create_test_block([tx]),
                                 False]])  # 116-123

        # Remaining txs should pass, just test masking works properly
        success_txs = []
        for b25 in range(2):
            for b18 in range(2):
                success_txs.append(bip112txs_vary_nSequence_v2[0][b25][0]
                                   [b18])  # 16/16 of vary_nSequence
                success_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][0]
                                   [b18])  # 16/16 of vary_OP_CSV
        yield TestInstance([[self.create_test_block(success_txs),
                             True]])  # 124
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # Additional test, of checking that comparison of two time types works properly
        time_txs = []
        for b25 in range(2):
            for b18 in range(2):
                tx = bip112txs_vary_OP_CSV_v2[0][b25][1][b18]
                tx.vin[0].nSequence = base_relative_locktime | seq_type_flag
                signtx = self.sign_transaction(self.nodes[0], tx)
                time_txs.append(signtx)
        yield TestInstance([[self.create_test_block(time_txs), True]])  # 125
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
Example #22
    def decoderawtransaction_asm_sighashtype(self):
        """Test decoding scripts via RPC command "decoderawtransaction".

        This test is in with the "decodescript" tests because they are testing the same "asm" script decodes.
        """

        # this test case uses a random plain vanilla mainnet transaction with a single P2PKH input and output
        tx = '0100000001696a20784a2c70143f634e95227dbdfdf0ecd51647052e70854512235f5986ca010000008a47304402207174775824bec6c2700023309a168231ec80b82c6069282f5133e6f11cbb04460220570edc55c7c5da2ca687ebd0372d3546ebc3f810516a002350cac72dfe192dfb014104d3f898e6487787910a690410b7a917ef198905c27fb9d3b0a42da12aceae0544fc7088d239d9a48f2828a15a09e84043001f27cc80d162cb95404e1210161536ffffffff0100e1f505000000001976a914eb6c6e0cdb2d256a32d97b8df1fc75d1920d9bca88ac00000000'
        rpc_result = self.nodes[0].decoderawtransaction(tx)
        assert_equal('304402207174775824bec6c2700023309a168231ec80b82c6069282f5133e6f11cbb04460220570edc55c7c5da2ca687ebd0372d3546ebc3f810516a002350cac72dfe192dfb[ALL] 04d3f898e6487787910a690410b7a917ef198905c27fb9d3b0a42da12aceae0544fc7088d239d9a48f2828a15a09e84043001f27cc80d162cb95404e1210161536', rpc_result['vin'][0]['scriptSig']['asm'])

        # this test case uses a mainnet transaction that has a P2SH input and both P2PKH and P2SH outputs.
        # it's from James D'Angelo's awesome introductory videos about multisig: https://www.youtube.com/watch?v=zIbUSaZBJgU and https://www.youtube.com/watch?v=OSA1pwlaypc
        # verify that we have not altered scriptPubKey decoding.
        tx = '01000000018d1f5635abd06e2c7e2ddf58dc85b3de111e4ad6e0ab51bb0dcf5e84126d927300000000fdfe0000483045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea01483045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75014c695221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53aeffffffff02611e0000000000001976a914dc863734a218bfe83ef770ee9d41a27f824a6e5688acee2a02000000000017a9142a5edea39971049a540474c6a99edf0aa4074c588700000000'
        rpc_result = self.nodes[0].decoderawtransaction(tx)
        assert_equal('8e3730608c3b0bb5df54f09076e196bc292a8e39a78e73b44b6ba08c78f5cbb0', rpc_result['txid'])
        assert_equal('0 3045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea[ALL] 3045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75[ALL] 5221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53ae', rpc_result['vin'][0]['scriptSig']['asm'])
        assert_equal('OP_DUP OP_HASH160 dc863734a218bfe83ef770ee9d41a27f824a6e56 OP_EQUALVERIFY OP_CHECKSIG', rpc_result['vout'][0]['scriptPubKey']['asm'])
        assert_equal('OP_HASH160 2a5edea39971049a540474c6a99edf0aa4074c58 OP_EQUAL', rpc_result['vout'][1]['scriptPubKey']['asm'])
        txSave = CTransaction()
        txSave.deserialize(BytesIO(hex_str_to_bytes(tx)))

        # make sure that a specifically crafted op_return value will not pass all the IsDERSignature checks and then get decoded as a sighash type
        tx = '01000000015ded05872fdbda629c7d3d02b194763ce3b9b1535ea884e3c8e765d42e316724020000006b48304502204c10d4064885c42638cbff3585915b322de33762598321145ba033fc796971e2022100bb153ad3baa8b757e30a2175bd32852d2e1cb9080f84d7e32fcdfd667934ef1b012103163c0ff73511ea1743fb5b98384a2ff09dd06949488028fd819f4d83f56264efffffffff0200000000000000000b6a0930060201000201000180380100000000001976a9141cabd296e753837c086da7a45a6c2fe0d49d7b7b88ac00000000'
        rpc_result = self.nodes[0].decoderawtransaction(tx)
        assert_equal('OP_RETURN 300602010002010001', rpc_result['vout'][0]['scriptPubKey']['asm'])

        # verify that we have not altered scriptPubKey processing even of a specially crafted P2PKH pubkeyhash and P2SH redeem script hash that is made to pass the der signature checks
        tx = '01000000018d1f5635abd06e2c7e2ddf58dc85b3de111e4ad6e0ab51bb0dcf5e84126d927300000000fdfe0000483045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea01483045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75014c695221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53aeffffffff02611e0000000000001976a914301102070101010101010102060101010101010188acee2a02000000000017a91430110207010101010101010206010101010101018700000000'
        rpc_result = self.nodes[0].decoderawtransaction(tx)
        assert_equal('OP_DUP OP_HASH160 3011020701010101010101020601010101010101 OP_EQUALVERIFY OP_CHECKSIG', rpc_result['vout'][0]['scriptPubKey']['asm'])
        assert_equal('OP_HASH160 3011020701010101010101020601010101010101 OP_EQUAL', rpc_result['vout'][1]['scriptPubKey']['asm'])

        # some more full transaction tests of varying specific scriptSigs. used instead of
        # tests in decodescript_script_sig because the decodescript RPC is specifically
        # for working on scriptPubKeys (argh!).
        push_signature = bytes_to_hex_str(txSave.vin[0].scriptSig)[2:(0x48*2+4)]
        signature = push_signature[2:]
        der_signature = signature[:-2]
        signature_sighash_decoded = der_signature + '[ALL]'
        signature_2 = der_signature + '82'
        push_signature_2 = '48' + signature_2
        signature_2_sighash_decoded = der_signature + '[NONE|ANYONECANPAY]'

        # 1) P2PK scriptSig
        txSave.vin[0].scriptSig = hex_str_to_bytes(push_signature)
        rpc_result = self.nodes[0].decoderawtransaction(bytes_to_hex_str(txSave.serialize()))
        assert_equal(signature_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])

        # make sure that the sighash decodes come out correctly for a more complex / lesser used case.
        txSave.vin[0].scriptSig = hex_str_to_bytes(push_signature_2)
        rpc_result = self.nodes[0].decoderawtransaction(bytes_to_hex_str(txSave.serialize()))
        assert_equal(signature_2_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])

        # 2) multisig scriptSig
        txSave.vin[0].scriptSig = hex_str_to_bytes('00' + push_signature + push_signature_2)
        rpc_result = self.nodes[0].decoderawtransaction(bytes_to_hex_str(txSave.serialize()))
        assert_equal('0 ' + signature_sighash_decoded + ' ' + signature_2_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])

        # 3) test a scriptSig that contains more than push operations.
        # in fact, it contains an OP_RETURN with data specially crafted to cause improper decode if the code does not catch it.
        txSave.vin[0].scriptSig = hex_str_to_bytes('6a143011020701010101010101020601010101010101')
        rpc_result = self.nodes[0].decoderawtransaction(bytes_to_hex_str(txSave.serialize()))
        assert_equal('OP_RETURN 3011020701010101010101020601010101010101', rpc_result['vin'][0]['scriptSig']['asm'])
Example #23
    def issue_reissue_transfer_test(self):
        ########################################
        # activate assets
        self.nodes[0].generate(500)
        self.sync_all()

        ########################################
        # issue
        to_address = self.nodes[0].getnewaddress()
        change_address = self.nodes[0].getnewaddress()
        unspent = self.nodes[0].listunspent()[0]
        inputs = [{k: unspent[k] for k in ['txid', 'vout']}]
        outputs = {
            'n1issueAssetXXXXXXXXXXXXXXXXWdnemQ': 500,
            change_address: float(unspent['amount']) - 500.0001,
            to_address: {
                'issue': {
                    'asset_name': 'TEST_ASSET',
                    'asset_quantity': 1000,
                    'units': 0,
                    'reissuable': 1,
                    'has_ipfs': 0,
                }
            }
        }
        tx_issue = self.nodes[0].createrawtransaction(inputs, outputs)
        tx_issue_signed = self.nodes[0].signrawtransaction(tx_issue)
        tx_issue_hash = self.nodes[0].sendrawtransaction(
            tx_issue_signed['hex'])
        assert_is_hash_string(tx_issue_hash)
        self.log.info("issue tx: " + tx_issue_hash)

        self.nodes[0].generate(1)
        self.sync_all()
        assert_equal(1000,
                     self.nodes[0].listmyassets('TEST_ASSET')['TEST_ASSET'])
        assert_equal(1,
                     self.nodes[0].listmyassets('TEST_ASSET!')['TEST_ASSET!'])

        ########################################
        # reissue
        unspent = self.nodes[0].listunspent()[0]
        unspent_asset_owner = self.nodes[0].listmyassets(
            'TEST_ASSET!', True)['TEST_ASSET!']['outpoints'][0]

        inputs = [
            {k: unspent[k]
             for k in ['txid', 'vout']},
            {k: unspent_asset_owner[k]
             for k in ['txid', 'vout']},
        ]

        outputs = {
            'n1ReissueAssetXXXXXXXXXXXXXXWG9NLd': 100,
            change_address: float(unspent['amount']) - 100.0001,
            to_address: {
                'reissue': {
                    'asset_name': 'TEST_ASSET',
                    'asset_quantity': 1000,
                }
            }
        }

        tx_reissue = self.nodes[0].createrawtransaction(inputs, outputs)
        tx_reissue_signed = self.nodes[0].signrawtransaction(tx_reissue)
        tx_reissue_hash = self.nodes[0].sendrawtransaction(
            tx_reissue_signed['hex'])
        assert_is_hash_string(tx_reissue_hash)
        self.log.info("reissue tx: " + tx_reissue_hash)

        self.nodes[0].generate(1)
        self.sync_all()
        assert_equal(2000,
                     self.nodes[0].listmyassets('TEST_ASSET')['TEST_ASSET'])
        assert_equal(1,
                     self.nodes[0].listmyassets('TEST_ASSET!')['TEST_ASSET!'])

        self.sync_all()

        ########################################
        # transfer
        remote_to_address = self.nodes[1].getnewaddress()
        unspent = self.nodes[0].listunspent()[0]
        unspent_asset = self.nodes[0].listmyassets(
            'TEST_ASSET', True)['TEST_ASSET']['outpoints'][0]
        inputs = [
            {k: unspent[k]
             for k in ['txid', 'vout']},
            {k: unspent_asset[k]
             for k in ['txid', 'vout']},
        ]
        outputs = {
            change_address: float(unspent['amount']) - 0.0001,
            remote_to_address: {
                'transfer': {
                    'TEST_ASSET': 400
                }
            },
            to_address: {
                'transfer': {
                    'TEST_ASSET': float(unspent_asset['amount']) - 400
                }
            }
        }
        tx_transfer = self.nodes[0].createrawtransaction(inputs, outputs)
        tx_transfer_signed = self.nodes[0].signrawtransaction(tx_transfer)
        tx_hex = tx_transfer_signed['hex']

        ########################################
        # try tampering with the sig
        tx = CTransaction()
        f = BytesIO(hex_str_to_bytes(tx_hex))
        tx.deserialize(f)
        script_sig = bytes_to_hex_str(tx.vin[1].scriptSig)
        tampered_sig = script_sig[:-8] + "deadbeef"
        tx.vin[1].scriptSig = hex_str_to_bytes(tampered_sig)
        tampered_hex = bytes_to_hex_str(tx.serialize())
        assert_raises_rpc_error(
            -26,
            "mandatory-script-verify-flag-failed (Script failed an OP_EQUALVERIFY operation)",
            self.nodes[0].sendrawtransaction, tampered_hex)

        ########################################
        # try tampering with the asset script
        tx = CTransaction()
        f = BytesIO(hex_str_to_bytes(tx_hex))
        tx.deserialize(f)
        rvnt = '72766e74'  #rvnt
        op_drop = '75'
        # change asset outputs from 400,600 to 500,500
        for i in range(1, 3):
            script_hex = bytes_to_hex_str(tx.vout[i].scriptPubKey)
            f = BytesIO(
                hex_str_to_bytes(script_hex[script_hex.index(rvnt) +
                                            len(rvnt):-len(op_drop)]))
            transfer = CScriptTransfer()
            transfer.deserialize(f)
            transfer.amount = 50000000000
            tampered_transfer = bytes_to_hex_str(transfer.serialize())
            tampered_script = script_hex[:script_hex.index(
                rvnt)] + rvnt + tampered_transfer + op_drop
            tx.vout[i].scriptPubKey = hex_str_to_bytes(tampered_script)
            t2 = CScriptTransfer()
            t2.deserialize(BytesIO(hex_str_to_bytes(tampered_transfer)))
        tampered_hex = bytes_to_hex_str(tx.serialize())
        assert_raises_rpc_error(
            -26,
            "mandatory-script-verify-flag-failed (Signature must be zero for failed CHECK(MULTI)SIG operation)",
            self.nodes[0].sendrawtransaction, tampered_hex)

        ########################################
        # try tampering with asset outs so ins and outs don't add up
        for n in (-20, -2, -1, 1, 2, 20):
            bad_outputs = {
                change_address: float(unspent['amount']) - 0.0001,
                remote_to_address: {
                    'transfer': {
                        'TEST_ASSET': 400
                    }
                },
                to_address: {
                    'transfer': {
                        'TEST_ASSET':
                        float(unspent_asset['amount']) - (400 + n)
                    }
                }
            }
            tx_bad_transfer = self.nodes[0].createrawtransaction(
                inputs, bad_outputs)
            tx_bad_transfer_signed = self.nodes[0].signrawtransaction(
                tx_bad_transfer)
            tx_bad_hex = tx_bad_transfer_signed['hex']
            assert_raises_rpc_error(
                -26, "bad-tx-asset-inputs-amount-mismatch-with-outputs-amount",
                self.nodes[0].sendrawtransaction, tx_bad_hex)

        ########################################
        # try tampering with asset outs so they don't use proper units
        for n in (-0.1, -0.00000001, 0.1, 0.00000001):
            bad_outputs = {
                change_address: float(unspent['amount']) - 0.0001,
                remote_to_address: {
                    'transfer': {
                        'TEST_ASSET': (400 + n)
                    }
                },
                to_address: {
                    'transfer': {
                        'TEST_ASSET':
                        float(unspent_asset['amount']) - (400 + n)
                    }
                }
            }
            tx_bad_transfer = self.nodes[0].createrawtransaction(
                inputs, bad_outputs)
            tx_bad_transfer_signed = self.nodes[0].signrawtransaction(
                tx_bad_transfer)
            tx_bad_hex = tx_bad_transfer_signed['hex']
            assert_raises_rpc_error(
                -26, "bad-txns-transfer-asset-amount-not-match-units",
                self.nodes[0].sendrawtransaction, tx_bad_hex)

        ########################################
        # send the good transfer
        tx_transfer_hash = self.nodes[0].sendrawtransaction(tx_hex)
        assert_is_hash_string(tx_transfer_hash)
        self.log.info("transfer tx: " + tx_transfer_hash)

        self.nodes[0].generate(1)
        self.sync_all()
        assert_equal(1600,
                     self.nodes[0].listmyassets('TEST_ASSET')['TEST_ASSET'])
        assert_equal(1,
                     self.nodes[0].listmyassets('TEST_ASSET!')['TEST_ASSET!'])
        assert_equal(400,
                     self.nodes[1].listmyassets('TEST_ASSET')['TEST_ASSET'])
Example #24
def txFromHex(hexstring):
    tx = CTransaction()
    f = cStringIO.StringIO(binascii.unhexlify(hexstring))
    tx.deserialize(f)
    return tx
Example #25
def txFromHex(hexstring):
    tx = CTransaction()
    f = BytesIO(hex_str_to_bytes(hexstring))
    tx.deserialize(f)
    return tx
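# The two txFromHex variants above differ only in how the hex string becomes
# a byte stream (Python 2 cStringIO + unhexlify vs. Python 3 BytesIO +
# hex_str_to_bytes).  A minimal Python 3 sketch using only the standard
# library (CTransaction is assumed to come from the test framework, as above):
from io import BytesIO

def tx_from_hex(hexstring):
    tx = CTransaction()
    tx.deserialize(BytesIO(bytes.fromhex(hexstring)))
    return tx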
示例#26
0
 def sign_transaction(self, node, tx):
     signresult = node.signrawtransaction(bytes_to_hex_str(tx.serialize()))
     tx = CTransaction()
     f = BytesIO(hex_str_to_bytes(signresult['hex']))
     tx.deserialize(f)
     return tx
示例#27
0
    def run_test(self):
        # prepare some coins for multiple *rawtransaction commands
        self.nodes[2].generate(1)
        self.sync_all()
        self.nodes[0].generate(101)
        self.sync_all()
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.5)
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0)
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5.0)
        self.sync_all()
        self.nodes[0].generate(5)
        self.sync_all()

        #
        # sendrawtransaction with missing input #
        #
        inputs = [{
            'txid':
            "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000",
            'vout': 1
        }]
        # this input does not exist
        outputs = {self.nodes[0].getnewaddress(): 4.998}
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        rawtx = self.nodes[2].signrawtransaction(rawtx)

        # This will raise an exception since there are missing inputs
        assert_raises_rpc_error(-25, "Missing inputs",
                                self.nodes[2].sendrawtransaction, rawtx['hex'])

        #
        # RAW TX MULTISIG TESTS #
        #
        # 2of2 test
        addr1 = self.nodes[2].getnewaddress()
        addr2 = self.nodes[2].getnewaddress()

        addr1Obj = self.nodes[2].validateaddress(addr1)
        addr2Obj = self.nodes[2].validateaddress(addr2)

        mSigObj = self.nodes[2].addmultisigaddress(
            2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
        mSigObjValid = self.nodes[2].validateaddress(mSigObj)

        # use balance deltas instead of absolute values
        bal = self.nodes[2].getbalance()

        # send 1.2 BTC to msig adr
        txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()
        # node2 has both keys of the 2of2 ms addr., tx should affect the
        # balance
        assert_equal(self.nodes[2].getbalance(), bal + Decimal('1.20000000'))

        # 2of3 test from different nodes
        bal = self.nodes[2].getbalance()
        addr1 = self.nodes[1].getnewaddress()
        addr2 = self.nodes[2].getnewaddress()
        addr3 = self.nodes[2].getnewaddress()

        addr1Obj = self.nodes[1].validateaddress(addr1)
        addr2Obj = self.nodes[2].validateaddress(addr2)
        addr3Obj = self.nodes[2].validateaddress(addr3)

        mSigObj = self.nodes[2].addmultisigaddress(
            2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])
        mSigObjValid = self.nodes[2].validateaddress(mSigObj)

        txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
        decTx = self.nodes[0].gettransaction(txId)
        rawTx = self.nodes[0].decoderawtransaction(decTx['hex'])
        sPK = rawTx['vout'][0]['scriptPubKey']['hex']
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()

        # THIS IS AN INCOMPLETE FEATURE
        # NODE2 HAS TWO OF THE THREE KEYS AND THE FUNDS SHOULD BE SPENDABLE
        # AND COUNT TOWARDS THE BALANCE CALCULATION
        # for now, assume the funds of a 2of3 multisig tx are not marked as
        # spendable
        assert_equal(self.nodes[2].getbalance(), bal)

        txDetails = self.nodes[0].gettransaction(txId, True)
        rawTx = self.nodes[0].decoderawtransaction(txDetails['hex'])
        vout = False
        for outpoint in rawTx['vout']:
            if outpoint['value'] == Decimal('2.20000000'):
                vout = outpoint
                break

        bal = self.nodes[0].getbalance()
        inputs = [{
            "txid": txId,
            "vout": vout['n'],
            "scriptPubKey": vout['scriptPubKey']['hex'],
            "amount": vout['value'],
        }]
        outputs = {self.nodes[0].getnewaddress(): 2.19}
        rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
        rawTxPartialSigned = self.nodes[1].signrawtransaction(rawTx, inputs)
        # node1 only has one key, so it cannot completely sign the tx
        assert_equal(rawTxPartialSigned['complete'], False)

        rawTxSigned = self.nodes[2].signrawtransaction(rawTx, inputs)
        # node2 owns two of the three keys, so it can sign the tx completely
        assert_equal(rawTxSigned['complete'], True)
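        # Note: node2 signs the original unsigned rawTx here rather than
        # combining with node1's partial signature; holding two of the three
        # keys is enough for a complete 2-of-3 signature on its own.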
        self.nodes[2].sendrawtransaction(rawTxSigned['hex'])
        rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex'])
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()
        assert_equal(self.nodes[0].getbalance(), bal + Decimal('50.00000000') +
                     Decimal('2.19000000'))  # block reward + tx

        # getrawtransaction tests
        # 1. valid parameters - only supply txid
        txHash = rawTx["hash"]
        assert_equal(self.nodes[0].getrawtransaction(txHash),
                     rawTxSigned['hex'])

        # 2. valid parameters - supply txid and 0 for non-verbose
        assert_equal(self.nodes[0].getrawtransaction(txHash, 0),
                     rawTxSigned['hex'])

        # 3. valid parameters - supply txid and False for non-verbose
        assert_equal(self.nodes[0].getrawtransaction(txHash, False),
                     rawTxSigned['hex'])

        # 4. valid parameters - supply txid and 1 for verbose.
        # We only check the "hex" field of the output so we don't need to
        # update this test every time the output format changes.
        assert_equal(self.nodes[0].getrawtransaction(txHash, 1)["hex"],
                     rawTxSigned['hex'])

        # 5. valid parameters - supply txid and True for verbose
        assert_equal(self.nodes[0].getrawtransaction(txHash, True)["hex"],
                     rawTxSigned['hex'])

        # 6. invalid parameters - supply txid and string "Flase"
        assert_raises_rpc_error(-3, "Invalid type",
                                self.nodes[0].getrawtransaction, txHash,
                                "False")

        # 7. invalid parameters - supply txid and empty array
        assert_raises_rpc_error(-3, "Invalid type",
                                self.nodes[0].getrawtransaction, txHash, [])

        # 8. invalid parameters - supply txid and empty dict
        assert_raises_rpc_error(-3, "Invalid type",
                                self.nodes[0].getrawtransaction, txHash, {})

        inputs = [{
            'txid':
            "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000",
            'vout': 1,
            'sequence': 1000
        }]
        outputs = {self.nodes[0].getnewaddress(): 1}
        rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
        decrawtx = self.nodes[0].decoderawtransaction(rawtx)
        assert_equal(decrawtx['vin'][0]['sequence'], 1000)

        # 9. invalid parameters - sequence number out of range
        inputs = [{
            'txid':
            "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000",
            'vout': 1,
            'sequence': -1
        }]
        outputs = {self.nodes[0].getnewaddress(): 1}
        assert_raises_rpc_error(
            -8, 'Invalid parameter, sequence number is out of range',
            self.nodes[0].createrawtransaction, inputs, outputs)

        # 10. invalid parameters - sequence number out of range
        inputs = [{
            'txid':
            "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000",
            'vout': 1,
            'sequence': 4294967296
        }]
        outputs = {self.nodes[0].getnewaddress(): 1}
        assert_raises_rpc_error(
            -8, 'Invalid parameter, sequence number is out of range',
            self.nodes[0].createrawtransaction, inputs, outputs)

        inputs = [{
            'txid':
            "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000",
            'vout': 1,
            'sequence': 4294967294
        }]
        outputs = {self.nodes[0].getnewaddress(): 1}
        rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
        decrawtx = self.nodes[0].decoderawtransaction(rawtx)
        assert_equal(decrawtx['vin'][0]['sequence'], 4294967294)
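        # The sequence field is a 32-bit unsigned integer, so the accepted
        # range is 0 through 0xFFFFFFFF (4294967295): -1 and 2**32
        # (4294967296) are rejected above, while 4294967294 (0xFFFFFFFE)
        # round-trips through decoderawtransaction unchanged.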

        # 11. check if getrawtransaction with verbose 'True' returns blockheight
        assert isinstance(
            self.nodes[0].getrawtransaction(txHash, True)['blockheight'], int)
        txList = self.nodes[0].getblockbyheight(
            self.nodes[0].getrawtransaction(txHash, True)['blockheight'])['tx']
        assert txHash in txList

        # tests with transactions containing data
        # 1. sending ffffffff, we get 006a04ffffffff
        # 00(OP_FALSE) 6a(OP_RETURN) 04(size of data, 4 bytes in this case) ffffffff(data)
        addr = self.nodes[0].getnewaddress()
        txid = self.nodes[0].sendtoaddress(addr, 2.0)
        inputs = [{
            "txid": txid,
            "vout": 0,
        }]
        outputs = {self.nodes[0].getnewaddress(): 0.5, "data": 'ffffffff'}
        rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
        tx = CTransaction()
        f = BytesIO(hex_str_to_bytes(rawtx))
        tx.deserialize(f)
        assert_equal(tx.vout[1].scriptPubKey.hex(), "006a04ffffffff")

        # 2. sending ffffffff00000000, we get 006a08ffffffff00000000
        # 00(OP_FALSE) 6a(OP_RETURN) 08(size of data, 8 bytes in this case) ffffffff00000000(data)
        addr = self.nodes[0].getnewaddress()
        txid = self.nodes[0].sendtoaddress(addr, 2.0)
        inputs = [{
            "txid": txid,
            "vout": 0,
        }]
        outputs = {
            self.nodes[0].getnewaddress(): 0.5,
            "data": 'ffffffff00000000'
        }
        rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
        tx = CTransaction()
        f = BytesIO(hex_str_to_bytes(rawtx))
        tx.deserialize(f)
        assert_equal(tx.vout[1].scriptPubKey.hex(), "006a08ffffffff00000000")

        #
        # Submit transaction without checking fee 1/2 #
        #
        self.nodes[3].generate(101)
        self.sync_all()
        txId = self.nodes[3].sendtoaddress(self.nodes[3].getnewaddress(), 30)
        rawtx = self.nodes[3].getrawtransaction(txId, 1)
        # Identify the 30 BTC output
        nOut = next(i for i, vout in enumerate(rawtx["vout"])
                    if vout["value"] == Decimal("30"))
        inputs2 = []
        outputs2 = {}
        inputs2.append({"txid": txId, "vout": nOut})
        outputs2 = {self.nodes[3].getnewaddress(): 30}
        raw_tx2 = self.nodes[3].createrawtransaction(inputs2, outputs2)
        tx_hex2 = self.nodes[3].signrawtransaction(raw_tx2)["hex"]
        assert_raises_rpc_error(-26, "insufficient priority",
                                self.nodes[3].sendrawtransaction, tx_hex2,
                                False, False)
        txid2 = self.nodes[3].sendrawtransaction(tx_hex2, False, True)
        mempool = self.nodes[3].getrawmempool(False)
        assert (txid2 in mempool)
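        # The extra booleans passed to sendrawtransaction above appear to be
        # allowhighfees and dontcheckfee: with the third argument True the
        # zero-fee transaction is admitted to the mempool even though the
        # normal "insufficient priority" check would reject it.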

        self.nodes[3].generate(1)
        self.sync_all()
        assert_equal(self.nodes[3].gettransaction(txid2)["txid"], txid2)

        #
        # Submit transaction without checking fee 2/2 #
        #
        txouts = gen_return_txouts()
        relayfee = self.nodes[3].getnetworkinfo()['relayfee']

        utxos = create_confirmed_utxos(relayfee, self.nodes[3], 14)
        us0 = utxos.pop()

        base_fee = relayfee * 100
        create_lots_of_big_transactions(self.nodes[3], txouts, utxos, 13,
                                        base_fee)

        inputs = [{"txid": us0["txid"], "vout": us0["vout"]}]
        outputs = {}
        outputs = {self.nodes[3].getnewaddress(): us0["amount"]}
        rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
        newtx = rawtx[0:92]
        newtx = newtx + txouts
        newtx = newtx + rawtx[94:]
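        # The slice rawtx[92:94] is the single output-count byte of this
        # one-input unsigned tx (4-byte version + 1-byte input count +
        # 36-byte outpoint + 1-byte empty scriptSig + 4-byte sequence =
        # 46 bytes = 92 hex chars).  Assuming gen_return_txouts() works like
        # the shared test helper, txouts begins with a replacement output
        # count followed by many large OP_RETURN outputs, so the splice keeps
        # the original output while inflating the transaction's size.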
        signresult = self.nodes[3].signrawtransaction(newtx, None, None,
                                                      "NONE|FORKID")
        mempoolsize = self.nodes[3].getmempoolinfo()['size']
        assert_raises_rpc_error(-26, "insufficient priority",
                                self.nodes[3].sendrawtransaction,
                                signresult["hex"], False, False)
        txid_new = self.nodes[3].sendrawtransaction(signresult["hex"], False,
                                                    True)
        mempoolsize_new = self.nodes[3].getmempoolinfo()['size']
        assert (txid_new in self.nodes[3].getrawmempool())
        assert_equal(mempoolsize_new, mempoolsize)
示例#28
0
    def get_tests(self):
        start_time = 1490247077 + POW_TARGET_SPACING * 1000 + 101
        long_past_time = start_time - POW_TARGET_SPACING * 1000 # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future
        self.nodes[0].setmocktime(long_past_time - 100) # enough so that the generated blocks will still all be before long_past_time
        self.coinbase_blocks = self.nodes[0].generate(1 + 16 + 2*32 + 1) # 82 blocks generated for inputs
        self.nodes[0].setmocktime(start_time) # set time back to present so yielded blocks aren't in the future as we advance last_block_time
        self.tipheight = 82 # height of the next block to build
        self.last_block_time = long_past_time
        self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
        self.nodeaddress = self.nodes[0].getnewaddress()

        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'defined')
        test_blocks = self.generate_blocks(61, 4)

        # Fail to achieve LOCKED_IN 100 out of 144 signal bit 0
        # using a variety of bits to simulate multiple parallel softforks
        test_blocks = self.generate_blocks(50, 536870913, test_blocks) # 0x20000001 (signalling ready)
        test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not)
        test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
        test_blocks = self.generate_blocks(24, 536936448, test_blocks) # 0x20010000 (signalling not)

        # 108 out of 144 signal bit 0 to achieve lock-in
        # using a variety of bits to simulate multiple parallel softforks
        test_blocks = self.generate_blocks(58, 536870913, test_blocks) # 0x20000001 (signalling ready)
        test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not)
        test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
        test_blocks = self.generate_blocks(10, 536936448, test_blocks) # 0x20010000 (signalling not)
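        # Sanity check of the version values used above: the BIP9 "top bits"
        # pattern is 0x20000000, with individual deployment bits OR'd in.
        assert 536870913 == 0x20000000 | (1 << 0)              # 0x20000001
        assert 536871169 == 0x20000000 | (1 << 0) | (1 << 8)   # 0x20000101
        assert 536936448 == 0x20000000 | (1 << 16)             # 0x20010000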

        # 130 more version 4 blocks (plus 10 more below carrying the coinbase-splitting txs, 140 in total)
        test_blocks = self.generate_blocks(130, 4, test_blocks)

        extend_txs = []
        # split 50 coinbases into 2 unspents so we have enough unspent txs
        for coinbase_block in self.coinbase_blocks[0:50]:
            amount = (INITIAL_BLOCK_REWARD-0.01) / 2.0
            addr_a = self.nodes[0].getnewaddress()
            addr_b = self.nodes[0].getnewaddress()
            inputs = [{'txid': self.nodes[0].getblock(coinbase_block)['tx'][0], 'vout': 0}]
            outputs = {
                addr_a : amount,
                addr_b : amount
            }
            rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
            res = self.nodes[0].signrawtransaction(rawtx)
            rawtx = res['hex']
            tx = CTransaction()
            f = BytesIO(hex_str_to_bytes(rawtx))
            tx.deserialize(f)
            extend_txs.append(tx)
        test_blocks = self.generate_blocks(10, 4, test_blocks, extend_txs=extend_txs)


        yield TestInstance(test_blocks[0:61], sync_every_block=True) # 1
        # Advanced from DEFINED to STARTED, height = 143
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')

        yield TestInstance(test_blocks[61:61+144], sync_every_block=True) # 2
        # Failed to advance past STARTED, height = 287
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')

        yield TestInstance(test_blocks[61+144:61+144+144], sync_every_block=True) # 3
        # Advanced from STARTED to LOCKED_IN, height = 431
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')

        yield TestInstance(test_blocks[61+144+144:61+144+144+130], sync_every_block=True) # 4

        yield TestInstance(test_blocks[61+144+144+130:61+144+144+130+10], sync_every_block=True) # 4 (cont., the 10 blocks carrying extend_txs)

        self.nodes[0].generate(1)
        self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
        self.tipheight += 1
        self.last_block_time += POW_TARGET_SPACING

        self.unspents = []
        for unspent in self.nodes[0].listunspent():
            if unspent['spendable']:
                self.unspents.append((unspent['txid'], unspent['vout'], unspent['amount']))


        ### Inputs at height = 572
        # Put inputs for all tests in the chain at height 572 (tip now = 571) (time increases by 600s per block)
        # Note we reuse inputs for v1 and v2 txs so must test these separately
        # 16 normal inputs
        bip68inputs = []
        for i in range(16):
            bip68inputs.append(self.send_generic_unspent_input_tx(self.nodes[0], self.unspents.pop()))
        # 2 sets of 16 inputs with 10 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112basicinputs = []
        for j in range(2):
            inputs = []
            for i in range(16):
                inputs.append(self.send_generic_unspent_input_tx(self.nodes[0], self.unspents.pop()))
            bip112basicinputs.append(inputs)
        # 2 sets of 16 varied inputs with (relative_lock_time) OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112diverseinputs = []
        for j in range(2):
            inputs = []
            for i in range(16):
                inputs.append(self.send_generic_unspent_input_tx(self.nodes[0], self.unspents.pop()))
            bip112diverseinputs.append(inputs)
        # 1 special input with -1 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112specialinput = self.send_generic_unspent_input_tx(self.nodes[0], self.unspents.pop())
        # 1 normal input
        bip113input = self.send_generic_unspent_input_tx(self.nodes[0], self.unspents.pop())

        self.nodes[0].setmocktime(self.last_block_time + POW_TARGET_SPACING)
        inputblockhash = self.nodes[0].generate(1)[0] # 1 block generated for inputs to be in chain at height 572
        self.nodes[0].setmocktime(start_time)
        self.tip = int("0x" + inputblockhash, 0)
        self.tipheight += 1
        self.last_block_time += POW_TARGET_SPACING
        assert_equal(len(self.nodes[0].getblock(inputblockhash,True)["tx"]), 82+1)

        # 1 more version 4 block
        test_blocks = self.generate_blocks(1, 4)
        yield TestInstance(test_blocks, sync_every_block=False) # 5
        # Not yet advanced to ACTIVE, height = 574 (will activate for block 576, not 575)
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')

        # Test both version 1 and version 2 transactions for all tests
        # BIP113 test transaction will be modified before each use to put in appropriate block time
        bip113tx_v1 = self.create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98"))
        bip113tx_v1.vin[0].nSequence = 0xFFFFFFFE
        bip113tx_v1.nVersion = 1
        bip113tx_v2 = self.create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98"))
        bip113tx_v2.vin[0].nSequence = 0xFFFFFFFE
        bip113tx_v2.nVersion = 2

        # For BIP68 test all 16 relative sequence locktimes
        bip68txs_v1 = self.create_bip68txs(bip68inputs, 1)
        bip68txs_v2 = self.create_bip68txs(bip68inputs, 2)

        # For BIP112 test:
        # 16 relative sequence locktimes of 10 against 10 OP_CSV OP_DROP inputs
        bip112txs_vary_nSequence_v1 = self.create_bip112txs(bip112basicinputs[0], False, 1)
        bip112txs_vary_nSequence_v2 = self.create_bip112txs(bip112basicinputs[0], False, 2)
        # 16 relative sequence locktimes of 9 against 10 OP_CSV OP_DROP inputs
        bip112txs_vary_nSequence_9_v1 = self.create_bip112txs(bip112basicinputs[1], False, 1, -1)
        bip112txs_vary_nSequence_9_v2 = self.create_bip112txs(bip112basicinputs[1], False, 2, -1)
        # sequence lock time of 10 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
        bip112txs_vary_OP_CSV_v1 = self.create_bip112txs(bip112diverseinputs[0], True, 1)
        bip112txs_vary_OP_CSV_v2 = self.create_bip112txs(bip112diverseinputs[0], True, 2)
        # sequence lock time of 9 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
        bip112txs_vary_OP_CSV_9_v1 = self.create_bip112txs(bip112diverseinputs[1], True, 1, -1)
        bip112txs_vary_OP_CSV_9_v2 = self.create_bip112txs(bip112diverseinputs[1], True, 2, -1)
        # -1 OP_CSV OP_DROP input
        bip112tx_special_v1 = self.create_bip112special(bip112specialinput, 1)
        bip112tx_special_v2 = self.create_bip112special(bip112specialinput, 2)


        ### TESTING ###
        ##################################
        ### Before Soft Forks Activate ###
        ##################################
        # All txs should pass
        ### Version 1 txs ###
        success_txs = []
        # add BIP113 tx and -1 CSV tx
        bip113tx_v1.nLockTime = self.last_block_time - POW_TARGET_SPACING * 5 # = MTP of prior block (not <) but < time put on current block
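        # Why "* 5": with blocks exactly POW_TARGET_SPACING apart, the median
        # time past of the prior block is the timestamp of the 6th-newest of
        # its last 11 blocks, i.e. last_block_time - 5 * POW_TARGET_SPACING.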
        bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
        success_txs.append(bip113signed1)
        success_txs.append(bip112tx_special_v1)
        # add BIP 68 txs
        success_txs.extend(all_rlt_txs(bip68txs_v1))
        # add BIP 112 with seq=10 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v1))
        # try BIP 112 with seq=9 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1))
        yield TestInstance([[self.create_test_block(success_txs), True]]) # 6
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        ### Version 2 txs ###
        success_txs = []
        # add BIP113 tx and -1 CSV tx
        bip113tx_v2.nLockTime = self.last_block_time - POW_TARGET_SPACING * 5 # = MTP of prior block (not <) but < time put on current block
        bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
        success_txs.append(bip113signed2)
        success_txs.append(bip112tx_special_v2)
        # add BIP 68 txs
        success_txs.extend(all_rlt_txs(bip68txs_v2))
        # add BIP 112 with seq=10 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v2))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v2))
        # try BIP 112 with seq=9 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2))
        yield TestInstance([[self.create_test_block(success_txs), True]]) # 7
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())


        # 1 more version 4 block to get us to height 575 so the fork should now be active for the next block
        test_blocks = self.generate_blocks(1, 4)
        yield TestInstance(test_blocks, sync_every_block=False) # 8
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'active')


        #################################
        ### After Soft Forks Activate ###
        #################################
        ### BIP 113 ###
        # BIP 113 tests should now fail regardless of version number if nLockTime isn't satisfied by new rules
        bip113tx_v1.nLockTime = self.last_block_time - POW_TARGET_SPACING * 5 # = MTP of prior block (not <) but < time put on current block
        bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
        bip113tx_v2.nLockTime = self.last_block_time - POW_TARGET_SPACING * 5 # = MTP of prior block (not <) but < time put on current block
        bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
        for bip113tx in [bip113signed1, bip113signed2]:
            yield TestInstance([[self.create_test_block([bip113tx]), False]]) # 9,10
        # BIP 113 tests should now pass if the locktime is < MTP
        bip113tx_v1.nLockTime = self.last_block_time - POW_TARGET_SPACING * 5 - 1 # < MTP of prior block
        bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
        bip113tx_v2.nLockTime = self.last_block_time - POW_TARGET_SPACING * 5 - 1 # < MTP of prior block
        bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
        for bip113tx in [bip113signed1, bip113signed2]:
            yield TestInstance([[self.create_test_block([bip113tx]), True]]) # 11,12
            self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # Next block height = 580 after 4 blocks of random version
        test_blocks = self.generate_blocks(4, 1234)
        yield TestInstance(test_blocks, sync_every_block=False) # 13

        ### BIP 68 ###
        ### Version 1 txs ###
        # All still pass
        success_txs = []
        success_txs.extend(all_rlt_txs(bip68txs_v1))
        yield TestInstance([[self.create_test_block(success_txs), True]]) # 14
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        ### Version 2 txs ###
        bip68success_txs = []
        # All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass
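        # (Index sketch: the leading [1]/[0] index selects whether bit 31,
        # SEQUENCE_LOCKTIME_DISABLE_FLAG, is set in nSequence; b22 is the
        # type flag -- 1 for time-based, 0 for height-based locks -- and
        # b25/b18 are bits outside the consensus mask, varied only for
        # coverage.)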
        for b25 in range(2):
            for b22 in range(2):
                for b18 in range(2):
                    bip68success_txs.append(bip68txs_v2[1][b25][b22][b18])
        yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 15
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        # All txs without flag fail as we are at delta height = 8 < 10 and delta time = 8 * 600 < 10 * 512
        bip68timetxs = []
        for b25 in range(2):
            for b18 in range(2):
                bip68timetxs.append(bip68txs_v2[0][b25][1][b18])
        for tx in bip68timetxs:
            yield TestInstance([[self.create_test_block([tx]), False]]) # 16 - 19
        bip68heighttxs = []
        for b25 in range(2):
            for b18 in range(2):
                bip68heighttxs.append(bip68txs_v2[0][b25][0][b18])
        for tx in bip68heighttxs:
            yield TestInstance([[self.create_test_block([tx]), False]]) # 20 - 23

        # Advance one block to 581
        test_blocks = self.generate_blocks(1, 1234)
        yield TestInstance(test_blocks, sync_every_block=False) # 24

        # Height txs should fail and time txs should now pass 9 * 600 > 10 * 512
        bip68success_txs.extend(bip68timetxs)
        yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 25
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        for tx in bip68heighttxs:
            yield TestInstance([[self.create_test_block([tx]), False]]) # 26 - 29

        # Advance two blocks to 583
        test_blocks = self.generate_blocks(2, 1234)
        yield TestInstance(test_blocks, sync_every_block=False) # 30

        # All BIP 68 txs should pass
        bip68success_txs.extend(bip68heighttxs)
        yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 31
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())


        ### BIP 112 ###
        ### Version 1 txs ###
        # -1 OP_CSV tx should fail
        yield TestInstance([[self.create_test_block([bip112tx_special_v1]), False]]) #32
        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass
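        # (BIP112 detail: when the disable flag is set in the stack argument,
        # OP_CSV behaves as a NOP, so these spends succeed even for
        # version 1 transactions that BIP68 does not cover.)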
        success_txs = []
        for b25 in range(2):
            for b22 in range(2):
                for b18 in range(2):
                    success_txs.append(bip112txs_vary_OP_CSV_v1[1][b25][b22][b18])
                    success_txs.append(bip112txs_vary_OP_CSV_9_v1[1][b25][b22][b18])
        yield TestInstance([[self.create_test_block(success_txs), True]]) # 33
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, version 1 txs should now fail
        fail_txs = []
        fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1))
        fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
        for b25 in range(2):
            for b22 in range(2):
                for b18 in range(2):
                    fail_txs.append(bip112txs_vary_OP_CSV_v1[0][b25][b22][b18])
                    fail_txs.append(bip112txs_vary_OP_CSV_9_v1[0][b25][b22][b18])

        for tx in fail_txs:
            yield TestInstance([[self.create_test_block([tx]), False]]) # 34 - 81

        ### Version 2 txs ###
        # -1 OP_CSV tx should fail
        yield TestInstance([[self.create_test_block([bip112tx_special_v2]), False]]) #82

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met)
        success_txs = []
        for b25 in range(2):
            for b22 in range(2):
                for b18 in range(2):
                    success_txs.append(bip112txs_vary_OP_CSV_v2[1][b25][b22][b18]) # 8/16 of vary_OP_CSV
                    success_txs.append(bip112txs_vary_OP_CSV_9_v2[1][b25][b22][b18]) # 8/16 of vary_OP_CSV_9

        yield TestInstance([[self.create_test_block(success_txs), True]]) # 83
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        ## SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs ##
        # All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check
        fail_txs = []
        fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2)) # 16/16 of vary_nSequence_9
        for b25 in range(2):
            for b22 in range(2):
                for b18 in range(2):
                    fail_txs.append(bip112txs_vary_OP_CSV_9_v2[0][b25][b22][b18]) # 16/16 of vary_OP_CSV_9

        for tx in fail_txs:
            yield TestInstance([[self.create_test_block([tx]), False]]) # 84 - 107

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail
        fail_txs = []
        for b25 in range(2):
            for b22 in range(2):
                for b18 in range(2):
                    fail_txs.append(bip112txs_vary_nSequence_v2[1][b25][b22][b18]) # 8/16 of vary_nSequence
        for tx in fail_txs:
            yield TestInstance([[self.create_test_block([tx]), False]]) # 108-115

        # If sequencelock types mismatch, tx should fail
        fail_txs = []
        for b25 in range(2):
            for b18 in range(2):
                fail_txs.append(bip112txs_vary_nSequence_v2[0][b25][1][b18]) # 12/16 of vary_nSequence
                fail_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][1][b18]) # 12/16 of vary_OP_CSV
        for tx in fail_txs:
            yield TestInstance([[self.create_test_block([tx]), False]]) # 116-123

        # Remaining txs should pass, just test masking works properly
        success_txs = []
        for b25 in range(2):
            for b18 in range(2):
                success_txs.append(bip112txs_vary_nSequence_v2[0][b25][0][b18]) # 16/16 of vary_nSequence
                success_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][0][b18]) # 16/16 of vary_OP_CSV
        yield TestInstance([[self.create_test_block(success_txs), True]]) # 124
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # Additional test, of checking that comparison of two time types works properly
        time_txs = []
        for b25 in range(2):
            for b18 in range(2):
                tx = bip112txs_vary_OP_CSV_v2[0][b25][1][b18]
                tx.vin[0].nSequence = base_relative_locktime | seq_type_flag
                signtx = self.sign_transaction(self.nodes[0], tx)
                time_txs.append(signtx)
        yield TestInstance([[self.create_test_block(time_txs), True]]) # 125
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())