Example #1
    def run_test(self):
        # Add p2p connection to node0
        node = self.nodes[0]  # convenience reference to the node
        node.add_p2p_connection(P2PDataStore())

        network_thread_start()
        node.p2p.wait_for_verack()

        best_block = self.nodes[0].getbestblockhash()
        tip = int(best_block, 16)
        best_block_time = self.nodes[0].getblock(best_block)['time']
        block_time = best_block_time + 1

        self.log.info("Create a new block with an anyone-can-spend coinbase.")
        height = 1
        block = create_block(tip, create_coinbase(height), block_time)
        block.solve()
        # Save the coinbase for later
        block1 = block
        tip = block.sha256
        node.p2p.send_blocks_and_test([block], node, success=True)

        self.log.info("Mature the block.")
        self.nodes[0].generate(100)

        # b'\x64' is OP_NOTIF
        # Transaction will be rejected with code 16 (REJECT_INVALID)
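        # A lone OP_NOTIF has no matching OP_ENDIF, so script evaluation fails
        # with "Invalid OP_IF construction" (see the reject_reason below)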
        tx1 = create_transaction(block1.vtx[0], 0, b'\x64', 50 * COIN - 12000)
        node.p2p.send_txs_and_test([tx1], node, success=False, reject_code=16, reject_reason=b'mandatory-script-verify-flag-failed (Invalid OP_IF construction)')

        # Verify valid transaction
        tx1 = create_transaction(block1.vtx[0], 0, b'', 50 * COIN - 12000)
        node.p2p.send_txs_and_test([tx1], node, success=True)
    def run_test(self):

        # First, quick check that CSV is ACTIVE at genesis
        assert_equal(self.nodes[0].getblockcount(), 0)
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'active')

        self.nodes[0].add_p2p_connection(P2PInterface())

        self.nodeaddress = self.nodes[0].getnewaddress()

        self.log.info("Test that blocks past the genesis block must be at least version 4")

        # Create a v3 block
        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
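        # A block's nTime must be strictly greater than the median time past of
        # the preceding blocks, hence mediantime + 1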
        block = create_block(int(tip, 16), create_coinbase(1), block_time)
        block.nVersion = 3
        block.solve()

        # Send the v3 block to the node; it should be rejected for its obsolete version
        with self.nodes[0].assert_debug_log(expected_msgs=['{}, bad-version(0x00000003)'.format(block.hash)]):
            self.nodes[0].p2p.send_and_ping(msg_block(block))

        # The best block should not have changed, because the v3 block was rejected
        assert_equal(self.nodes[0].getbestblockhash(), tip)

        self.log.info("Test that a version 4 block with a valid-according-to-CLTV transaction is accepted")

        # Generate 100 blocks so that first coinbase matures
        generated_blocks = self.nodes[0].generate(100)
        spendable_coinbase_txid = self.nodes[0].getblock(generated_blocks[0])['tx'][0]
        coinbase_value = self.nodes[0].decoderawtransaction(self.nodes[0].gettransaction(spendable_coinbase_txid)["hex"])["vout"][0]["value"]
        tip = generated_blocks[-1]

        # Construct a v4 block
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(len(generated_blocks) + 1), block_time)
        block.nVersion = 4

        # Create a CLTV transaction
        spendtx = create_transaction(self.nodes[0], spendable_coinbase_txid,
                self.nodeaddress, amount=1.0, fee=coinbase_value-1)
        spendtx = cltv_validate(self.nodes[0], spendtx, 1)
        spendtx.rehash()

        # Add the CLTV transaction and prepare for sending
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

        # Send block and check that it becomes new best block
        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
def create_bip68txs(node, bip68inputs, txversion, address, locktime_delta=0):
    """Returns a list of bip68 transactions with different bits set."""
    txs = []
    assert len(bip68inputs) >= 16
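    # product(*[[True, False]] * 4) enumerates all 16 on/off combinations of the
    # four relative_locktime flag bits; 'sdf' is the sequence-lock disable flag
    # and 'stf' the type flag (time- vs. height-based lock), which is why only
    # those two are recorded alongside each tx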
    for i, (sdf, srhb, stf, srlb) in enumerate(product(*[[True, False]] * 4)):
        locktime = relative_locktime(sdf, srhb, stf, srlb)
        tx = create_transaction(node, bip68inputs[i], address, amount=Decimal("49.98"))
        tx.nVersion = txversion
        tx.vin[0].nSequence = locktime + locktime_delta
        tx = sign_transaction(node, tx)
        tx.rehash()
        txs.append({'tx': tx, 'sdf': sdf, 'stf': stf})

    return txs
    def run_test(self):
        util.node_fastmerkle = self.nodes[0]
        self.address = self.nodes[0].getnewaddress()
        self.ms_address = self.nodes[0].addmultisigaddress(1, [self.address])['address']
        self.wit_address = self.nodes[0].addwitnessaddress(self.address)
        self.wit_ms_address = self.nodes[0].addmultisigaddress(1, [self.address], '', 'p2sh-segwit')['address']

        self.coinbase_blocks = self.nodes[0].generate(2) # Block 2
        coinbase_txid = []
        for i in self.coinbase_blocks:
            coinbase_txid.append(self.nodes[0].getblock(i)['tx'][0])
        self.nodes[0].generate(427) # Block 429
        self.lastblockhash = self.nodes[0].getbestblockhash()
        self.tip = int("0x" + self.lastblockhash, 0)
        self.lastblockheight = 429
        self.lastblocktime = int(time.time()) + 429

        self.log.info("Test 1: NULLDUMMY compliant base transactions should be accepted to mempool and mined before activation [430]")
        coinbase_value = self.nodes[0].decoderawtransaction(self.nodes[0].gettransaction(coinbase_txid[0])["hex"])["vout"][0]["value"]
        test1txs = [create_transaction(self.nodes[0], coinbase_txid[0], self.ms_address, amount=49, fee=coinbase_value-49)]
        txid1 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[0].serialize_with_witness()), True)
        test1txs.append(create_transaction(self.nodes[0], txid1, self.ms_address, amount=48, fee=49-48))
        txid2 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[1].serialize_with_witness()), True)
        coinbase_value = self.nodes[0].decoderawtransaction(self.nodes[0].gettransaction(coinbase_txid[1])["hex"])["vout"][0]["value"]
        test1txs.append(create_transaction(self.nodes[0], coinbase_txid[1], self.wit_ms_address, amount=49, fee=coinbase_value-49))
        txid3 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[2].serialize_with_witness()), True)
        self.block_submit(self.nodes[0], test1txs, False, True)

        self.log.info("Test 2: Non-NULLDUMMY base multisig transaction should not be accepted to mempool before activation")
        test2tx = create_transaction(self.nodes[0], txid2, self.ms_address, amount=47, fee=48-47)
        trueDummy(test2tx)
        assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test2tx.serialize_with_witness()), True)

        self.log.info("Test 3: Non-NULLDUMMY base transactions should be accepted in a block before activation [431]")
        self.block_submit(self.nodes[0], [test2tx], False, True)

        self.log.info("Test 4: Non-NULLDUMMY base multisig transaction is invalid after activation")
        test4tx = create_transaction(self.nodes[0], test2tx.hash, self.address, amount=46, fee=47-46)
        test6txs = [CTransaction(test4tx)]
        trueDummy(test4tx)
        assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test4tx.serialize_with_witness()), True)
        self.block_submit(self.nodes[0], [test4tx])

        self.log.info("Test 5: Non-NULLDUMMY P2WSH multisig transaction invalid after activation")
        test5tx = create_transaction(self.nodes[0], txid3, self.wit_address, amount=48, fee=49-48)
        test6txs.append(CTransaction(test5tx))
        test5tx.wit.vtxinwit[0].scriptWitness.stack[0] = b'\x01'
        assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test5tx.serialize_with_witness()), True)
        self.block_submit(self.nodes[0], [test5tx], True)

        self.log.info("Test 6: NULLDUMMY compliant base/witness transactions should be accepted to mempool and in block after activation [432]")
        for i in test6txs:
            self.nodes[0].sendrawtransaction(bytes_to_hex_str(i.serialize_with_witness()), True)
        self.block_submit(self.nodes[0], test6txs, True, True)
def create_bip112txs(node, bip112inputs, varyOP_CSV, txversion, address, locktime_delta=0):
    """Returns a list of bip68 transactions with different bits set."""
    txs = []
    assert len(bip112inputs) >= 16
    for i, (sdf, srhb, stf, srlb) in enumerate(product(*[[True, False]] * 4)):
        locktime = relative_locktime(sdf, srhb, stf, srlb)
        tx = create_transaction(node, bip112inputs[i], address, amount=Decimal("49.98"))
        if (varyOP_CSV):  # if varying OP_CSV, nSequence is fixed
            tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME + locktime_delta
        else:  # vary nSequence instead, OP_CSV is fixed
            tx.vin[0].nSequence = locktime + locktime_delta
        tx.nVersion = txversion
        signtx = sign_transaction(node, tx)
        if (varyOP_CSV):
            signtx.vin[0].scriptSig = CScript([locktime, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
        else:
            signtx.vin[0].scriptSig = CScript([BASE_RELATIVE_LOCKTIME, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
        signtx.rehash()
        txs.append({'tx': signtx, 'sdf': sdf, 'stf': stf})
    return txs
Example #6
    def run_test(self):
        node = self.nodes[0]  # convenience reference to the node

        self.bootstrap_p2p()  # Add one p2p connection to the node

        best_block = self.nodes[0].getbestblockhash()
        tip = int(best_block, 16)
        best_block_time = self.nodes[0].getblock(best_block)['time']
        block_time = best_block_time + 1

        self.log.info("Create a new block with an anyone-can-spend coinbase.")
        height = 1
        block = create_block(tip, create_coinbase(height), block_time)
        block.solve()
        # Save the coinbase for later
        block1 = block
        tip = block.sha256
        node.p2p.send_blocks_and_test([block], node, success=True)

        self.log.info("Mature the block.")
        self.nodes[0].generate(100)

        # b'\x64' is OP_NOTIF
        # Transaction will be rejected with code 16 (REJECT_INVALID)
        # and we get disconnected immediately
        self.log.info('Test a transaction that is rejected')
        tx1 = create_transaction(block1.vtx[0], 0, b'\x64' * 35, 50 * COIN - 12000)
        node.p2p.send_txs_and_test([tx1], node, success=False, expect_disconnect=True)

        # Make two p2p connections to provide the node with orphans
        # * p2ps[0] will send valid orphan txs (one with low fee)
        # * p2ps[1] will send an invalid orphan tx (and is later disconnected for that)
        self.reconnect_p2p(num_connections=2)

        self.log.info('Test orphan transaction handling ... ')
        # Create a root transaction that we withhold until all dependent
        # transactions have been sent out and are in the orphan cache
        SCRIPT_PUB_KEY_OP_TRUE = b'\x51\x75' * 15 + b'\x51'
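        # 0x51 is OP_TRUE and 0x75 is OP_DROP; repeating the pair simply pads
        # the otherwise trivial anyone-can-spend script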
        tx_withhold = CTransaction()
        tx_withhold.vin.append(CTxIn(outpoint=COutPoint(block1.vtx[0].sha256, 0)))
        tx_withhold.vout.append(CTxOut(nValue=50 * COIN - 12000, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
        tx_withhold.calc_sha256()

        # Our first orphan tx with some outputs to create further orphan txs
        tx_orphan_1 = CTransaction()
        tx_orphan_1.vin.append(CTxIn(outpoint=COutPoint(tx_withhold.sha256, 0)))
        tx_orphan_1.vout = [CTxOut(nValue=10 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE)] * 3
        tx_orphan_1.calc_sha256()

        # A valid transaction with low fee
        tx_orphan_2_no_fee = CTransaction()
        tx_orphan_2_no_fee.vin.append(CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 0)))
        tx_orphan_2_no_fee.vout.append(CTxOut(nValue=10 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))

        # A valid transaction with sufficient fee
        tx_orphan_2_valid = CTransaction()
        tx_orphan_2_valid.vin.append(CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 1)))
        tx_orphan_2_valid.vout.append(CTxOut(nValue=10 * COIN - 12000, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
        tx_orphan_2_valid.calc_sha256()

        # An invalid transaction with negative fee
        tx_orphan_2_invalid = CTransaction()
        tx_orphan_2_invalid.vin.append(CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 2)))
        tx_orphan_2_invalid.vout.append(CTxOut(nValue=11 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
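        # tx_orphan_1's outputs are 10 * COIN each, so an 11 * COIN output
        # spends more than the input provides (negative fee)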

        self.log.info('Send the orphans ... ')
        # Send valid orphan txs from p2ps[0]
        node.p2p.send_txs_and_test([tx_orphan_1, tx_orphan_2_no_fee, tx_orphan_2_valid], node, success=False)
        # Send invalid tx from p2ps[1]
        node.p2ps[1].send_txs_and_test([tx_orphan_2_invalid], node, success=False)

        assert_equal(0, node.getmempoolinfo()['size'])  # Mempool should be empty
        assert_equal(2, len(node.getpeerinfo()))  # p2ps[1] is still connected

        self.log.info('Send the withhold tx ... ')
        node.p2p.send_txs_and_test([tx_withhold], node, success=True)

        # Transactions that should end up in the mempool
        expected_mempool = {
            t.hash
            for t in [
                tx_withhold,  # The transaction that is the root for all orphans
                tx_orphan_1,  # The orphan transaction that splits the coins
                tx_orphan_2_valid,  # The valid transaction (with sufficient fee)
            ]
        }
        # Transactions that do not end up in the mempool
        # tx_orphan_2_no_fee, because it has too low a fee (p2ps[0] is not disconnected for relaying that tx)
        # tx_orphan_2_invalid, because it has a negative fee (p2ps[1] is disconnected for relaying that tx)

        wait_until(lambda: 1 == len(node.getpeerinfo()), timeout=12)  # p2ps[1] is no longer connected
        assert_equal(expected_mempool, set(node.getrawmempool()))

        # restart node with sending BIP61 messages disabled, check that it disconnects without sending the reject message
        self.log.info('Test a transaction that is rejected, with BIP61 disabled')
        self.restart_node(0, ['-enablebip61=0','-persistmempool=0'])
        self.reconnect_p2p(num_connections=1)
        node.p2p.send_txs_and_test([tx1], node, success=False, expect_disconnect=True)
        # send_txs_and_test will have waited for disconnect, so we can safely check that no reject has been received
        assert_equal(node.p2p.reject_code_received, None)
Example #7
    def get_tests(self):
        if self.tip is None:
            self.tip = int(self.nodes[0].getbestblockhash(), 16)
        self.block_time = int(time.time())+1

        '''
        Create a new block with an anyone-can-spend coinbase
        '''
        block = create_block(self.tip, create_coinbase(), self.block_time)
        self.block_time += 1
        block.solve()
        # Save the coinbase for later
        self.block1 = block
        self.tip = block.sha256
        yield TestInstance([[block, True]])

        '''
        Now we need that block to mature so we can spend the coinbase.
        '''
        test = TestInstance(sync_every_block=False)
        for i in range(100):
            block = create_block(self.tip, create_coinbase(), self.block_time)
            block.solve()
            self.tip = block.sha256
            self.block_time += 1
            test.blocks_and_transactions.append([block, True])
        yield test

        '''
        Now we use merkle-root malleability to generate an invalid block with
        same blockheader.
        Manufacture a block with 3 transactions (coinbase, spend of prior
        coinbase, spend of that spend).  Duplicate the 3rd transaction to 
        leave merkle root and blockheader unchanged but invalidate the block.
        '''
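        # Merkle hashing pairs an odd leaf with itself, so duplicating the last
        # transaction leaves the merkle root (and therefore the block header and
        # its hash) unchanged while making the block invalid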
        block2 = create_block(self.tip, create_coinbase(), self.block_time)
        self.block_time += 1

        # b'\x51' is OP_TRUE
        tx1 = create_transaction(self.block1.vtx[0], 0, b'\x51', 40 * 100000000)
        tx2 = create_transaction(tx1, 0, b'\x51', 40 * 100000000)

        block2.vtx.extend([tx1, tx2])
        block2.hashMerkleRoot = block2.calc_merkle_root()
        block2.rehash()
        block2.solve()
        orig_hash = block2.sha256
        block2_orig = copy.deepcopy(block2)

        # Mutate block 2
        block2.vtx.append(tx2)
        assert_equal(block2.hashMerkleRoot, block2.calc_merkle_root())
        assert_equal(orig_hash, block2.rehash())
        assert block2_orig.vtx != block2.vtx

        self.tip = block2.sha256
        yield TestInstance([[block2, False], [block2_orig, True]])

        '''
        Make sure that a totally screwed up block is not valid.
        '''
        block3 = create_block(self.tip, create_coinbase(), self.block_time)
        self.block_time += 1
        block3.vtx[0].vout[0].nValue = 100*100000000 # Too high!
        block3.vtx[0].sha256=None
        block3.vtx[0].calc_sha256()
        block3.hashMerkleRoot = block3.calc_merkle_root()
        block3.rehash()
        block3.solve()

        yield TestInstance([[block3, False]])
    def run_test(self):

        self.stop_node(0)
        with self.run_node_with_connections("test getblocktemplate RPC call",
                                            0, ["-blockmintxfee=0.0000001"],
                                            1) as connections:

            connection = connections[0]

            # Preparation.
            self.chain.set_genesis_hash(
                int(self.nodes[0].getbestblockhash(), 16))
            starting_blocks = 101
            block_count = 0
            for i in range(starting_blocks):
                block = self.chain.next_block(block_count)
                block_count += 1
                self.chain.save_spendable_output()
                connection.cb.send_message(msg_block(block))
            out = []
            for i in range(starting_blocks):
                out.append(self.chain.get_spendable_output())
            self.nodes[0].waitforblockheight(starting_blocks)

            # Create and send 2 transactions.
            transactions = []
            for i in range(2):
                tx = create_transaction(out[i].tx, out[i].n, b"", 100000,
                                        CScript([OP_TRUE]))
                connection.cb.send_message(msg_tx(tx))
                transactions.append(tx)
            self.check_mempool(self.nodes[0], transactions)

            # Create a large transaction that depends on the previous two transactions.
            # If the transaction's scriptPubKey carries BUFFER_SIZE_HttpTextWriter / 2 bytes of data,
            # the final getblocktemplate response is guaranteed to be chunked.
            largeTx = self.createLargeTransaction(
                int(BUFFER_SIZE_HttpTextWriter / 2), transactions)
            connection.cb.send_message(msg_tx(largeTx))
            self.check_mempool(self.nodes[0], [largeTx])

            # Check getblocktemplate response.
            template = self.nodes[0].getblocktemplate()
            self.checkBlockTemplate(template, transactions, largeTx)

            # Check the getblocktemplate response to an invalid block proposal
            block = self.create_bad_block(template)
            rsp = self.nodes[0].getblocktemplate({
                'data': b2x(block.serialize()),
                'mode': 'proposal'
            })
            assert_equal(rsp, "inconclusive-not-best-prevblk")

            assert_raises_rpc_error(-22, "Block decode failed",
                                    self.nodes[0].getblocktemplate, {
                                        'data': b2x(block.serialize()[:-1]),
                                        'mode': 'proposal'
                                    })

            # Test getblocktemplate in a batch
            batch = self.nodes[0].batch([
                self.nodes[0].getblockbyheight.get_request(100),
                self.nodes[0].getblocktemplate.get_request(),
                self.nodes[0].getblockcount.get_request(),
                self.nodes[0].undefinedmethod.get_request()
            ])

            assert_equal(batch[0]["error"], None)
            assert_equal(batch[1]["error"], None)
            assert_equal(batch[1]["result"], template)
            assert_equal(batch[2]["error"], None)
            assert_equal(batch[3]["error"]["message"], "Method not found")
            assert_equal(batch[3]["result"], None)
Example #9
    def run_test(self):
        self.address = self.nodes[0].getnewaddress()
        self.ms_address = self.nodes[0].addmultisigaddress(
            1, [self.address])['address']
        self.wit_address = self.nodes[0].getnewaddress(
            address_type='p2sh-segwit')
        self.wit_ms_address = self.nodes[0].addmultisigaddress(
            1, [self.address], '', 'p2sh-segwit')['address']

        self.coinbase_blocks = self.nodes[0].generate(2)  # Block 2
        coinbase_txid = []
        for i in self.coinbase_blocks:
            coinbase_txid.append(self.nodes[0].getblock(i)['tx'][0])
        self.nodes[0].generate(427)  # Block 429
        self.lastblockhash = self.nodes[0].getbestblockhash()
        self.tip = int("0x" + self.lastblockhash, 0)
        self.lastblockheight = 429
        self.lastblocktime = int(time.time()) + 429

        self.log.info(
            "Test 1: NULLDUMMY compliant base transactions should be accepted to mempool and mined before activation [430]"
        )
        test1txs = [
            create_transaction(self.nodes[0],
                               coinbase_txid[0],
                               self.ms_address,
                               amount=49)
        ]
        txid1 = self.nodes[0].sendrawtransaction(
            test1txs[0].serialize_with_witness().hex(), 0)
        test1txs.append(
            create_transaction(self.nodes[0],
                               txid1,
                               self.ms_address,
                               amount=48))
        txid2 = self.nodes[0].sendrawtransaction(
            test1txs[1].serialize_with_witness().hex(), 0)
        test1txs.append(
            create_transaction(self.nodes[0],
                               coinbase_txid[1],
                               self.wit_ms_address,
                               amount=49))
        txid3 = self.nodes[0].sendrawtransaction(
            test1txs[2].serialize_with_witness().hex(), 0)
        self.block_submit(self.nodes[0], test1txs, False, True)

        self.log.info(
            "Test 2: Non-NULLDUMMY base multisig transaction should not be accepted to mempool before activation"
        )
        test2tx = create_transaction(self.nodes[0],
                                     txid2,
                                     self.ms_address,
                                     amount=47)
        trueDummy(test2tx)
        assert_raises_rpc_error(-26, NULLDUMMY_ERROR,
                                self.nodes[0].sendrawtransaction,
                                test2tx.serialize_with_witness().hex(), 0)

        self.log.info(
            "Test 3: Non-NULLDUMMY base transactions should be accepted in a block before activation [431]"
        )
        self.block_submit(self.nodes[0], [test2tx], False, True)

        self.log.info(
            "Test 4: Non-NULLDUMMY base multisig transaction is invalid after activation"
        )
        test4tx = create_transaction(self.nodes[0],
                                     test2tx.hash,
                                     self.address,
                                     amount=46)
        test6txs = [CTransaction(test4tx)]
        trueDummy(test4tx)
        assert_raises_rpc_error(-26, NULLDUMMY_ERROR,
                                self.nodes[0].sendrawtransaction,
                                test4tx.serialize_with_witness().hex(), 0)
        self.block_submit(self.nodes[0], [test4tx])

        self.log.info(
            "Test 5: Non-NULLDUMMY P2WSH multisig transaction invalid after activation"
        )
        test5tx = create_transaction(self.nodes[0],
                                     txid3,
                                     self.wit_address,
                                     amount=48)
        test6txs.append(CTransaction(test5tx))
        test5tx.wit.vtxinwit[0].scriptWitness.stack[0] = b'\x01'
        assert_raises_rpc_error(-26, NULLDUMMY_ERROR,
                                self.nodes[0].sendrawtransaction,
                                test5tx.serialize_with_witness().hex(), 0)
        self.block_submit(self.nodes[0], [test5tx], True)

        self.log.info(
            "Test 6: NULLDUMMY compliant base/witness transactions should be accepted to mempool and in block after activation [432]"
        )
        for i in test6txs:
            self.nodes[0].sendrawtransaction(i.serialize_with_witness().hex(),
                                             0)
        self.block_submit(self.nodes[0], test6txs, True, True)
Example #10
    def run_test(self):
        self.nodes[0].add_p2p_connection(P2PDataStore())

        self.log.info("Generate blocks in the past for coinbase outputs.")
        start_time = 1510247077 + 600 * 1000 + 101
        long_past_time = start_time - 600 * 1000  # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future
        self.nodes[0].setmocktime(
            long_past_time - 100
        )  # enough so that the generated blocks will still all be before long_past_time
        self.coinbase_blocks = self.nodes[0].generate(
            1 + 16 + 2 * 32 + 1)  # 82 blocks generated for inputs
        self.nodes[0].setmocktime(
            0
        )  # set time back to present so yielded blocks aren't in the future as we advance last_block_time
        self.tipheight = 82  # height of the next block to build
        self.last_block_time = long_past_time
        self.tip = int(self.nodes[0].getbestblockhash(), 16)
        self.nodeaddress = self.nodes[0].getnewaddress()

        # Activation height is hardcoded
        test_blocks = self.generate_blocks(345)
        self.send_blocks(test_blocks)
        assert not softfork_active(self.nodes[0], 'csv')

        # Inputs at height = 431
        #
        # Put inputs for all tests in the chain at height 431 (tip now = 430) (time increases by 600s per block)
        # Note we reuse inputs for v1 and v2 txs so must test these separately
        # 16 normal inputs
        bip68inputs = []
        for i in range(16):
            bip68inputs.append(
                send_generic_unspent_input_tx(self.nodes[0],
                                              self.unspents.pop(),
                                              self.nodeaddress))

        # 2 sets of 16 inputs with 10 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112basicinputs = []
        for j in range(2):
            inputs = []
            for i in range(16):
                inputs.append(
                    send_generic_unspent_input_tx(self.nodes[0],
                                                  self.unspents.pop(),
                                                  self.nodeaddress))
            bip112basicinputs.append(inputs)

        # 2 sets of 16 varied inputs with (relative_lock_time) OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112diverseinputs = []
        for j in range(2):
            inputs = []
            for i in range(16):
                inputs.append(
                    send_generic_unspent_input_tx(self.nodes[0],
                                                  self.unspents.pop(),
                                                  self.nodeaddress))
            bip112diverseinputs.append(inputs)

        # 1 special input with -1 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112specialinput = send_generic_unspent_input_tx(
            self.nodes[0], self.unspents.pop(), self.nodeaddress)

        # 1 normal input
        bip113input = send_generic_unspent_input_tx(self.nodes[0],
                                                    self.unspents.pop(),
                                                    self.nodeaddress)

        self.nodes[0].setmocktime(self.last_block_time + 600)
        inputblockhash = self.nodes[0].generate(1)[
            0]  # 1 block generated for inputs to be in chain at height 431
        self.nodes[0].setmocktime(0)
        self.tip = int(inputblockhash, 16)
        self.tipheight += 1
        self.last_block_time += 600
        assert_equal(len(self.nodes[0].getblock(inputblockhash, True)["tx"]),
                     82 + 1)

        # 2 more version 4 blocks
        test_blocks = self.generate_blocks(2)
        self.send_blocks(test_blocks)

        assert_equal(self.tipheight, CSV_ACTIVATION_HEIGHT - 2)
        self.log.info(
            "Height = {}, CSV not yet active (will activate for block {}, not {})"
            .format(self.tipheight, CSV_ACTIVATION_HEIGHT,
                    CSV_ACTIVATION_HEIGHT - 1))
        assert not softfork_active(self.nodes[0], 'csv')

        # Test both version 1 and version 2 transactions for all tests
        # BIP113 test transaction will be modified before each use to put in appropriate block time
        bip113tx_v1 = create_transaction(self.nodes[0],
                                         bip113input,
                                         self.nodeaddress,
                                         amount=Decimal("49.98"))
        bip113tx_v1.vin[0].nSequence = 0xFFFFFFFE
        bip113tx_v1.nVersion = 1
        bip113tx_v2 = create_transaction(self.nodes[0],
                                         bip113input,
                                         self.nodeaddress,
                                         amount=Decimal("49.98"))
        bip113tx_v2.vin[0].nSequence = 0xFFFFFFFE
        bip113tx_v2.nVersion = 2

        # For BIP68 test all 16 relative sequence locktimes
        bip68txs_v1 = create_bip68txs(self.nodes[0], bip68inputs, 1,
                                      self.nodeaddress)
        bip68txs_v2 = create_bip68txs(self.nodes[0], bip68inputs, 2,
                                      self.nodeaddress)

        # For BIP112 test:
        # 16 relative sequence locktimes of 10 against 10 OP_CSV OP_DROP inputs
        bip112txs_vary_nSequence_v1 = create_bip112txs(self.nodes[0],
                                                       bip112basicinputs[0],
                                                       False, 1,
                                                       self.nodeaddress)
        bip112txs_vary_nSequence_v2 = create_bip112txs(self.nodes[0],
                                                       bip112basicinputs[0],
                                                       False, 2,
                                                       self.nodeaddress)
        # 16 relative sequence locktimes of 9 against 10 OP_CSV OP_DROP inputs
        bip112txs_vary_nSequence_9_v1 = create_bip112txs(
            self.nodes[0], bip112basicinputs[1], False, 1, self.nodeaddress,
            -1)
        bip112txs_vary_nSequence_9_v2 = create_bip112txs(
            self.nodes[0], bip112basicinputs[1], False, 2, self.nodeaddress,
            -1)
        # sequence lock time of 10 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
        bip112txs_vary_OP_CSV_v1 = create_bip112txs(self.nodes[0],
                                                    bip112diverseinputs[0],
                                                    True, 1, self.nodeaddress)
        bip112txs_vary_OP_CSV_v2 = create_bip112txs(self.nodes[0],
                                                    bip112diverseinputs[0],
                                                    True, 2, self.nodeaddress)
        # sequence lock time of 9 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
        bip112txs_vary_OP_CSV_9_v1 = create_bip112txs(self.nodes[0],
                                                      bip112diverseinputs[1],
                                                      True, 1,
                                                      self.nodeaddress, -1)
        bip112txs_vary_OP_CSV_9_v2 = create_bip112txs(self.nodes[0],
                                                      bip112diverseinputs[1],
                                                      True, 2,
                                                      self.nodeaddress, -1)
        # -1 OP_CSV OP_DROP input
        bip112tx_special_v1 = create_bip112special(self.nodes[0],
                                                   bip112specialinput, 1,
                                                   self.nodeaddress)
        bip112tx_special_v2 = create_bip112special(self.nodes[0],
                                                   bip112specialinput, 2,
                                                   self.nodeaddress)

        self.log.info("TESTING")

        self.log.info("Pre-Soft Fork Tests. All txs should pass.")
        self.log.info("Test version 1 txs")

        success_txs = []
        # add BIP113 tx and -1 CSV tx
        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
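        # Blocks in this test are 600 seconds apart, so the median of the tip's
        # last 11 block times is last_block_time - 5 * 600, which is why this
        # nLockTime equals (rather than precedes) the MTP of the prior block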
        bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
        success_txs.append(bip113signed1)
        success_txs.append(bip112tx_special_v1)
        # add BIP 68 txs
        success_txs.extend(all_rlt_txs(bip68txs_v1))
        # add BIP 112 with seq=10 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v1))
        # try BIP 112 with seq=9 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1))
        self.send_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        self.log.info("Test version 2 txs")

        success_txs = []
        # add BIP113 tx and -1 CSV tx
        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
        success_txs.append(bip113signed2)
        success_txs.append(bip112tx_special_v2)
        # add BIP 68 txs
        success_txs.extend(all_rlt_txs(bip68txs_v2))
        # add BIP 112 with seq=10 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v2))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v2))
        # try BIP 112 with seq=9 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2))
        self.send_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # 1 more version 4 block to get us to height 432 so the fork should now be active for the next block
        assert not softfork_active(self.nodes[0], 'csv')
        test_blocks = self.generate_blocks(1)
        self.send_blocks(test_blocks)
        assert softfork_active(self.nodes[0], 'csv')

        self.log.info("Post-Soft Fork Tests.")

        self.log.info("BIP 113 tests")
        # BIP 113 tests should now fail regardless of version number if nLockTime isn't satisfied by new rules
        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
        for bip113tx in [bip113signed1, bip113signed2]:
            self.send_blocks([self.create_test_block([bip113tx])],
                             success=False)
        # BIP 113 tests should now pass if the locktime is < MTP
        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1  # < MTP of prior block
        bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1  # < MTP of prior block
        bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
        for bip113tx in [bip113signed1, bip113signed2]:
            self.send_blocks([self.create_test_block([bip113tx])])
            self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # Next block height = 437 after 4 blocks of random version
        test_blocks = self.generate_blocks(4)
        self.send_blocks(test_blocks)

        self.log.info("BIP 68 tests")
        self.log.info("Test version 1 txs - all should still pass")

        success_txs = []
        success_txs.extend(all_rlt_txs(bip68txs_v1))
        self.send_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        self.log.info("Test version 2 txs")

        # All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass
        bip68success_txs = [tx['tx'] for tx in bip68txs_v2 if tx['sdf']]
        self.send_blocks([self.create_test_block(bip68success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # All txs without flag fail as we are at delta height = 8 < 10 and delta time = 8 * 600 < 10 * 512
        bip68timetxs = [
            tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and tx['stf']
        ]
        for tx in bip68timetxs:
            self.send_blocks([self.create_test_block([tx])], success=False)

        bip68heighttxs = [
            tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and not tx['stf']
        ]
        for tx in bip68heighttxs:
            self.send_blocks([self.create_test_block([tx])], success=False)

        # Advance one block to 438
        test_blocks = self.generate_blocks(1)
        self.send_blocks(test_blocks)

        # Height txs should fail and time txs should now pass 9 * 600 > 10 * 512
        bip68success_txs.extend(bip68timetxs)
        self.send_blocks([self.create_test_block(bip68success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        for tx in bip68heighttxs:
            self.send_blocks([self.create_test_block([tx])], success=False)

        # Advance one block to 439
        test_blocks = self.generate_blocks(1)
        self.send_blocks(test_blocks)

        # All BIP 68 txs should pass
        bip68success_txs.extend(bip68heighttxs)
        self.send_blocks([self.create_test_block(bip68success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        self.log.info("BIP 112 tests")
        self.log.info("Test version 1 txs")

        # -1 OP_CSV tx should fail
        self.send_blocks([self.create_test_block([bip112tx_special_v1])],
                         success=False)
        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass

        success_txs = [
            tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if tx['sdf']
        ]
        success_txs += [
            tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if tx['sdf']
        ]
        self.send_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, version 1 txs should now fail
        fail_txs = all_rlt_txs(bip112txs_vary_nSequence_v1)
        fail_txs += all_rlt_txs(bip112txs_vary_nSequence_9_v1)
        fail_txs += [
            tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if not tx['sdf']
        ]
        fail_txs += [
            tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if not tx['sdf']
        ]
        for tx in fail_txs:
            self.send_blocks([self.create_test_block([tx])], success=False)

        self.log.info("Test version 2 txs")

        # -1 OP_CSV tx should fail
        self.send_blocks([self.create_test_block([bip112tx_special_v2])],
                         success=False)

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met)
        success_txs = [
            tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if tx['sdf']
        ]
        success_txs += [
            tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if tx['sdf']
        ]

        self.send_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs ##

        # All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check
        fail_txs = all_rlt_txs(bip112txs_vary_nSequence_9_v2)
        fail_txs += [
            tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if not tx['sdf']
        ]
        for tx in fail_txs:
            self.send_blocks([self.create_test_block([tx])], success=False)

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail
        fail_txs = [
            tx['tx'] for tx in bip112txs_vary_nSequence_v2 if tx['sdf']
        ]
        for tx in fail_txs:
            self.send_blocks([self.create_test_block([tx])], success=False)

        # If sequencelock types mismatch, tx should fail
        fail_txs = [
            tx['tx'] for tx in bip112txs_vary_nSequence_v2
            if not tx['sdf'] and tx['stf']
        ]
        fail_txs += [
            tx['tx'] for tx in bip112txs_vary_OP_CSV_v2
            if not tx['sdf'] and tx['stf']
        ]
        for tx in fail_txs:
            self.send_blocks([self.create_test_block([tx])], success=False)

        # Remaining txs should pass, just test masking works properly
        success_txs = [
            tx['tx'] for tx in bip112txs_vary_nSequence_v2
            if not tx['sdf'] and not tx['stf']
        ]
        success_txs += [
            tx['tx'] for tx in bip112txs_vary_OP_CSV_v2
            if not tx['sdf'] and not tx['stf']
        ]
        self.send_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # Additional test, of checking that comparison of two time types works properly
        time_txs = []
        for tx in [
                tx['tx'] for tx in bip112txs_vary_OP_CSV_v2
                if not tx['sdf'] and tx['stf']
        ]:
            tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME | SEQ_TYPE_FLAG
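            # OR-ing in SEQ_TYPE_FLAG turns the nSequence lock into a time-based
            # lock, matching the time-based locktime in the OP_CSV argument so
            # that the comparison of two time-type locks is exercised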
            signtx = sign_transaction(self.nodes[0], tx)
            time_txs.append(signtx)

        self.send_blocks([self.create_test_block(time_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
Example #11
    def run_test(self):
        genesis_key = default_config["GENESIS_PRI_KEY"]
        balance_map = {genesis_key: default_config["TOTAL_COIN"]}
        self.log.info("Initial State: (sk:%d, addr:%s, balance:%d)",
                      bytes_to_int(genesis_key),
                      eth_utils.encode_hex(privtoaddr(genesis_key)),
                      balance_map[genesis_key])
        nonce_map = {genesis_key: 0}
        '''Check if transaction from uncommitted new address can be accepted'''
        tx_n = 5
        receiver_sk = genesis_key
        gas_price = 1
        for i in range(tx_n):
            sender_key = receiver_sk
            value = int((balance_map[sender_key] -
                         ((tx_n - i) * 21000 * gas_price)) * random.random())
            nonce = nonce_map[sender_key]
            receiver_sk, _ = ec_random_keys()
            nonce_map[receiver_sk] = 0
            balance_map[receiver_sk] = value
            tx = create_transaction(pri_key=sender_key,
                                    receiver=privtoaddr(receiver_sk),
                                    value=value,
                                    nonce=nonce,
                                    gas_price=gas_price)
            r = random.randint(0, self.num_nodes - 1)
            self.nodes[r].p2p.send_protocol_msg(
                Transactions(transactions=[tx]))
            nonce_map[sender_key] = nonce + 1
            balance_map[sender_key] -= value + gas_price * 21000
            self.log.debug(
                "New tx %s: %s send value %d to %s, sender balance:%d, receiver balance:%d",
                encode_hex(tx.hash),
                eth_utils.encode_hex(privtoaddr(sender_key))[-4:], value,
                eth_utils.encode_hex(privtoaddr(receiver_sk))[-4:],
                balance_map[sender_key], balance_map[receiver_sk])
            self.log.debug("Send Transaction %s to node %d",
                           encode_hex(tx.hash), r)
            time.sleep(random.random() / 100)
        block_gen_thread = BlockGenThread(self.nodes,
                                          self.log,
                                          interval_base=0.2)
        block_gen_thread.start()
        for k in balance_map:
            self.log.info("Check account sk:%s addr:%s", bytes_to_int(k),
                          eth_utils.encode_hex(privtoaddr(k)))
            wait_until(lambda: self.check_account(k, balance_map))
        self.log.info("Pass 1")
        '''Test Random Transactions'''
        all_txs = []
        tx_n = 1000
        self.log.info(
            "Start generating %d transactions; this should take about %d seconds",
            tx_n, tx_n / 100 / 2)
        for i in range(tx_n):
            sender_key = random.choice(list(balance_map))
            nonce = nonce_map[sender_key]
            data = b''
            rand_n = random.random()
            gas = 21000
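            # ~10% of txs fund a brand-new random account with half the sender's
            # balance, ~10% deploy a small contract (raw bytecode below, with a
            # larger gas limit), and the rest send a value of 1 to an existing account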
            if rand_n < 0.1 and balance_map[sender_key] > 21000 * 4 * tx_n:
                value = int(balance_map[sender_key] * 0.5)
                receiver_sk, _ = ec_random_keys()
                receiver = privtoaddr(receiver_sk)
                nonce_map[receiver_sk] = 0
                balance_map[receiver_sk] = value
            elif rand_n > 0.9 and balance_map[sender_key] > 21000 * 4 * tx_n:
                value = 0
                receiver = b''
                data = bytes([
                    96, 128, 96, 64, 82, 52, 128, 21, 97, 0, 16, 87, 96, 0,
                    128, 253, 91, 80, 96, 5, 96, 0, 129, 144, 85, 80, 96, 230,
                    128, 97, 0, 39, 96, 0, 57, 96, 0, 243, 254, 96, 128, 96,
                    64, 82, 96, 4, 54, 16, 96, 67, 87, 96, 0, 53, 124, 1, 0, 0,
                    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                    0, 0, 0, 0, 0, 0, 144, 4, 128, 99, 96, 254, 71, 177, 20,
                    96, 72, 87, 128, 99, 109, 76, 230, 60, 20, 96, 127, 87, 91,
                    96, 0, 128, 253, 91, 52, 128, 21, 96, 83, 87, 96, 0, 128,
                    253, 91, 80, 96, 125, 96, 4, 128, 54, 3, 96, 32, 129, 16,
                    21, 96, 104, 87, 96, 0, 128, 253, 91, 129, 1, 144, 128,
                    128, 53, 144, 96, 32, 1, 144, 146, 145, 144, 80, 80, 80,
                    96, 167, 86, 91, 0, 91, 52, 128, 21, 96, 138, 87, 96, 0,
                    128, 253, 91, 80, 96, 145, 96, 177, 86, 91, 96, 64, 81,
                    128, 130, 129, 82, 96, 32, 1, 145, 80, 80, 96, 64, 81, 128,
                    145, 3, 144, 243, 91, 128, 96, 0, 129, 144, 85, 80, 80, 86,
                    91, 96, 0, 128, 84, 144, 80, 144, 86, 254, 161, 101, 98,
                    122, 122, 114, 48, 88, 32, 181, 24, 13, 149, 253, 195, 129,
                    48, 40, 237, 71, 246, 44, 124, 223, 112, 139, 118, 192,
                    219, 9, 64, 67, 245, 51, 180, 42, 67, 13, 49, 62, 21, 0, 41
                ])
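                # receiver == b'' marks contract creation; the bytes above appear
                # to be the compiled bytecode of a simple storage contract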
                gas = 10000000
            else:
                value = 1
                receiver_sk = random.choice(list(balance_map))
                receiver = privtoaddr(receiver_sk)
                balance_map[receiver_sk] += value
            # not enough transaction fee (gas_price * gas_limit) should not happen for now
            assert balance_map[sender_key] >= value + gas_price * 21000
            tx = create_transaction(pri_key=sender_key,
                                    receiver=receiver,
                                    value=value,
                                    nonce=nonce,
                                    gas_price=gas_price,
                                    data=data,
                                    gas=gas)
            r = random.randint(0, self.num_nodes - 1)
            self.nodes[r].p2p.send_protocol_msg(
                Transactions(transactions=[tx]))
            all_txs.append(tx)
            nonce_map[sender_key] = nonce + 1
            balance_map[sender_key] -= value + gas_price * gas
            self.log.debug(
                "New tx %s: %s send value %d to %s, sender balance:%d, receiver balance:%d nonce:%d",
                encode_hex(tx.hash),
                eth_utils.encode_hex(privtoaddr(sender_key))[-4:], value,
                eth_utils.encode_hex(privtoaddr(receiver_sk))[-4:],
                balance_map[sender_key], balance_map[receiver_sk], nonce)
            self.log.debug("Send Transaction %s to node %d",
                           encode_hex(tx.hash), r)
            time.sleep(random.random() / 100)
        for k in balance_map:
            self.log.info("Account %s with balance:%s", bytes_to_int(k),
                          balance_map[k])
        for tx in all_txs:
            self.log.debug("Wait for tx to confirm %s", tx.hash_hex())
            for i in range(3):
                try:
                    retry = True
                    while retry:
                        try:
                            wait_until(
                                lambda: checktx(self.nodes[0], tx.hash_hex()),
                                timeout=120)
                            retry = False
                        except CannotSendRequest:
                            time.sleep(0.01)
                    break
                except AssertionError as _:
                    self.nodes[0].p2p.send_protocol_msg(
                        Transactions(transactions=[tx]))
                if i == 2:
                    raise AssertionError(
                        "Tx {} not confirmed after 3 attempts".format(
                            tx.hash_hex()))

        for k in balance_map:
            self.log.info("Check account sk:%s addr:%s", bytes_to_int(k),
                          eth_utils.encode_hex(privtoaddr(k)))
            wait_until(lambda: self.check_account(k, balance_map))
        block_gen_thread.stop()
        block_gen_thread.join()
        sync_blocks(self.nodes)
        self.log.info("Pass")
Example #12
    def run_test(self):
        self.nodes[0].add_p2p_connection(P2PDataStore())

        self.log.info("Generate blocks in the past for coinbase outputs.")
        long_past_time = int(time.time()) - 600 * 1000  # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future
        self.nodes[0].setmocktime(long_past_time - 100)  # enough so that the generated blocks will still all be before long_past_time
        self.coinbase_blocks = self.nodes[0].generate(1 + 16 + 2 * 32 + 1)  # 82 blocks generated for inputs
        self.nodes[0].setmocktime(0)  # set time back to present so yielded blocks aren't in the future as we advance last_block_time
        self.tipheight = 82  # height of the next block to build
        self.last_block_time = long_past_time
        self.tip = int(self.nodes[0].getbestblockhash(), 16)
        self.nodeaddress = self.nodes[0].getnewaddress()

        self.log.info("Test that the csv softfork is DEFINED")
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'defined')
        test_blocks = self.generate_blocks(61, 4)
        self.sync_blocks(test_blocks)

        self.log.info("Advance from DEFINED to STARTED, height = 143")
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')

        self.log.info("Fail to achieve LOCKED_IN")
        # 100 out of 144 signal bit 0. Use a variety of bits to simulate multiple parallel softforks
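        # Only 100 of the 144 blocks in this period signal bit 0, which is below
        # the 108-block (75%) regtest threshold, so the deployment stays STARTED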

        test_blocks = self.generate_blocks(50, 536870913)  # 0x20000001 (signalling ready)
        test_blocks = self.generate_blocks(20, 4, test_blocks)  # 0x00000004 (signalling not)
        test_blocks = self.generate_blocks(50, 536871169, test_blocks)  # 0x20000101 (signalling ready)
        test_blocks = self.generate_blocks(24, 536936448, test_blocks)  # 0x20010000 (signalling not)
        self.sync_blocks(test_blocks)

        self.log.info("Failed to advance past STARTED, height = 287")
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')

        self.log.info("Generate blocks to achieve LOCK-IN")
        # 108 out of 144 signal bit 0 to achieve lock-in
        # using a variety of bits to simulate multiple parallel softforks
        test_blocks = self.generate_blocks(58, 536870913)  # 0x20000001 (signalling ready)
        test_blocks = self.generate_blocks(26, 4, test_blocks)  # 0x00000004 (signalling not)
        test_blocks = self.generate_blocks(50, 536871169, test_blocks)  # 0x20000101 (signalling ready)
        test_blocks = self.generate_blocks(10, 536936448, test_blocks)  # 0x20010000 (signalling not)
        self.sync_blocks(test_blocks)

        self.log.info("Advanced from STARTED to LOCKED_IN, height = 431")
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')

        # Generate 140 more version 4 blocks
        test_blocks = self.generate_blocks(140, 4)
        self.sync_blocks(test_blocks)

        # Inputs at height = 572
        #
        # Put inputs for all tests in the chain at height 572 (tip now = 571) (time increases by 600s per block)
        # Note we reuse inputs for v1 and v2 txs so must test these separately
        # 16 normal inputs
        bip68inputs = []
        for i in range(16):
            bip68inputs.append(send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress))

        # 2 sets of 16 inputs with 10 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112basicinputs = []
        for j in range(2):
            inputs = []
            for i in range(16):
                inputs.append(send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress))
            bip112basicinputs.append(inputs)

        # 2 sets of 16 varied inputs with (relative_lock_time) OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112diverseinputs = []
        for j in range(2):
            inputs = []
            for i in range(16):
                inputs.append(send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress))
            bip112diverseinputs.append(inputs)

        # 1 special input with -1 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112specialinput = send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress)

        # 1 normal input
        bip113input = send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress)

        self.nodes[0].setmocktime(self.last_block_time + 600)
        inputblockhash = self.nodes[0].generate(1)[0]  # 1 block generated for inputs to be in chain at height 572
        self.nodes[0].setmocktime(0)
        self.tip = int(inputblockhash, 16)
        self.tipheight += 1
        self.last_block_time += 600
        assert_equal(len(self.nodes[0].getblock(inputblockhash, True)["tx"]), 82 + 1)

        # 2 more version 4 blocks
        test_blocks = self.generate_blocks(2, 4)
        self.sync_blocks(test_blocks)

        self.log.info("Not yet advanced to ACTIVE, height = 574 (will activate for block 576, not 575)")
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')

        # Test both version 1 and version 2 transactions for all tests
        # BIP113 test transaction will be modified before each use to put in appropriate block time
        bip113tx_v1 = create_transaction(self.nodes[0], bip113input, self.nodeaddress, amount=Decimal("49.98"))
        bip113tx_v1.vin[0].nSequence = 0xFFFFFFFE
        bip113tx_v1.nVersion = 1
        bip113tx_v2 = create_transaction(self.nodes[0], bip113input, self.nodeaddress, amount=Decimal("49.98"))
        bip113tx_v2.vin[0].nSequence = 0xFFFFFFFE
        bip113tx_v2.nVersion = 2

        # For BIP68 test all 16 relative sequence locktimes
        bip68txs_v1 = create_bip68txs(self.nodes[0], bip68inputs, 1, self.nodeaddress)
        bip68txs_v2 = create_bip68txs(self.nodes[0], bip68inputs, 2, self.nodeaddress)

        # For BIP112 test:
        # 16 relative sequence locktimes of 10 against 10 OP_CSV OP_DROP inputs
        bip112txs_vary_nSequence_v1 = create_bip112txs(self.nodes[0], bip112basicinputs[0], False, 1, self.nodeaddress)
        bip112txs_vary_nSequence_v2 = create_bip112txs(self.nodes[0], bip112basicinputs[0], False, 2, self.nodeaddress)
        # 16 relative sequence locktimes of 9 against 10 OP_CSV OP_DROP inputs
        bip112txs_vary_nSequence_9_v1 = create_bip112txs(self.nodes[0], bip112basicinputs[1], False, 1, self.nodeaddress, -1)
        bip112txs_vary_nSequence_9_v2 = create_bip112txs(self.nodes[0], bip112basicinputs[1], False, 2, self.nodeaddress, -1)
        # sequence lock time of 10 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
        bip112txs_vary_OP_CSV_v1 = create_bip112txs(self.nodes[0], bip112diverseinputs[0], True, 1, self.nodeaddress)
        bip112txs_vary_OP_CSV_v2 = create_bip112txs(self.nodes[0], bip112diverseinputs[0], True, 2, self.nodeaddress)
        # sequence lock time of 9 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
        bip112txs_vary_OP_CSV_9_v1 = create_bip112txs(self.nodes[0], bip112diverseinputs[1], True, 1, self.nodeaddress, -1)
        bip112txs_vary_OP_CSV_9_v2 = create_bip112txs(self.nodes[0], bip112diverseinputs[1], True, 2, self.nodeaddress, -1)
        # -1 OP_CSV OP_DROP input
        bip112tx_special_v1 = create_bip112special(self.nodes[0], bip112specialinput, 1, self.nodeaddress)
        bip112tx_special_v2 = create_bip112special(self.nodes[0], bip112specialinput, 2, self.nodeaddress)

        self.log.info("TESTING")

        self.log.info("Pre-Soft Fork Tests. All txs should pass.")
        self.log.info("Test version 1 txs")

        success_txs = []
        # add BIP113 tx and -1 CSV tx
        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
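        # (Blocks in this test are spaced 600 seconds apart, so the median-time-past of the
        #  prior block -- the median of its last 11 block timestamps -- is its 6th-newest
        #  timestamp, i.e. self.last_block_time - 5 * 600.)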
        bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
        success_txs.append(bip113signed1)
        success_txs.append(bip112tx_special_v1)
        # add BIP 68 txs
        success_txs.extend(all_rlt_txs(bip68txs_v1))
        # add BIP 112 with seq=10 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v1))
        # try BIP 112 with seq=9 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1))
        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        self.log.info("Test version 2 txs")

        success_txs = []
        # add BIP113 tx and -1 CSV tx
        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
        success_txs.append(bip113signed2)
        success_txs.append(bip112tx_special_v2)
        # add BIP 68 txs
        success_txs.extend(all_rlt_txs(bip68txs_v2))
        # add BIP 112 with seq=10 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v2))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v2))
        # try BIP 112 with seq=9 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2))
        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # 1 more version 4 block to get us to height 575 so the fork should now be active for the next block
        test_blocks = self.generate_blocks(1, 4)
        self.sync_blocks(test_blocks)
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'active')

        self.log.info("Post-Soft Fork Tests.")

        self.log.info("BIP 113 tests")
        # BIP 113 tests should now fail regardless of version number if nLockTime isn't satisfied by new rules
        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
        for bip113tx in [bip113signed1, bip113signed2]:
            self.sync_blocks([self.create_test_block([bip113tx])], success=False)
        # BIP 113 tests should now pass if the locktime is < MTP
        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1  # < MTP of prior block
        bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1  # < MTP of prior block
        bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
        for bip113tx in [bip113signed1, bip113signed2]:
            self.sync_blocks([self.create_test_block([bip113tx])])
            self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # Next block height = 580 after 4 blocks of random version
        test_blocks = self.generate_blocks(4, 1234)
        self.sync_blocks(test_blocks)

        self.log.info("BIP 68 tests")
        self.log.info("Test version 1 txs - all should still pass")

        success_txs = []
        success_txs.extend(all_rlt_txs(bip68txs_v1))
        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        self.log.info("Test version 2 txs")

        # All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass
        bip68success_txs = [tx['tx'] for tx in bip68txs_v2 if tx['sdf']]
        self.sync_blocks([self.create_test_block(bip68success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # All txs without the disable flag fail, since we are at delta height = 8 < 10 and delta time = 8 * 600 = 4800 < 10 * 512 = 5120
        bip68timetxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and tx['stf']]
        for tx in bip68timetxs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        bip68heighttxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and not tx['stf']]
        for tx in bip68heighttxs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        # Advance one block to 581
        test_blocks = self.generate_blocks(1, 1234)
        self.sync_blocks(test_blocks)

        # Height txs should still fail; time txs should now pass, since 9 * 600 = 5400 > 10 * 512 = 5120
        bip68success_txs.extend(bip68timetxs)
        self.sync_blocks([self.create_test_block(bip68success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        for tx in bip68heighttxs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        # Advance one block to 582
        test_blocks = self.generate_blocks(1, 1234)
        self.sync_blocks(test_blocks)

        # All BIP 68 txs should pass
        bip68success_txs.extend(bip68heighttxs)
        self.sync_blocks([self.create_test_block(bip68success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        self.log.info("BIP 112 tests")
        self.log.info("Test version 1 txs")

        # -1 OP_CSV tx should fail
        self.sync_blocks([self.create_test_block([bip112tx_special_v1])], success=False)
        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass

        success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if tx['sdf']]
        success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if tx['sdf']]
        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, version 1 txs should now fail
        fail_txs = all_rlt_txs(bip112txs_vary_nSequence_v1)
        fail_txs += all_rlt_txs(bip112txs_vary_nSequence_9_v1)
        fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if not tx['sdf']]
        fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if not tx['sdf']]
        for tx in fail_txs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        self.log.info("Test version 2 txs")

        # -1 OP_CSV tx should fail
        self.sync_blocks([self.create_test_block([bip112tx_special_v2])], success=False)

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met)
        success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if tx['sdf']]
        success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if tx['sdf']]

        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in the argument to OP_CSV for all remaining txs

        # All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check
        fail_txs = all_rlt_txs(bip112txs_vary_nSequence_9_v2)
        fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if not tx['sdf']]
        for tx in fail_txs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail
        fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if tx['sdf']]
        for tx in fail_txs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        # If sequencelock types mismatch, tx should fail
        fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and tx['stf']]
        fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']]
        for tx in fail_txs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        # Remaining txs should pass, just test masking works properly
        success_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and not tx['stf']]
        success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and not tx['stf']]
        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # Additional test, of checking that comparison of two time types works properly
        time_txs = []
        for tx in [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']]:
            tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME | SEQ_TYPE_FLAG
            signtx = sign_transaction(self.nodes[0], tx)
            time_txs.append(signtx)

        self.sync_blocks([self.create_test_block(time_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
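
The sdf/stf flags threaded through the test above are bits of the BIP68 relative
lock-time encoding carried in each input's nSequence. A minimal illustrative sketch of
that encoding (the constant names mirror the ones the test imports; encode_relative_locktime
is a hypothetical helper, not part of the scraped example):

SEQUENCE_LOCKTIME_DISABLE_FLAG = 1 << 31  # bit 31 set ("sdf"): relative lock-time not enforced
SEQ_TYPE_FLAG = 1 << 22                   # bit 22 set ("stf"): time-based lock in 512-second units, else height-based
SEQUENCE_LOCKTIME_MASK = 0x0000ffff       # low 16 bits: the lock value itself
BASE_RELATIVE_LOCKTIME = 10               # lock value used throughout the test above

def encode_relative_locktime(value, time_based=False, disabled=False):
    """Pack a relative lock-time value into an nSequence integer (illustrative)."""
    nsequence = value & SEQUENCE_LOCKTIME_MASK
    if time_based:
        nsequence |= SEQ_TYPE_FLAG
    if disabled:
        nsequence |= SEQUENCE_LOCKTIME_DISABLE_FLAG
    return nsequence

# encode_relative_locktime(10)                  == 0x0000000a  (wait 10 blocks)
# encode_relative_locktime(10, time_based=True) == 0x0040000a  (wait 10 * 512 seconds)
# encode_relative_locktime(10, disabled=True)   == 0x8000000a  (lock not enforced)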
Example #13
    def run_test(self):
        self.nodes[0].add_p2p_connection(P2PInterface())

        self.log.info("Mining {} blocks".format(DERSIG_HEIGHT - 1))
        self.coinbase_txids = [
            self.nodes[0].getblock(b)['tx'][0]
            for b in self.nodes[0].generate(DERSIG_HEIGHT - 1)
        ]
        self.nodeaddress = self.nodes[0].getnewaddress()

        self.log.info(
            "Test that transactions with non-DER signatures cannot appear in a block"
        )
        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(DERSIG_HEIGHT),
                             block_time)
        block.nHeight = DERSIG_HEIGHT
        spendtx = create_transaction(self.nodes[0],
                                     self.coinbase_txids[1],
                                     self.nodeaddress,
                                     amount=1.0,
                                     vout=1)
        unDERify(spendtx)
        spendtx.rehash()

        # First we show that this tx is valid except for DERSIG by getting it
        # rejected from the mempool for exactly that reason.
        assert_equal([{
            'txid':
            spendtx.txid_hex,
            'allowed':
            False,
            'reject-reason':
            'mandatory-script-verify-flag-failed (Non-canonical DER signature)'
        }], self.nodes[0].testmempoolaccept(rawtxs=[spendtx.serialize().hex()],
                                            maxfeerate=0))

        # Now we verify that a block with this transaction is also invalid.
        block.vtx.append(spendtx)
        prepare_block(block)

        with self.nodes[0].assert_debug_log(expected_msgs=[
                'ConnectBlock {} failed, blk-bad-inputs'.format(block.hash)
        ]):
            self.nodes[0].p2p.send_and_ping(msg_block(block))
            assert_equal(self.nodes[0].getbestblockhash(), tip)
            self.nodes[0].p2p.sync_with_ping()

        self.log.info(
            "Test that a version 3 block with a DERSIG-compliant transaction is accepted"
        )
        block.vtx[1] = create_transaction(self.nodes[0],
                                          self.coinbase_txids[1],
                                          self.nodeaddress,
                                          amount=1.0,
                                          vout=1)
        prepare_block(block)

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
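
unDERify is used above but not shown in this snippet. A sketch consistent with that usage
(modelled on the upstream functional-test helper, so treat the exact details as an
assumption): it pads the DER signature in the first input so it is no longer strictly
canonical, without otherwise invalidating it.

def unDERify(tx):
    """Make the signature in vin 0 of a tx non-DER-compliant by padding after the S-value (sketch)."""
    scriptSig = CScript(tx.vin[0].scriptSig)
    newscript = []
    for i in scriptSig:
        if len(newscript) == 0:
            # insert a zero byte between the S-value and the trailing sighash-type byte
            newscript.append(i[0:-1] + b'\0' + i[-1:])
        else:
            newscript.append(i)
    tx.vin[0].scriptSig = CScript(newscript)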
Example #14
    def run_test(self):
        self.nodes[0].add_p2p_connection(P2PInterface())

        self.log.info("Mining %d blocks", DERSIG_HEIGHT - 2)
        self.coinbase_txids = [
            self.nodes[0].getblock(b)['tx'][0]
            for b in self.nodes[0].generate(DERSIG_HEIGHT - 2)
        ]
        self.nodeaddress = self.nodes[0].getnewaddress()

        self.log.info(
            "Test that a transaction with non-DER signature can still appear in a block"
        )

        spendtx = create_transaction(self.nodes[0],
                                     self.coinbase_txids[0],
                                     self.nodeaddress,
                                     amount=1.0)
        unDERify(spendtx)
        spendtx.rehash()

        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(DERSIG_HEIGHT - 1),
                             block_time)
        block.nVersion = 2
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)

        self.log.info("Test that blocks must now be at least version 3")
        tip = block.sha256
        block_time += 1
        block = create_block(tip, create_coinbase(DERSIG_HEIGHT), block_time)
        block.nVersion = 2
        block.rehash()
        block.solve()
        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)

        wait_until(lambda: "reject" in self.nodes[0].p2p.last_message.keys(),
                   lock=mininode_lock)
        with mininode_lock:
            assert_equal(self.nodes[0].p2p.last_message["reject"].code,
                         REJECT_OBSOLETE)
            assert_equal(self.nodes[0].p2p.last_message["reject"].reason,
                         b'bad-version(0x00000002)')
            assert_equal(self.nodes[0].p2p.last_message["reject"].data,
                         block.sha256)
            del self.nodes[0].p2p.last_message["reject"]

        self.log.info(
            "Test that transactions with non-DER signatures cannot appear in a block"
        )
        block.nVersion = 3

        spendtx = create_transaction(self.nodes[0],
                                     self.coinbase_txids[1],
                                     self.nodeaddress,
                                     amount=1.0)
        unDERify(spendtx)
        spendtx.rehash()

        # First we show that this tx is valid except for DERSIG by getting it
        # rejected from the mempool for exactly that reason.
        assert_equal([{
            'txid':
            spendtx.hash,
            'allowed':
            False,
            'reject-reason':
            '64: non-mandatory-script-verify-flag (Non-canonical DER signature)'
        }],
                     self.nodes[0].testmempoolaccept(
                         rawtxs=[bytes_to_hex_str(spendtx.serialize())],
                         allowhighfees=True))

        # Now we verify that a block with this transaction is also invalid.
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)

        wait_until(lambda: "reject" in self.nodes[0].p2p.last_message.keys(),
                   lock=mininode_lock)
        with mininode_lock:
            # We can receive different reject messages depending on whether
            # catrad is running with multiple script check threads. If script
            # check threads are not in use, then transaction script validation
            # happens sequentially, and catrad produces more specific reject
            # reasons.
            assert self.nodes[0].p2p.last_message["reject"].code in [
                REJECT_INVALID, REJECT_NONSTANDARD
            ]
            assert_equal(self.nodes[0].p2p.last_message["reject"].data,
                         block.sha256)
            if self.nodes[0].p2p.last_message["reject"].code == REJECT_INVALID:
                # Generic rejection when a block is invalid
                assert_equal(self.nodes[0].p2p.last_message["reject"].reason,
                             b'block-validation-failed')
            else:
                assert b'Non-canonical DER signature' in self.nodes[
                    0].p2p.last_message["reject"].reason

        self.log.info(
            "Test that a version 3 block with a DERSIG-compliant transaction is accepted"
        )
        block.vtx[1] = create_transaction(self.nodes[0],
                                          self.coinbase_txids[1],
                                          self.nodeaddress,
                                          amount=1.0)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
Example #15
    def run_test(self):
        start_p2p_connection(self.nodes)
        block_gen_thread = BlockGenThread(self.nodes, self.log)
        block_gen_thread.start()

        file_dir = os.path.dirname(os.path.realpath(__file__))

        self.log.info("Initializing contract")

        self.buggy_contract = get_contract_instance(source=os.path.join(
            file_dir, "contracts/reentrancy.sol"),
                                                    contract_name="Reentrance")
        self.exploit_contract = get_contract_instance(
            source=os.path.join(file_dir, "contracts/reentrancy_exploit.sol"),
            contract_name="ReentranceExploit")

        user1, _ = ec_random_keys()
        user1_addr = priv_to_addr(user1)
        user2, _ = ec_random_keys()
        user2_addr = priv_to_addr(user2)

        # setup balance
        value = (10**15 + 2000) * 10**18 + ReentrancyTest.REQUEST_BASE['gas']
        tx = create_transaction(pri_key=self.genesis_priv_key,
                                receiver=user1_addr,
                                value=value,
                                nonce=self.get_nonce(self.genesis_addr),
                                gas_price=ReentrancyTest.REQUEST_BASE['gas'])
        self.send_transaction(tx, True, False)

        tx = create_transaction(pri_key=self.genesis_priv_key,
                                receiver=user2_addr,
                                value=value,
                                nonce=self.get_nonce(self.genesis_addr),
                                gas_price=ReentrancyTest.REQUEST_BASE['gas'])
        self.send_transaction(tx, True, False)

        addr = eth_utils.encode_hex(user1_addr)
        balance = parse_as_int(self.nodes[0].cfx_getBalance(addr))
        assert_equal(balance, value)
        addr = eth_utils.encode_hex(user2_addr)
        balance = parse_as_int(self.nodes[0].cfx_getBalance(addr))
        assert_equal(balance, value)

        transaction = self.call_contract_function(self.buggy_contract,
                                                  "constructor", [],
                                                  self.genesis_priv_key,
                                                  storage_limit=20000)
        contract_addr = self.wait_for_tx([transaction],
                                         True)[0]['contractCreated']

        transaction = self.call_contract_function(self.exploit_contract,
                                                  "constructor", [],
                                                  user2,
                                                  storage_limit=200000)
        exploit_addr = self.wait_for_tx([transaction],
                                        True)[0]['contractCreated']

        transaction = self.call_contract_function(
            self.buggy_contract,
            "addBalance", [],
            user1,
            100000000000000000000000000000000,
            contract_addr,
            True,
            True,
            storage_limit=128)
        transaction = self.call_contract_function(
            self.exploit_contract,
            "deposit", [Web3.toChecksumAddress(contract_addr)],
            user2,
            100000000000000000000000000000000,
            exploit_addr,
            True,
            True,
            storage_limit=128)

        addr = eth_utils.encode_hex(user1_addr)
        balance = parse_as_int(self.nodes[0].cfx_getBalance(addr))
        assert_greater_than_or_equal(balance,
                                     899999999999999999999999950000000)
        addr = eth_utils.encode_hex(user2_addr)
        balance = parse_as_int(self.nodes[0].cfx_getBalance(addr))
        assert_greater_than_or_equal(balance,
                                     899999999999999999999999900000000)
        balance = parse_as_int(self.nodes[0].cfx_getBalance(contract_addr))
        assert_equal(balance, 200000000000000000000000000000000)

        transaction = self.call_contract_function(self.exploit_contract,
                                                  "launch_attack", [],
                                                  user2,
                                                  0,
                                                  exploit_addr,
                                                  True,
                                                  True,
                                                  storage_limit=128)
        transaction = self.call_contract_function(self.exploit_contract,
                                                  "get_money", [],
                                                  user2,
                                                  0,
                                                  exploit_addr,
                                                  True,
                                                  True,
                                                  storage_limit=128)
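        # launch_attack re-enters the vulnerable Reentrance contract's withdraw path so the
        # exploit contract can drain its balance, and get_money then forwards the drained
        # funds to user2; the balance checks below confirm the buggy contract ends at 0.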

        addr = eth_utils.encode_hex(user1_addr)
        balance = parse_as_int(self.nodes[0].cfx_getBalance(addr))
        assert_greater_than_or_equal(balance,
                                     899999999999999999999999950000000)
        addr = eth_utils.encode_hex(user2_addr)
        balance = parse_as_int(self.nodes[0].cfx_getBalance(addr))
        assert_greater_than_or_equal(balance,
                                     1099999999999999999999999800000000)
        balance = parse_as_int(self.nodes[0].cfx_getBalance(contract_addr))
        assert_equal(balance, 0)

        block_gen_thread.stop()
        block_gen_thread.join()
Example #16
    def run_test(self):
        genesis_key = default_config["GENESIS_PRI_KEY"]
        balance_map = {genesis_key: default_config["TOTAL_COIN"]}
        self.log.info("Initial State: (sk:%d, addr:%s, balance:%d)", bytes_to_int(genesis_key),
                      eth_utils.encode_hex(privtoaddr(genesis_key)), balance_map[genesis_key])
        nonce_map = {genesis_key: 0}

        '''Check if transaction from uncommitted new address can be accepted'''
        tx_n = 5
        receiver_sk = genesis_key
        gas_price = 1
        for i in range(tx_n):
            sender_key = receiver_sk
            value = int((balance_map[sender_key] - ((tx_n - i) * 21000 * gas_price)) * random.random())
            nonce = nonce_map[sender_key]
            receiver_sk, _ = ec_random_keys()
            nonce_map[receiver_sk] = 0
            balance_map[receiver_sk] = value
            tx = create_transaction(pri_key=sender_key, receiver=privtoaddr(receiver_sk), value=value, nonce=nonce,
                                    gas_price=gas_price)
            r = random.randint(0, self.num_nodes - 1)
            self.nodes[r].p2p.send_protocol_msg(Transactions(transactions=[tx]))
            nonce_map[sender_key] = nonce + 1
            balance_map[sender_key] -= value + gas_price * 21000
            self.log.debug("New tx %s: %s send value %d to %s, sender balance:%d, receiver balance:%d", encode_hex(tx.hash), eth_utils.encode_hex(privtoaddr(sender_key))[-4:],
                           value, eth_utils.encode_hex(privtoaddr(receiver_sk))[-4:], balance_map[sender_key], balance_map[receiver_sk])
            self.log.debug("Send Transaction %s to node %d", encode_hex(tx.hash), r)
            time.sleep(random.random() / 100)
        block_gen_thread = BlockGenThread(self.nodes, self.log, interval_base=0.2)
        block_gen_thread.start()
        for k in balance_map:
            self.log.info("Check account sk:%s addr:%s", bytes_to_int(k), eth_utils.encode_hex(privtoaddr(k)))
            wait_until(lambda: self.check_account(k, balance_map))
        self.log.info("Pass 1")

        '''Test Random Transactions'''
        all_txs = []
        tx_n = 1000
        self.log.info("start to generate %d transactions with about %d seconds", tx_n, tx_n/100/2)
        for i in range(tx_n):
            sender_key = random.choice(list(balance_map))
            nonce = nonce_map[sender_key]
            if random.random() < 0.1 and balance_map[sender_key] > 21000 * 4 * tx_n:
                value = int(balance_map[sender_key] * 0.5)
                receiver_sk, _ = ec_random_keys()
                nonce_map[receiver_sk] = 0
                balance_map[receiver_sk] = value
            else:
                value = 1
                receiver_sk = random.choice(list(balance_map))
                balance_map[receiver_sk] += value
            # not enough transaction fee (gas_price * gas_limit) should not happen for now
            assert balance_map[sender_key] >= value + gas_price * 21000
            tx = create_transaction(pri_key=sender_key, receiver=privtoaddr(receiver_sk), value=value, nonce=nonce,
                                    gas_price=gas_price)
            r = random.randint(0, self.num_nodes - 1)
            self.nodes[r].p2p.send_protocol_msg(Transactions(transactions=[tx]))
            all_txs.append(tx)
            nonce_map[sender_key] = nonce + 1
            balance_map[sender_key] -= value + gas_price * 21000
            self.log.debug("New tx %s: %s send value %d to %s, sender balance:%d, receiver balance:%d nonce:%d", encode_hex(tx.hash), eth_utils.encode_hex(privtoaddr(sender_key))[-4:],
                          value, eth_utils.encode_hex(privtoaddr(receiver_sk))[-4:], balance_map[sender_key], balance_map[receiver_sk], nonce)
            self.log.debug("Send Transaction %s to node %d", encode_hex(tx.hash), r)
            time.sleep(random.random() / 100)
        for k in balance_map:
            self.log.info("Account %s with balance:%s", bytes_to_int(k), balance_map[k])
        for tx in all_txs:
            self.log.debug("Wait for tx to confirm %s", tx.hash_hex())
            for i in range(3):
                try:
                    retry = True
                    while retry:
                        try:
                            wait_until(lambda: checktx(self.nodes[0], tx.hash_hex()), timeout=120)
                            retry = False
                        except CannotSendRequest:
                            time.sleep(0.01)
                    break
                except AssertionError as _:
                    self.nodes[0].p2p.send_protocol_msg(Transactions(transactions=[tx]))
                if i == 2:
                    raise AssertionError("Tx {} not confirmed after 30 seconds".format(tx.hash_hex()))

        for k in balance_map:
            self.log.info("Check account sk:%s addr:%s", bytes_to_int(k), eth_utils.encode_hex(privtoaddr(k)))
            wait_until(lambda: self.check_account(k, balance_map))
        block_gen_thread.stop()
        block_gen_thread.join()
        sync_blocks(self.nodes)
        self.log.info("Pass")
Example #17
    def run_test(self):
        file_dir = os.path.dirname(os.path.realpath(__file__))
        erc20_contract = get_contract_instance(
            abi_file=os.path.join(file_dir, "contracts/erc20_abi.json"),
            bytecode_file=os.path.join(file_dir,
                                       "contracts/erc20_bytecode.dat"),
        )

        gas_price = 1
        gas = CONTRACT_DEFAULT_GAS

        start_p2p_connection(self.nodes)

        self.log.info("Initializing contract")
        genesis_key = default_config["GENESIS_PRI_KEY"]
        genesis_addr = encode_hex_0x(priv_to_addr(genesis_key))
        nonce = 0
        block_gen_thread = BlockGenThread(self.nodes, self.log)
        block_gen_thread.start()
        self.tx_conf = {
            "from": Web3.toChecksumAddress(genesis_addr),
            "nonce": int_to_hex(nonce),
            "gas": int_to_hex(gas),
            "gasPrice": int_to_hex(gas_price),
            "chainId": 0
        }
        raw_create = erc20_contract.constructor().buildTransaction(
            self.tx_conf)
        tx_data = decode_hex(raw_create["data"])
        tx_create = create_transaction(pri_key=genesis_key,
                                       receiver=b'',
                                       nonce=nonce,
                                       gas_price=gas_price,
                                       data=tx_data,
                                       gas=gas,
                                       value=0,
                                       storage_limit=3382)
        client = RpcClient(self.nodes[0])
        c0 = client.get_collateral_for_storage(genesis_addr)
        client.send_tx(tx_create, True)
        receipt = client.get_transaction_receipt(tx_create.hash_hex())
        c1 = client.get_collateral_for_storage(genesis_addr)
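        # Storage collateral here is 1 CFX (10**18 drip) per 1024 bytes, so deploying the
        # contract with storage_limit=3382 locks 3382 * 10**18 / 1024 drip, as asserted below.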
        assert_equal(c1 - c0, 3382 * 10**18 / 1024)
        contract_addr = receipt['contractCreated']
        self.log.info("Contract " + str(contract_addr) +
                      " created, start transferring tokens")

        tx_n = 10
        self.tx_conf["to"] = contract_addr
        nonce += 1
        balance_map = {genesis_key: 1000000 * 10**18}
        sender_key = genesis_key
        all_txs = []
        for i in range(tx_n):
            value = int((balance_map[sender_key] -
                         ((tx_n - i) * 21000 * gas_price)) * random.random())
            receiver_sk, _ = ec_random_keys()
            balance_map[receiver_sk] = value
            tx_data = decode_hex(
                erc20_contract.functions.transfer(
                    Web3.toChecksumAddress(
                        encode_hex(priv_to_addr(receiver_sk))),
                    value).buildTransaction(self.tx_conf)["data"])
            tx = create_transaction(pri_key=sender_key,
                                    receiver=decode_hex(self.tx_conf["to"]),
                                    value=0,
                                    nonce=nonce,
                                    gas=gas,
                                    gas_price=gas_price,
                                    data=tx_data,
                                    storage_limit=64)
            r = random.randint(0, self.num_nodes - 1)
            self.nodes[r].p2p.send_protocol_msg(
                Transactions(transactions=[tx]))
            nonce += 1
            balance_map[sender_key] -= value
            all_txs.append(tx)
        self.log.info("Wait for transactions to be executed")
        self.wait_for_tx(all_txs)
        self.log.info("Check final token balance")
        for sk in balance_map:
            addr = priv_to_addr(sk)
            assert_equal(self.get_balance(erc20_contract, addr, nonce),
                         balance_map[sk])
        c2 = client.get_collateral_for_storage(genesis_addr)
        assert_equal(c2 - c1, 64 * tx_n * 10**18 / 1024)
        block_gen_thread.stop()
        block_gen_thread.join()
        sync_blocks(self.nodes)
        self.log.info("Pass")
Example #18
    def run_test(self):
        block_number = 10

        for i in range(1, block_number):
            chosen_peer = random.randint(0, self.num_nodes - 1)
            block_hash = self.nodes[chosen_peer].generate(1, 0)
            self.log.info("generate block %s", block_hash)
        wait_for_block_count(self.nodes[0], block_number, timeout=30)
        sync_blocks(self.nodes, timeout=30)
        self.log.info("generated blocks received by all")

        self.stop_node(0, kill=True)
        self.log.info("node 0 stopped")
        block_hash = self.nodes[-1].generate(1, 0)
        self.log.info("generate block %s", block_hash)
        wait_for_block_count(self.nodes[1], block_number + 1)
        sync_blocks(self.nodes[1:], timeout=30)
        self.log.info("blocks sync success among running nodes")
        self.start_node(0)
        sync_blocks(self.nodes, timeout=30)
        self.log.info("Pass 1")

        for i in range(1, self.num_nodes):
            self.stop_node(i)
        self.nodes[0].add_p2p_connection(P2PInterface())
        network_thread_start()
        self.nodes[0].p2p.wait_for_status()
        gas_price = 1
        value = 1
        receiver_sk, _ = ec_random_keys()
        sender_key = default_config["GENESIS_PRI_KEY"]
        tx = create_transaction(pri_key=sender_key,
                                receiver=privtoaddr(receiver_sk),
                                value=value,
                                nonce=0,
                                gas_price=gas_price)
        self.nodes[0].p2p.send_protocol_msg(Transactions(transactions=[tx]))
        self.log.debug("New tx %s: %s send value %d to %s",
                       encode_hex(tx.hash),
                       eth_utils.encode_hex(privtoaddr(sender_key))[-4:],
                       value,
                       eth_utils.encode_hex(privtoaddr(receiver_sk))[-4:])

        def check_packed():
            client = RpcClient(self.nodes[0])
            client.generate_block(1)
            return checktx(self.nodes[0], tx.hash_hex())

        wait_until(lambda: check_packed())
        sender_addr = eth_utils.encode_hex(privtoaddr(sender_key))
        receiver_addr = eth_utils.encode_hex(privtoaddr(receiver_sk))
        sender_balance = default_config[
            "TOTAL_COIN"] - value - gas_price * 21000
        self.nodes[0].generate(5, 0)
        assert_equal(parse_as_int(self.nodes[0].cfx_getBalance(sender_addr)),
                     sender_balance)
        assert_equal(parse_as_int(self.nodes[0].cfx_getBalance(receiver_addr)),
                     value)
        time.sleep(1)
        self.stop_node(0)
        self.start_node(0)
        wait_until(lambda: parse_as_int(self.nodes[0].cfx_getBalance(
            sender_addr)) == sender_balance)
        wait_until(lambda: parse_as_int(self.nodes[0].cfx_getBalance(
            receiver_addr)) == value)
        self.log.info("Pass 2")
Example #19
    def create_tx(self, spend_tx, n, value, script=CScript([OP_TRUE])):
        tx = create_transaction(spend_tx, n, b"", value, script)
        return tx
Example #20
    def run_test(self):
        self.nodes[0].add_p2p_connection(P2PInterface())

        self.log.info("Mining %d blocks", CLTV_HEIGHT - 2)
        self.coinbase_txids = [self.nodes[0].getblock(b)['tx'][0] for b in self.nodes[0].generate(CLTV_HEIGHT - 2)]
        self.nodeaddress = self.nodes[0].getnewaddress()

        self.log.info("Test that an invalid-according-to-CLTV transaction can still appear in a block")

        spendtx = create_transaction(self.nodes[0], self.coinbase_txids[0],
                self.nodeaddress, amount=1.0)
        cltv_invalidate(spendtx)
        spendtx.rehash()

        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(CLTV_HEIGHT - 1), block_time)
        block.nVersion = 3
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)

        self.log.info("Test that blocks must now be at least version 4")
        tip = block.sha256
        block_time += 1
        block = create_block(tip, create_coinbase(CLTV_HEIGHT), block_time)
        block.nVersion = 3
        block.solve()
        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)

        wait_until(lambda: "reject" in self.nodes[0].p2p.last_message.keys(), lock=mininode_lock)
        with mininode_lock:
            assert_equal(self.nodes[0].p2p.last_message["reject"].code, REJECT_OBSOLETE)
            assert_equal(self.nodes[0].p2p.last_message["reject"].reason, b'bad-version(0x00000003)')
            assert_equal(self.nodes[0].p2p.last_message["reject"].data, block.sha256)
            del self.nodes[0].p2p.last_message["reject"]

        self.log.info("Test that invalid-according-to-cltv transactions cannot appear in a block")
        block.nVersion = 4

        spendtx = create_transaction(self.nodes[0], self.coinbase_txids[1],
                self.nodeaddress, amount=1.0)
        cltv_invalidate(spendtx)
        spendtx.rehash()

        # First we show that this tx is valid except for CLTV by getting it
        # rejected from the mempool for exactly that reason.
        assert_equal(
            [{'txid': spendtx.hash, 'allowed': False, 'reject-reason': '64: non-mandatory-script-verify-flag (Negative locktime)'}],
            self.nodes[0].testmempoolaccept(rawtxs=[bytes_to_hex_str(spendtx.serialize())], allowhighfees=True)
        )

        # Now we verify that a block with this transaction is also invalid.
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)

        wait_until(lambda: "reject" in self.nodes[0].p2p.last_message.keys(), lock=mininode_lock)
        with mininode_lock:
            assert self.nodes[0].p2p.last_message["reject"].code in [REJECT_INVALID, REJECT_NONSTANDARD]
            assert_equal(self.nodes[0].p2p.last_message["reject"].data, block.sha256)
            if self.nodes[0].p2p.last_message["reject"].code == REJECT_INVALID:
                # Generic rejection when a block is invalid
                assert_equal(self.nodes[0].p2p.last_message["reject"].reason, b'block-validation-failed')
            else:
                assert b'Negative locktime' in self.nodes[0].p2p.last_message["reject"].reason

        self.log.info("Test that a version 4 block with a valid-according-to-CLTV transaction is accepted")
        spendtx = cltv_validate(self.nodes[0], spendtx, CLTV_HEIGHT - 1)
        spendtx.rehash()

        block.vtx.pop(1)
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
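
cltv_invalidate and cltv_validate are helpers from the same test file and are not shown
here. A sketch of the invalidating half (based on the upstream helper, so treat the exact
form as an assumption): it prepends an always-failing -1 CHECKLOCKTIMEVERIFY to the
input's scriptSig.

def cltv_invalidate(tx):
    """Modify vin 0 of the tx so it fails CLTV by prepending -1 CLTV DROP (sketch)."""
    tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_CHECKLOCKTIMEVERIFY, OP_DROP] +
                                  list(CScript(tx.vin[0].scriptSig)))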
Example #21
    def get_tests(self):
        if self.tip is None:
            self.tip = int("0x" + self.nodes[0].getbestblockhash() + "L", 0)
        self.block_time = int(time.time()) + 1
        '''
        Create a new block with an anyone-can-spend coinbase
        '''
        block = create_block(self.tip, create_coinbase(), self.block_time)
        self.block_time += 1
        block.solve()
        # Save the coinbase for later
        self.block1 = block
        self.tip = block.sha256
        yield TestInstance([[block, True]])
        '''
        Now we need that block to mature so we can spend the coinbase.
        '''
        test = TestInstance(sync_every_block=False)
        for i in xrange(100):
            block = create_block(self.tip, create_coinbase(), self.block_time)
            block.solve()
            self.tip = block.sha256
            self.block_time += 1
            test.blocks_and_transactions.append([block, True])
        yield test
        '''
        Now we use merkle-root malleability to generate an invalid block with
        same blockheader.
        Manufacture a block with 3 transactions (coinbase, spend of prior
        coinbase, spend of that spend).  Duplicate the 3rd transaction to 
        leave merkle root and blockheader unchanged but invalidate the block.
        '''
        block2 = create_block(self.tip, create_coinbase(), self.block_time)
        self.block_time += 1

        # chr(81) is OP_TRUE
        tx1 = create_transaction(self.block1.vtx[0], 0, chr(81),
                                 40 * 100000000)
        tx2 = create_transaction(tx1, 0, chr(81), 40 * 100000000)

        block2.vtx.extend([tx1, tx2])
        block2.hashMerkleRoot = block2.calc_merkle_root()
        block2.rehash()
        block2.solve()
        orig_hash = block2.sha256
        block2_orig = copy.deepcopy(block2)

        # Mutate block 2
        block2.vtx.append(tx2)
        assert_equal(block2.hashMerkleRoot, block2.calc_merkle_root())
        assert_equal(orig_hash, block2.rehash())
        assert (block2_orig.vtx != block2.vtx)
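        # The root is unchanged because the merkle computation duplicates the last hash
        # whenever a level has an odd number of entries, so [coinbase, tx1, tx2] and
        # [coinbase, tx1, tx2, tx2] hash to the same root (CVE-2012-2459).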

        self.tip = block2.sha256
        yield TestInstance([[block2, False], [block2_orig, True]])
        '''
        Make sure that a totally screwed up block is not valid.
        '''
        block3 = create_block(self.tip, create_coinbase(), self.block_time)
        self.block_time += 1
        block3.vtx[0].vout[0].nValue = 100 * 100000000  # Too high!
        block3.vtx[0].sha256 = None
        block3.vtx[0].calc_sha256()
        block3.hashMerkleRoot = block3.calc_merkle_root()
        block3.rehash()
        block3.solve()

        yield TestInstance([[block3, False]])
Example #22
    def run_test(self):
        self.address = self.nodes[0].getnewaddress()
        self.ms_address = self.nodes[0].addmultisigaddress(
            1, [self.address])['address']

        self.coinbase_blocks = self.nodes[0].generate(
            2, self.signblockprivkey_wif)  # Block 2
        coinbase_txid = []
        for i in self.coinbase_blocks:
            coinbase_txid.append(self.nodes[0].getblock(i)['tx'][0])
        self.nodes[0].generate(427, self.signblockprivkey_wif)  # Block 429
        self.lastblockhash = self.nodes[0].getbestblockhash()
        self.tip = int("0x" + self.lastblockhash, 0)
        self.lastblockheight = 429
        self.lastblocktime = int(time.time()) + 429

        self.log.info(
            "Test 1: NULLDUMMY compliant base transactions should be accepted to mempool"
        )
        test1txs = [
            create_transaction(self.nodes[0],
                               coinbase_txid[0],
                               self.ms_address,
                               amount=49)
        ]
        txid1 = self.nodes[0].sendrawtransaction(
            bytes_to_hex_str(test1txs[0].serialize_with_witness()), True)
        test1txs.append(
            create_transaction(self.nodes[0],
                               txid1,
                               self.ms_address,
                               amount=48))
        txid2 = self.nodes[0].sendrawtransaction(
            bytes_to_hex_str(test1txs[1].serialize_with_witness()), True)
        self.block_submit(self.nodes[0], test1txs, False, True)

        self.log.info(
            "Test 2: Non-NULLDUMMY base multisig transaction should not be accepted to mempool"
        )
        test2tx = create_transaction(self.nodes[0],
                                     txid2,
                                     self.ms_address,
                                     amount=47.99999)
        trueDummy(test2tx)
        assert_raises_rpc_error(
            -26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction,
            bytes_to_hex_str(test2tx.serialize_with_witness()), True)

        self.log.info(
            "Test 3: Non-NULLDUMMY base transactions should not be accepted in a block"
        )
        self.block_submit(self.nodes[0], [test2tx], False)

        self.log.info(
            "Test 4: Non-NULLDUMMY base multisig transaction is invalid ")
        test4tx = create_transaction(self.nodes[0],
                                     txid2,
                                     self.address,
                                     amount=46)
        test6txs = [CTransaction(test4tx)]
        trueDummy(test4tx)
        assert_raises_rpc_error(
            -26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction,
            bytes_to_hex_str(test4tx.serialize_with_witness()), True)
        self.block_submit(self.nodes[0], [test4tx])

        self.log.info(
            "Test 6: NULLDUMMY compliant base transactions should be accepted to mempool and in block"
        )
        for i in test6txs:
            self.nodes[0].sendrawtransaction(bytes_to_hex_str(i.serialize()),
                                             True)
        self.block_submit(self.nodes[0], test6txs, False, True)
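
trueDummy is used above but not included in the snippet. A sketch consistent with that
usage (modelled on the upstream NULLDUMMY test helper, details assumed): it replaces the
empty dummy element consumed by OP_CHECKMULTISIG with a non-null push, which is exactly
what the NULLDUMMY rule forbids.

def trueDummy(tx):
    """Replace the null CHECKMULTISIG dummy element in vin 0 with OP_TRUE (sketch)."""
    scriptSig = CScript(tx.vin[0].scriptSig)
    newscript = []
    for i in scriptSig:
        if len(newscript) == 0:
            assert len(i) == 0  # the dummy element must currently be null
            newscript.append(b'\x51')
        else:
            newscript.append(i)
    tx.vin[0].scriptSig = CScript(newscript)
    tx.rehash()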
Example #23
    def run_test(self):
        self.nodes[0].createwallet(wallet_name='wmulti',
                                   disable_private_keys=True)
        wmulti = self.nodes[0].get_wallet_rpc('wmulti')
        w0 = self.nodes[0].get_wallet_rpc(self.default_wallet_name)
        self.address = w0.getnewaddress()
        self.pubkey = w0.getaddressinfo(self.address)['pubkey']
        self.ms_address = wmulti.addmultisigaddress(1,
                                                    [self.pubkey])['address']
        self.wit_address = w0.getnewaddress(address_type='p2sh-segwit')
        self.wit_ms_address = wmulti.addmultisigaddress(
            1, [self.pubkey], '', 'p2sh-segwit')['address']
        if not self.options.descriptors:
            # Legacy wallets need to import these so that they are watched by the wallet. This is unnecessary (and does not need to be tested) for descriptor wallets
            wmulti.importaddress(self.ms_address)
            wmulti.importaddress(self.wit_ms_address)

        self.coinbase_blocks = self.nodes[0].generate(2)  # Block 2
        coinbase_txid = []
        for i in self.coinbase_blocks:
            coinbase_txid.append(self.nodes[0].getblock(i)['tx'][0])
        self.nodes[0].generate(427)  # Block 429
        self.lastblockhash = self.nodes[0].getbestblockhash()
        self.lastblockheight = 429
        self.lastblocktime = int(time.time()) + 429

        self.log.info(
            "Test 1: NULLDUMMY compliant base transactions should be accepted to mempool and mined before activation [430]"
        )
        test1txs = [
            create_transaction(self.nodes[0],
                               coinbase_txid[0],
                               self.ms_address,
                               amount=49)
        ]
        txid1 = self.nodes[0].sendrawtransaction(
            test1txs[0].serialize_with_witness().hex(), 0)
        test1txs.append(
            create_transaction(self.nodes[0],
                               txid1,
                               self.ms_address,
                               amount=48))
        txid2 = self.nodes[0].sendrawtransaction(
            test1txs[1].serialize_with_witness().hex(), 0)
        test1txs.append(
            create_transaction(self.nodes[0],
                               coinbase_txid[1],
                               self.wit_ms_address,
                               amount=49))
        txid3 = self.nodes[0].sendrawtransaction(
            test1txs[2].serialize_with_witness().hex(), 0)
        self.block_submit(self.nodes[0], test1txs, False, True)

        self.log.info(
            "Test 2: Non-NULLDUMMY base multisig transaction should not be accepted to mempool before activation"
        )
        test2tx = create_transaction(self.nodes[0],
                                     txid2,
                                     self.ms_address,
                                     amount=47)
        trueDummy(test2tx)
        assert_raises_rpc_error(-26, NULLDUMMY_ERROR,
                                self.nodes[0].sendrawtransaction,
                                test2tx.serialize_with_witness().hex(), 0)

        self.log.info(
            "Test 3: Non-NULLDUMMY base transactions should be accepted in a block before activation [431]"
        )
        self.block_submit(self.nodes[0], [test2tx], False, True)

        self.log.info(
            "Test 4: Non-NULLDUMMY base multisig transaction is invalid after activation"
        )
        test4tx = create_transaction(self.nodes[0],
                                     test2tx.hash,
                                     self.address,
                                     amount=46)
        test6txs = [CTransaction(test4tx)]
        trueDummy(test4tx)
        assert_raises_rpc_error(-26, NULLDUMMY_ERROR,
                                self.nodes[0].sendrawtransaction,
                                test4tx.serialize_with_witness().hex(), 0)
        self.block_submit(self.nodes[0], [test4tx])

        self.log.info(
            "Test 5: Non-NULLDUMMY P2WSH multisig transaction invalid after activation"
        )
        test5tx = create_transaction(self.nodes[0],
                                     txid3,
                                     self.wit_address,
                                     amount=48)
        test6txs.append(CTransaction(test5tx))
        test5tx.wit.vtxinwit[0].scriptWitness.stack[0] = b'\x01'
        assert_raises_rpc_error(-26, NULLDUMMY_ERROR,
                                self.nodes[0].sendrawtransaction,
                                test5tx.serialize_with_witness().hex(), 0)
        self.block_submit(self.nodes[0], [test5tx], True)

        self.log.info(
            "Test 6: NULLDUMMY compliant base/witness transactions should be accepted to mempool and in block after activation [432]"
        )
        for i in test6txs:
            self.nodes[0].sendrawtransaction(i.serialize_with_witness().hex(),
                                             0)
        self.block_submit(self.nodes[0], test6txs, True, True)
Example #24
def send_generic_input_tx(node, coinbases, address):
    tx = create_transaction(node, node.getblock(coinbases.pop())['tx'][0], address, amount=Decimal("49.99"))
    return node.sendrawtransaction(ToHex(sign_transaction(node, tx)))
Example #25
    def create_spend_tx(self, scriptSig):
        self.tx2 = create_transaction(self.tx1, 0, CScript(), 0)
        self.tx2.vin[0].scriptSig = scriptSig
        self.tx2.vout[0].scriptPubKey = CScript()
        self.tx2.rehash()
Example #26
def create_bip112special(node, input, txversion, address):
    tx = create_transaction(node, input, address, amount=Decimal("49.98"))
    tx.nVersion = txversion
    signtx = sign_transaction(node, tx)
    signtx.vin[0].scriptSig = CScript([-1, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
    return signtx
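
create_bip112txs, used heavily in the CSV activation example above, follows the same
pattern as create_bip112special but varies either the nSequence value or the OP_CSV
argument across all 16 relative lock-time encodings. A sketch modelled on the upstream
helper (exact details assumed; relative_locktime is the companion encoder for the
sdf/srhb/stf/srlb bits and product is itertools.product):

def create_bip112txs(node, bip112inputs, varyOP_CSV, txversion, address, locktime_delta=0):
    """Return a list of bip112 txs with different relative lock-time bits set (sketch)."""
    txs = []
    assert len(bip112inputs) >= 16
    for i, (sdf, srhb, stf, srlb) in enumerate(product(*[[True, False]] * 4)):
        locktime = relative_locktime(sdf, srhb, stf, srlb)
        tx = create_transaction(node, bip112inputs[i], address, amount=Decimal("49.98"))
        if varyOP_CSV:  # vary the OP_CSV argument, keep nSequence fixed
            tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME + locktime_delta
        else:           # vary nSequence, keep the OP_CSV argument fixed
            tx.vin[0].nSequence = locktime + locktime_delta
        tx.nVersion = txversion
        signtx = sign_transaction(node, tx)
        csv_arg = locktime if varyOP_CSV else BASE_RELATIVE_LOCKTIME
        signtx.vin[0].scriptSig = CScript([csv_arg, OP_CHECKSEQUENCEVERIFY, OP_DROP] +
                                          list(CScript(signtx.vin[0].scriptSig)))
        signtx.rehash()
        txs.append({'tx': signtx, 'sdf': sdf, 'stf': stf})
    return txs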
Example #27
    def run_test(self):
        node = self.nodes[0]  # convenience reference to the node

        self.bootstrap_p2p()  # Add one p2p connection to the node

        best_block = self.nodes[0].getbestblockhash()
        tip = int(best_block, 16)
        best_block_time = self.nodes[0].getblock(best_block)['time']
        block_time = best_block_time + 1

        self.log.info("Create a new block with an anyone-can-spend coinbase.")
        height = 1
        block = create_block(tip, create_coinbase(height), block_time)
        block.solve()
        # Save the coinbase for later
        block1 = block
        tip = block.sha256
        node.p2p.send_blocks_and_test([block], node, success=True)

        self.log.info("Mature the block.")
        self.nodes[0].generate(100)

        # b'\x64' is OP_NOTIF
        # Transaction will be rejected with code 16 (REJECT_INVALID)
        # and we get disconnected immediately
        self.log.info('Test a transaction that is rejected')
        tx1 = create_transaction(block1.vtx[0], 0, b'\x64' * 35, 50 * COIN - 12000)
        node.p2p.send_txs_and_test([tx1], node, success=False, expect_disconnect=True)

        # Make two p2p connections to provide the node with orphans
        # * p2ps[0] will send valid orphan txs (one with low fee)
        # * p2ps[1] will send an invalid orphan tx (and is later disconnected for that)
        self.reconnect_p2p(num_connections=2)

        self.log.info('Test orphan transaction handling ... ')
        # Create a root transaction that we withhold until all dependend transactions
        # are sent out and in the orphan cache
        SCRIPT_PUB_KEY_OP_TRUE = b'\x51\x75' * 15 + b'\x51'
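        # (0x51 is OP_TRUE / OP_1 and 0x75 is OP_DROP, so this scriptPubKey is an
        #  anyone-can-spend script padded out with OP_TRUE OP_DROP pairs.)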
        tx_withhold = CTransaction()
        tx_withhold.vin.append(CTxIn(outpoint=COutPoint(block1.vtx[0].sha256, 0)))
        tx_withhold.vout.append(CTxOut(nValue=50 * COIN - 12000, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
        tx_withhold.calc_sha256()

        # Our first orphan tx with some outputs to create further orphan txs
        tx_orphan_1 = CTransaction()
        tx_orphan_1.vin.append(CTxIn(outpoint=COutPoint(tx_withhold.sha256, 0)))
        tx_orphan_1.vout = [CTxOut(nValue=10 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE)] * 3
        tx_orphan_1.calc_sha256()

        # A valid transaction with low fee
        tx_orphan_2_no_fee = CTransaction()
        tx_orphan_2_no_fee.vin.append(CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 0)))
        tx_orphan_2_no_fee.vout.append(CTxOut(nValue=10 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))

        # A valid transaction with sufficient fee
        tx_orphan_2_valid = CTransaction()
        tx_orphan_2_valid.vin.append(CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 1)))
        tx_orphan_2_valid.vout.append(CTxOut(nValue=10 * COIN - 12000, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
        tx_orphan_2_valid.calc_sha256()

        # An invalid transaction with negative fee
        tx_orphan_2_invalid = CTransaction()
        tx_orphan_2_invalid.vin.append(CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 2)))
        tx_orphan_2_invalid.vout.append(CTxOut(nValue=11 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))

        self.log.info('Send the orphans ... ')
        # Send valid orphan txs from p2ps[0]
        node.p2p.send_txs_and_test([tx_orphan_1, tx_orphan_2_no_fee, tx_orphan_2_valid], node, success=False)
        # Send invalid tx from p2ps[1]
        node.p2ps[1].send_txs_and_test([tx_orphan_2_invalid], node, success=False)

        assert_equal(0, node.getmempoolinfo()['size'])  # Mempool should be empty
        assert_equal(2, len(node.getpeerinfo()))  # p2ps[1] is still connected

        self.log.info('Send the withhold tx ... ')
        node.p2p.send_txs_and_test([tx_withhold], node, success=True)

        # Transactions that should end up in the mempool
        expected_mempool = {
            t.hash
            for t in [
                tx_withhold,  # The transaction that is the root for all orphans
                tx_orphan_1,  # The orphan transaction that splits the coins
                tx_orphan_2_valid,  # The valid transaction (with sufficient fee)
            ]
        }
        # Transactions that do not end up in the mempool
        # tx_orphan_2_no_fee, because it has too low a fee (p2ps[0] is not disconnected for relaying that tx)
        # tx_orphan_2_invalid, because it has a negative fee (p2ps[1] is disconnected for relaying that tx)

        wait_until(lambda: 1 == len(node.getpeerinfo()), timeout=12)  # p2ps[1] is no longer connected
        assert_equal(expected_mempool, set(node.getrawmempool()))

        # Restart the node with BIP61 reject messages disabled; check that it disconnects without sending a reject message
        self.log.info('Test a transaction that is rejected, with BIP61 disabled')
        self.restart_node(0, ['-enablebip61=0','-persistmempool=0'])
        self.reconnect_p2p(num_connections=1)
        node.p2p.send_txs_and_test([tx1], node, success=False, expect_disconnect=True)
        # send_txs_and_test will have waited for disconnect, so we can safely check that no reject has been received
        assert_equal(node.p2p.reject_code_received, None)
Example #28
    def run_test(self):
        self.nodes[0].add_p2p_connection(P2PInterface())

        self.log.info("Mining %d blocks", CLTV_HEIGHT - 2)
        self.coinbase_txids = [
            self.nodes[0].getblock(b)['tx'][0]
            for b in self.nodes[0].generate(CLTV_HEIGHT - 2)
        ]
        self.nodeaddress = self.nodes[0].getnewaddress()

        self.log.info(
            "Test that an invalid-according-to-CLTV transaction can still appear in a block"
        )

        spendtx = create_transaction(self.nodes[0],
                                     self.coinbase_txids[0],
                                     self.nodeaddress,
                                     amount=1.0)
        cltv_invalidate(spendtx)
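        # cltv_invalidate is a test helper; it is assumed to prepend OP_1NEGATE
        # OP_CHECKLOCKTIMEVERIFY OP_DROP to the scriptSig, matching the
        # 'Negative locktime' rejections asserted below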
        spendtx.rehash()

        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(CLTV_HEIGHT - 1),
                             block_time)
        block.nVersion = 3
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)

        self.log.info("Test that blocks must now be at least version 4")
        tip = block.sha256
        block_time += 1
        block = create_block(tip, create_coinbase(CLTV_HEIGHT), block_time)
        block.nVersion = 3
        block.solve()

        with self.nodes[0].assert_debug_log(expected_msgs=[
                '{}, bad-version(0x00000003)'.format(block.hash)
        ]):
            self.nodes[0].p2p.send_and_ping(msg_block(block))
            assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
            self.nodes[0].p2p.sync_with_ping()

        self.log.info(
            "Test that invalid-according-to-cltv transactions cannot appear in a block"
        )
        block.nVersion = 4

        spendtx = create_transaction(self.nodes[0],
                                     self.coinbase_txids[1],
                                     self.nodeaddress,
                                     amount=1.0)
        cltv_invalidate(spendtx)
        spendtx.rehash()

        # First we show that this tx is valid except for CLTV by getting it
        # rejected from the mempool for exactly that reason.
        assert_equal(
            [{'txid': spendtx.hash, 'allowed': False, 'reject-reason': '64: non-mandatory-script-verify-flag (Negative locktime)'}],
            self.nodes[0].testmempoolaccept(rawtxs=[bytes_to_hex_str(spendtx.serialize())], allowhighfees=True)
        )

        # Now we verify that a block with this transaction is also invalid.
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

        with self.nodes[0].assert_debug_log(expected_msgs=[
                'CheckInputs on {} failed with non-mandatory-script-verify-flag (Negative locktime)'
                .format(block.vtx[-1].hash)
        ]):
            self.nodes[0].p2p.send_and_ping(msg_block(block))
            assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
            self.nodes[0].p2p.sync_with_ping()

        self.log.info(
            "Test that a version 4 block with a valid-according-to-CLTV transaction is accepted"
        )
        spendtx = cltv_validate(self.nodes[0], spendtx, CLTV_HEIGHT - 1)
        spendtx.rehash()

        block.vtx.pop(1)
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
Example #29
    def run_test(self):
        self.nodes[0].add_p2p_connection(P2PInterface())

        self.log.info("Mining %d blocks", DERSIG_HEIGHT - 2)
        self.coinbase_txids = [
            self.nodes[0].getblock(b)['tx'][0]
            for b in self.nodes[0].generate(DERSIG_HEIGHT - 2)
        ]
        self.nodeaddress = self.nodes[0].getnewaddress()

        self.log.info(
            "Test that a transaction with non-DER signature can still appear in a block"
        )

        spendtx = create_transaction(self.nodes[0],
                                     self.coinbase_txids[0],
                                     self.nodeaddress,
                                     amount=1.0)
        unDERify(spendtx)
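        # unDERify is a test helper; it is assumed to add padding after the S value of the
        # vin[0] signature, making it non-canonical DER while leaving it otherwise valid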
        spendtx.rehash()

        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(DERSIG_HEIGHT - 1),
                             block_time)
        block.nVersion = 2
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)

        self.log.info("Test that blocks must now be at least version 3")
        tip = block.sha256
        block_time += 1
        block = create_block(tip, create_coinbase(DERSIG_HEIGHT), block_time)
        block.nVersion = 2
        block.rehash()
        block.solve()

        with self.nodes[0].assert_debug_log(expected_msgs=[
                '{}, bad-version(0x00000002)'.format(block.hash)
        ]):
            self.nodes[0].p2p.send_and_ping(msg_block(block))
            assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
            self.nodes[0].p2p.sync_with_ping()

        self.log.info(
            "Test that transactions with non-DER signatures cannot appear in a block"
        )
        block.nVersion = 3

        spendtx = create_transaction(self.nodes[0],
                                     self.coinbase_txids[1],
                                     self.nodeaddress,
                                     amount=1.0)
        unDERify(spendtx)
        spendtx.rehash()

        # First we show that this tx is valid except for DERSIG by getting it
        # rejected from the mempool for exactly that reason.
        assert_equal(
            [{'txid': spendtx.hash, 'allowed': False, 'reject-reason': '64: non-mandatory-script-verify-flag (Non-canonical DER signature)'}],
            self.nodes[0].testmempoolaccept(rawtxs=[spendtx.serialize().hex()], maxfeerate=0)
        )

        # Now we verify that a block with this transaction is also invalid.
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        block.solve()

        with self.nodes[0].assert_debug_log(expected_msgs=[
                'CheckInputs on {} failed with non-mandatory-script-verify-flag (Non-canonical DER signature)'
                .format(block.vtx[-1].hash)
        ]):
            self.nodes[0].p2p.send_and_ping(msg_block(block))
            assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
            self.nodes[0].p2p.sync_with_ping()

        wait_until(lambda: "reject" in self.nodes[0].p2p.last_message.keys(),
                   lock=mininode_lock)
        with mininode_lock:
            assert self.nodes[0].p2p.last_message["reject"].code in [REJECT_INVALID, REJECT_NONSTANDARD]
            assert_equal(self.nodes[0].p2p.last_message["reject"].data, block.sha256)
            assert b'Non-canonical DER signature' in self.nodes[0].p2p.last_message["reject"].reason

        self.log.info(
            "Test that a version 3 block with a DERSIG-compliant transaction is accepted"
        )
        block.vtx[1] = create_transaction(self.nodes[0],
                                          self.coinbase_txids[1],
                                          self.nodeaddress,
                                          amount=1.0)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
Example #30
    def run_test(self):
        self.nodes[0].add_p2p_connection(P2PInterface())

        self.log.info("Mining {} blocks".format(CLTV_HEIGHT - 2))
        self.coinbase_txids = [
            self.nodes[0].getblock(b)['tx'][0]
            for b in self.nodes[0].generate(CLTV_HEIGHT - 2)
        ]
        self.nodeaddress = self.nodes[0].getnewaddress()

        self.log.info(
            "Test that an invalid-according-to-CLTV transaction can still appear in a block"
        )

        fundtx = create_transaction(self.nodes[0], self.coinbase_txids[0],
                                    self.nodeaddress, 49.99)
        fundtx, spendtx = cltv_lock_to_height(self.nodes[0], fundtx,
                                              self.nodeaddress, 49.98)
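        # cltv_lock_to_height is assumed to return a funding tx whose output is encumbered by
        # <height> OP_CHECKLOCKTIMEVERIFY (defaulting to -1 here) plus a tx spending that output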

        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(CLTV_HEIGHT - 1),
                             block_time)
        block.nVersion = 3
        block.vtx.append(fundtx)
        # include the -1 CLTV in block
        block.vtx.append(spendtx)
        make_conform_to_ctor(block)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        # This block is valid
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)

        self.log.info("Test that blocks must now be at least version 4")
        tip = block.sha256
        block_time += 1
        block = create_block(tip, create_coinbase(CLTV_HEIGHT), block_time)
        block.nVersion = 3
        block.solve()

        with self.nodes[0].assert_debug_log(expected_msgs=[
                '{}, bad-version(0x00000003)'.format(block.hash)
        ]):
            self.nodes[0].p2p.send_and_ping(msg_block(block))
            assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
            self.nodes[0].p2p.sync_with_ping()

        self.log.info(
            "Test that invalid-according-to-cltv transactions cannot appear in a block"
        )
        block.nVersion = 4

        fundtx = create_transaction(self.nodes[0], self.coinbase_txids[1],
                                    self.nodeaddress, 49.99)
        fundtx, spendtx = cltv_lock_to_height(self.nodes[0], fundtx,
                                              self.nodeaddress, 49.98)

        # The funding tx only has an unexecuted bad CLTV in its scriptPubKey; this is valid.
        self.nodes[0].p2p.send_and_ping(msg_tx(fundtx))
        assert fundtx.hash in self.nodes[0].getrawmempool()

        # Mine a block containing the funding transaction
        block.vtx.append(fundtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        # This block is valid
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)

        # We show that this tx is invalid due to CLTV by getting it
        # rejected from the mempool for exactly that reason.
        assert_equal(
            [{'txid': spendtx.hash, 'allowed': False, 'reject-reason': '64: non-mandatory-script-verify-flag (Negative locktime)'}],
            self.nodes[0].testmempoolaccept(rawtxs=[spendtx.serialize().hex()], allowhighfees=True)
        )

        rejectedtx_signed = self.nodes[0].signrawtransactionwithwallet(
            ToHex(spendtx))

        # Couldn't complete signature due to CLTV
        assert rejectedtx_signed['errors'][0]['error'] == 'Negative locktime'

        tip = block.hash
        block_time += 1
        block = create_block(block.sha256, create_coinbase(CLTV_HEIGHT + 1),
                             block_time)
        block.nVersion = 4
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

        with self.nodes[0].assert_debug_log(expected_msgs=[
                'ConnectBlock {} failed (blk-bad-inputs'.format(block.hash)
        ]):
            self.nodes[0].p2p.send_and_ping(msg_block(block))
            assert_equal(self.nodes[0].getbestblockhash(), tip)
            self.nodes[0].p2p.sync_with_ping()

        self.log.info(
            "Test that a version 4 block with a valid-according-to-CLTV transaction is accepted"
        )
        fundtx = create_transaction(self.nodes[0], self.coinbase_txids[2],
                                    self.nodeaddress, 49.99)
        fundtx, spendtx = cltv_lock_to_height(self.nodes[0], fundtx,
                                              self.nodeaddress, 49.98,
                                              CLTV_HEIGHT)

        # make sure sequence is nonfinal and locktime is good
        spendtx.vin[0].nSequence = 0xfffffffe
        spendtx.nLockTime = CLTV_HEIGHT

        # both transactions are fully valid
        self.nodes[0].sendrawtransaction(ToHex(fundtx))
        self.nodes[0].sendrawtransaction(ToHex(spendtx))

        # Modify the transactions in the block to be valid against CLTV
        block.vtx.pop(1)
        block.vtx.append(fundtx)
        block.vtx.append(spendtx)
        make_conform_to_ctor(block)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        # This block is now valid
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)
Example #31
    def run_test(self):
        # Add p2p connection to node0
        node = self.nodes[0]  # convenience reference to the node
        node.add_p2p_connection(P2PDataStore())

        network_thread_start()
        node.p2p.wait_for_verack()

        best_block = node.getblock(node.getbestblockhash())
        tip = int(node.getbestblockhash(), 16)
        height = best_block["height"] + 1
        block_time = best_block["time"] + 1

        self.log.info("Create a new block with an anyone-can-spend coinbase")

        height = 1
        block = create_block(tip, create_coinbase(height), block_time)
        block.solve()
        # Save the coinbase for later
        block1 = block
        tip = block.sha256
        node.p2p.send_blocks_and_test([block1], node, True)

        self.log.info("Mature the block.")
        node.generate(100)

        best_block = node.getblock(node.getbestblockhash())
        tip = int(node.getbestblockhash(), 16)
        height = best_block["height"] + 1
        block_time = best_block["time"] + 1

        # Use merkle-root malleability to generate an invalid block with
        # same blockheader.
        # Manufacture a block with 3 transactions (coinbase, spend of prior
        # coinbase, spend of that spend).  Duplicate the 3rd transaction to
        # leave merkle root and blockheader unchanged but invalidate the block.
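        # With an odd number of leaves the last transaction hash is paired with itself,
        # so appending a duplicate of the final transaction leaves the merkle root (and
        # therefore the block header) unchanged (the CVE-2012-2459 malleability).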
        self.log.info("Test merkle root malleability.")

        block2 = create_block(tip, create_coinbase(height), block_time)
        block_time += 1

        # b'\x51' is OP_TRUE
        tx1 = create_transaction(block1.vtx[0], 0, b'\x51', 50 * COIN)
        tx2 = create_transaction(tx1, 0, b'\x51', 50 * COIN)

        block2.vtx.extend([tx1, tx2])
        block2.hashMerkleRoot = block2.calc_merkle_root()
        block2.rehash()
        block2.solve()
        orig_hash = block2.sha256
        block2_orig = copy.deepcopy(block2)

        # Mutate block 2
        block2.vtx.append(tx2)
        assert_equal(block2.hashMerkleRoot, block2.calc_merkle_root())
        assert_equal(orig_hash, block2.rehash())
        assert(block2_orig.vtx != block2.vtx)

        node.p2p.send_blocks_and_test([block2], node, False, False, 16, b'bad-txns-duplicate')

        self.log.info("Test very broken block.")

        block3 = create_block(tip, create_coinbase(height), block_time)
        block_time += 1
        block3.vtx[0].vout[0].nValue = 100 * COIN  # Too high!
        block3.vtx[0].sha256 = None
        block3.vtx[0].calc_sha256()
        block3.hashMerkleRoot = block3.calc_merkle_root()
        block3.rehash()
        block3.solve()

        node.p2p.send_blocks_and_test([block3], node, False, False, 16, b'bad-cb-amount')
Example #32
    def run_test(self):
        self.nodes[0].add_p2p_connection(P2PInterface())

        self.log.info("Mining %d blocks", CLTV_HEIGHT - 2)
        self.coinbase_txids = [self.nodes[0].getblock(b)['tx'][0] for b in self.nodes[0].generate(CLTV_HEIGHT - 2)]
        self.nodeaddress = self.nodes[0].getnewaddress()

        self.log.info("Test that an invalid-according-to-CLTV transaction can still appear in a block")

        spendtx = create_transaction(self.nodes[0], self.coinbase_txids[0],
                self.nodeaddress, amount=1.0)
        cltv_invalidate(spendtx)
        spendtx.rehash()

        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(CLTV_HEIGHT - 1), block_time)
        block.nVersion = 3
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)

        self.log.info("Test that blocks must now be at least version 4")
        tip = block.sha256
        block_time += 1
        block = create_block(tip, create_coinbase(CLTV_HEIGHT), block_time)
        block.nVersion = 3
        block.solve()

        with self.nodes[0].assert_debug_log(expected_msgs=['{}, bad-version(0x00000003)'.format(block.hash)]):
            self.nodes[0].p2p.send_and_ping(msg_block(block))
            assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
            self.nodes[0].p2p.sync_with_ping()

        self.log.info("Test that invalid-according-to-cltv transactions cannot appear in a block")
        block.nVersion = 4

        spendtx = create_transaction(self.nodes[0], self.coinbase_txids[1],
                self.nodeaddress, amount=1.0)
        cltv_invalidate(spendtx)
        spendtx.rehash()

        # First we show that this tx is valid except for CLTV by getting it
        # rejected from the mempool for exactly that reason.
        assert_equal(
            [{'txid': spendtx.hash, 'allowed': False, 'reject-reason': '64: non-mandatory-script-verify-flag (Negative locktime)'}],
            self.nodes[0].testmempoolaccept(rawtxs=[spendtx.serialize().hex()], maxfeerate=0)
        )

        # Now we verify that a block with this transaction is also invalid.
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

        with self.nodes[0].assert_debug_log(expected_msgs=['CheckInputs on {} failed with non-mandatory-script-verify-flag (Negative locktime)'.format(block.vtx[-1].hash)]):
            self.nodes[0].p2p.send_and_ping(msg_block(block))
            assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
            self.nodes[0].p2p.sync_with_ping()

        self.log.info("Test that a version 4 block with a valid-according-to-CLTV transaction is accepted")
        spendtx = cltv_validate(self.nodes[0], spendtx, CLTV_HEIGHT - 1)
        spendtx.rehash()

        block.vtx.pop(1)
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
    def run_test(self):
        # Setup the p2p connections and start up the network thread.
        # test_node connects to node0 (not whitelisted)
        test_node = self.nodes[0].add_p2p_connection(P2PInterface())
        # min_work_node connects to node1 (whitelisted)
        min_work_node = self.nodes[1].add_p2p_connection(P2PInterface())

        network_thread_start()

        # Test logic begins here
        test_node.wait_for_verack()
        min_work_node.wait_for_verack()

        # 1. Have nodes mine a block (leave IBD)
        [ n.generate(1) for n in self.nodes ]
        tips = [ int("0x" + n.getbestblockhash(), 0) for n in self.nodes ]

        # 2. Send one block that builds on each tip.
        # This should be accepted by node0
        blocks_h2 = []  # the height 2 blocks on each node's chain
        block_time = int(time.time()) + 1
        for i in range(2):
            blocks_h2.append(create_block(tips[i], create_coinbase(2), block_time))
            blocks_h2[i].solve()
            block_time += 1
        test_node.send_message(msg_block(blocks_h2[0]))
        min_work_node.send_message(msg_block(blocks_h2[1]))

        for x in [test_node, min_work_node]:
            x.sync_with_ping()
        assert_equal(self.nodes[0].getblockcount(), 2)
        assert_equal(self.nodes[1].getblockcount(), 1)
        self.log.info("First height 2 block accepted by node0; correctly rejected by node1")

        # 3. Send another block that builds on genesis.
        block_h1f = create_block(int("0x" + self.nodes[0].getblockhash(0), 0), create_coinbase(1), block_time)
        block_time += 1
        block_h1f.solve()
        test_node.send_message(msg_block(block_h1f))

        test_node.sync_with_ping()
        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_h1f.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert(tip_entry_found)
        assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, block_h1f.hash)

        # 4. Send another two blocks that build on the fork.
        block_h2f = create_block(block_h1f.sha256, create_coinbase(2), block_time)
        block_time += 1
        block_h2f.solve()
        test_node.send_message(msg_block(block_h2f))

        test_node.sync_with_ping()
        # Since the earlier block was not processed by node, the new block
        # can't be fully validated.
        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_h2f.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert(tip_entry_found)

        # But this block should be accepted by node since it has equal work.
        self.nodes[0].getblock(block_h2f.hash)
        self.log.info("Second height 2 block accepted, but not reorg'ed to")

        # 4b. Now send another block that builds on the forking chain.
        block_h3 = create_block(block_h2f.sha256, create_coinbase(3), block_h2f.nTime+1)
        block_h3.solve()
        test_node.send_message(msg_block(block_h3))

        test_node.sync_with_ping()
        # Since the earlier block was not processed by node, the new block
        # can't be fully validated.
        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_h3.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert(tip_entry_found)
        self.nodes[0].getblock(block_h3.hash)

        # But this block should be accepted by node since it has more work.
        self.nodes[0].getblock(block_h3.hash)
        self.log.info("Unrequested more-work block accepted")

        # 4c. Now mine 288 more blocks and deliver; all should be processed but
        # the last (height-too-high) on node (as long as it is not missing any headers)
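        # 288 is assumed to match MIN_BLOCKS_TO_KEEP: unrequested blocks more than that far
        # ahead of the active tip are not stored, hence the "height-too-high" exception above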
        tip = block_h3
        all_blocks = []
        for i in range(288):
            next_block = create_block(tip.sha256, create_coinbase(i + 4), tip.nTime+1)
            next_block.solve()
            all_blocks.append(next_block)
            tip = next_block

        # Now send the block at height 5 and check that it wasn't accepted (missing header)
        test_node.send_message(msg_block(all_blocks[1]))
        test_node.sync_with_ping()
        assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblock, all_blocks[1].hash)
        assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblockheader, all_blocks[1].hash)

        # The block at height 5 should be accepted if we provide the missing header, though
        headers_message = msg_headers()
        headers_message.headers.append(CBlockHeader(all_blocks[0]))
        test_node.send_message(headers_message)
        test_node.send_message(msg_block(all_blocks[1]))
        test_node.sync_with_ping()
        self.nodes[0].getblock(all_blocks[1].hash)

        # Now send the blocks in all_blocks
        for i in range(288):
            test_node.send_message(msg_block(all_blocks[i]))
        test_node.sync_with_ping()

        # Blocks 1-287 should be accepted, block 288 should be ignored because it's too far ahead
        for x in all_blocks[:-1]:
            self.nodes[0].getblock(x.hash)
        assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, all_blocks[-1].hash)

        # 5. Test handling of unrequested block on the node that didn't process
        # Should still not be processed (even though it has a child that has more
        # work).

        # The node should have requested the blocks at some point, so
        # disconnect/reconnect first

        self.nodes[0].disconnect_p2ps()
        self.nodes[1].disconnect_p2ps()
        network_thread_join()

        test_node = self.nodes[0].add_p2p_connection(P2PInterface())
        network_thread_start()
        test_node.wait_for_verack()

        test_node.send_message(msg_block(block_h1f))

        test_node.sync_with_ping()
        assert_equal(self.nodes[0].getblockcount(), 2)
        self.log.info("Unrequested block that would complete more-work chain was ignored")

        # 6. Try to get node to request the missing block.
        # Poke the node with an inv for block at height 3 and see if that
        # triggers a getdata on block 2 (it should if block 2 is missing).
        with mininode_lock:
            # Clear state so we can check the getdata request
            test_node.last_message.pop("getdata", None)
            test_node.send_message(msg_inv([CInv(2, block_h3.sha256)]))

        test_node.sync_with_ping()
        with mininode_lock:
            getdata = test_node.last_message["getdata"]

        # Check that the getdata includes the right block
        assert_equal(getdata.inv[0].hash, block_h1f.sha256)
        self.log.info("Inv at tip triggered getdata for unprocessed block")

        # 7. Send the missing block for the third time (now it is requested)
        test_node.send_message(msg_block(block_h1f))

        test_node.sync_with_ping()
        assert_equal(self.nodes[0].getblockcount(), 290)
        self.nodes[0].getblock(all_blocks[286].hash)
        assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash)
        assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, all_blocks[287].hash)
        self.log.info("Successfully reorged to longer chain from non-whitelisted peer")

        # 8. Create a chain which is invalid at a height longer than the
        # current chain, but which has more blocks on top of that
        block_289f = create_block(all_blocks[284].sha256, create_coinbase(289), all_blocks[284].nTime+1)
        block_289f.solve()
        block_290f = create_block(block_289f.sha256, create_coinbase(290), block_289f.nTime+1)
        block_290f.solve()
        block_291 = create_block(block_290f.sha256, create_coinbase(291), block_290f.nTime+1)
        # block_291 spends a coinbase below maturity!
        block_291.vtx.append(create_transaction(block_290f.vtx[0], 0, b"42", 1))
        block_291.hashMerkleRoot = block_291.calc_merkle_root()
        block_291.solve()
        block_292 = create_block(block_291.sha256, create_coinbase(292), block_291.nTime+1)
        block_292.solve()

        # Now send all the headers on the chain and enough blocks to trigger reorg
        headers_message = msg_headers()
        headers_message.headers.append(CBlockHeader(block_289f))
        headers_message.headers.append(CBlockHeader(block_290f))
        headers_message.headers.append(CBlockHeader(block_291))
        headers_message.headers.append(CBlockHeader(block_292))
        test_node.send_message(headers_message)

        test_node.sync_with_ping()
        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_292.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert(tip_entry_found)
        assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, block_292.hash)

        test_node.send_message(msg_block(block_289f))
        test_node.send_message(msg_block(block_290f))

        test_node.sync_with_ping()
        self.nodes[0].getblock(block_289f.hash)
        self.nodes[0].getblock(block_290f.hash)

        test_node.send_message(msg_block(block_291))

        # At this point we've sent an obviously-bogus block, wait for full processing
        # without assuming whether we will be disconnected or not
        try:
            # Only wait a short while so the test doesn't take forever if we do get
            # disconnected
            test_node.sync_with_ping(timeout=1)
        except AssertionError:
            test_node.wait_for_disconnect()

            self.nodes[0].disconnect_p2ps()
            test_node = self.nodes[0].add_p2p_connection(P2PInterface())

            network_thread_start()
            test_node.wait_for_verack()

        # We should have failed reorg and switched back to 290 (but have block 291)
        assert_equal(self.nodes[0].getblockcount(), 290)
        assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash)
        assert_equal(self.nodes[0].getblock(block_291.hash)["confirmations"], -1)

        # Now send a new header on the invalid chain, indicating we're forked off, and expect to get disconnected
        block_293 = create_block(block_292.sha256, create_coinbase(293), block_292.nTime+1)
        block_293.solve()
        headers_message = msg_headers()
        headers_message.headers.append(CBlockHeader(block_293))
        test_node.send_message(headers_message)
        test_node.wait_for_disconnect()

        # 9. Connect node1 to node0 and ensure it is able to sync
        connect_nodes(self.nodes[0], 1)
        sync_blocks([self.nodes[0], self.nodes[1]])
        self.log.info("Successfully synced nodes 1 and 0")
Example #34
    def run_test(self):
        self.nodes[0].add_p2p_connection(P2PInterface())

        self.log.info("Mining %d blocks", DERSIG_HEIGHT - 2)
        self.coinbase_txids = [self.nodes[0].getblock(b)['tx'][0] for b in self.nodes[0].generate(DERSIG_HEIGHT - 2)]
        self.nodeaddress = self.nodes[0].getnewaddress()

        self.log.info("Test that a transaction with non-DER signature can still appear in a block")

        spendtx = create_transaction(self.nodes[0], self.coinbase_txids[0],
                self.nodeaddress, amount=1.0)
        unDERify(spendtx)
        spendtx.rehash()

        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(DERSIG_HEIGHT - 1), block_time)
        block.nVersion = VB_TOP_BITS
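        # VB_TOP_BITS is the BIP9 version-bits top bits (0x20000000 in the upstream test framework)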
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)

        self.log.info("Test that blocks must now be at least VB_TOP_BITS")
        tip = block.sha256
        block_time += 1
        block = create_block(tip, create_coinbase(DERSIG_HEIGHT), block_time)
        block.nVersion = 2
        block.rehash()
        block.solve()
        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)

        wait_until(lambda: "reject" in self.nodes[0].p2p.last_message.keys(), lock=mininode_lock)
        with mininode_lock:
            assert_equal(self.nodes[0].p2p.last_message["reject"].code, REJECT_OBSOLETE)
            assert_equal(self.nodes[0].p2p.last_message["reject"].reason, b'bad-version(0x00000002)')
            assert_equal(self.nodes[0].p2p.last_message["reject"].data, block.sha256)
            del self.nodes[0].p2p.last_message["reject"]

        self.log.info("Test that transactions with non-DER signatures cannot appear in a block")
        block.nVersion = VB_TOP_BITS

        spendtx = create_transaction(self.nodes[0], self.coinbase_txids[1],
                self.nodeaddress, amount=1.0)
        unDERify(spendtx)
        spendtx.rehash()

        # First we show that this tx is valid except for DERSIG by getting it
        # rejected from the mempool for exactly that reason.
        assert_equal(
            [{'txid': spendtx.hash, 'allowed': False, 'reject-reason': '64: non-mandatory-script-verify-flag (Non-canonical DER signature)'}],
            self.nodes[0].testmempoolaccept(rawtxs=[bytes_to_hex_str(spendtx.serialize())], allowhighfees=True)
        )

        # Now we verify that a block with this transaction is also invalid.
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)

        wait_until(lambda: "reject" in self.nodes[0].p2p.last_message.keys(), lock=mininode_lock)
        with mininode_lock:
            # We can receive different reject messages depending on whether
            # bitcoind is running with multiple script check threads. If script
            # check threads are not in use, then transaction script validation
            # happens sequentially, and bitcoind produces more specific reject
            # reasons.
            assert self.nodes[0].p2p.last_message["reject"].code in [REJECT_INVALID, REJECT_NONSTANDARD]
            assert_equal(self.nodes[0].p2p.last_message["reject"].data, block.sha256)
            if self.nodes[0].p2p.last_message["reject"].code == REJECT_INVALID:
                # Generic rejection when a block is invalid
                assert_equal(self.nodes[0].p2p.last_message["reject"].reason, b'block-validation-failed')
            else:
                assert b'Non-canonical DER signature' in self.nodes[0].p2p.last_message["reject"].reason

        self.log.info("Test that a version 3 block with a DERSIG-compliant transaction is accepted")
        block.vtx[1] = create_transaction(self.nodes[0], self.coinbase_txids[1], self.nodeaddress, amount=1.0)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
Example #35
    def run_test(self):
        self.setup_stake_coins(self.nodes[0])

        self.address = self.nodes[0].getnewaddress()
        self.ms_address = self.nodes[0].addmultisigaddress(
            1, [self.address])['address']
        self.wit_address = self.nodes[0].addwitnessaddress(self.address)
        self.wit_ms_address = self.nodes[0].addmultisigaddress(
            1, [self.address], '', 'p2sh-segwit')['address']

        self.nodes[0].add_p2p_connection(P2PInterface())

        self.coinbase_blocks = self.nodes[0].generate(2)  # Block 2
        coinbase_txid = []
        for i in self.coinbase_blocks:
            coinbase_txid.append(self.nodes[0].getblock(i)['tx'][0])
        self.lastblockhash = self.nodes[0].getbestblockhash()
        self.tip = int("0x" + self.lastblockhash, 0)
        self.lastblockheight = self.nodes[0].getblockcount()
        self.lastblocktime = int(time.time()) + 2

        ms_tx = create_transaction(self.nodes[0],
                                   coinbase_txid[0],
                                   self.ms_address,
                                   amount=PROPOSER_REWARD - 1)
        ms_txid = self.nodes[0].sendrawtransaction(
            bytes_to_hex_str(ms_tx.serialize_with_witness()), True)

        wit_ms_tx = create_transaction(self.nodes[0],
                                       coinbase_txid[1],
                                       self.wit_ms_address,
                                       amount=PROPOSER_REWARD - 1)
        wit_ms_txid = self.nodes[0].sendrawtransaction(
            bytes_to_hex_str(wit_ms_tx.serialize_with_witness()), True)

        self.send_block(self.nodes[0], [ms_tx, wit_ms_tx], True)

        self.log.info(
            "Test 1: Non-NULLDUMMY base multisig transaction is invalid")
        test1tx = create_transaction(self.nodes[0],
                                     ms_txid,
                                     self.address,
                                     amount=PROPOSER_REWARD - 2)
        test3txs = [CTransaction(test1tx)]
        trueDummy(test1tx)
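        # trueDummy is assumed to replace the null dummy element consumed by OP_CHECKMULTISIG
        # with a non-null value, which is exactly what the NULLDUMMY rule forbids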
        assert_raises_rpc_error(
            -26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction,
            bytes_to_hex_str(test1tx.serialize_with_witness()), True)
        self.send_block(self.nodes[0], [test1tx])

        self.log.info(
            "Test 2: Non-NULLDUMMY P2WSH multisig transaction invalid")
        test2tx = create_transaction(self.nodes[0],
                                     wit_ms_txid,
                                     self.wit_address,
                                     amount=PROPOSER_REWARD - 2)
        test3txs.append(CTransaction(test2tx))
        test2tx.wit.vtxinwit[0].scriptWitness.stack[0] = b'\x01'
        assert_raises_rpc_error(
            -26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction,
            bytes_to_hex_str(test2tx.serialize_with_witness()), True)
        self.send_block(self.nodes[0], [test2tx])

        self.log.info(
            "Test 3: NULLDUMMY compliant base/witness transactions should be accepted to mempool"
        )
        for i in test3txs:
            self.nodes[0].sendrawtransaction(
                bytes_to_hex_str(i.serialize_with_witness()), True)
        self.send_block(self.nodes[0], test3txs, True)
Example #36
    def run_test(self):
        self.nodes[0].add_p2p_connection(P2PInterface())

        self.log.info("Mining %d blocks", DERSIG_HEIGHT - 2)
        self.coinbase_txids = [self.nodes[0].getblock(b)['tx'][0] for b in self.nodes[0].generate(DERSIG_HEIGHT - 2)]
        self.nodeaddress = self.nodes[0].getnewaddress()

        self.log.info("Test that a transaction with non-DER signature can still appear in a block")

        spendtx = create_transaction(self.nodes[0], self.coinbase_txids[0],
                self.nodeaddress, amount=1.0)
        unDERify(spendtx)
        spendtx.rehash()

        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(DERSIG_HEIGHT - 1), block_time)
        block.nVersion = 2
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)

        self.log.info("Test that blocks must now be at least version 3")
        tip = block.sha256
        block_time += 1
        block = create_block(tip, create_coinbase(DERSIG_HEIGHT), block_time)
        block.nVersion = 2
        block.rehash()
        block.solve()

        with self.nodes[0].assert_debug_log(expected_msgs=['{}, bad-version(0x00000002)'.format(block.hash)]):
            self.nodes[0].p2p.send_and_ping(msg_block(block))
            assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
            self.nodes[0].p2p.sync_with_ping()

        self.log.info("Test that transactions with non-DER signatures cannot appear in a block")
        block.nVersion = 3

        spendtx = create_transaction(self.nodes[0], self.coinbase_txids[1],
                self.nodeaddress, amount=1.0)
        unDERify(spendtx)
        spendtx.rehash()

        # First we show that this tx is valid except for DERSIG by getting it
        # rejected from the mempool for exactly that reason.
        assert_equal(
            [{'txid': spendtx.hash, 'allowed': False, 'reject-reason': '64: non-mandatory-script-verify-flag (Non-canonical DER signature)'}],
            self.nodes[0].testmempoolaccept(rawtxs=[bytes_to_hex_str(spendtx.serialize())], allowhighfees=True)
        )

        # Now we verify that a block with this transaction is also invalid.
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        block.solve()

        with self.nodes[0].assert_debug_log(expected_msgs=['CheckInputs on {} failed with non-mandatory-script-verify-flag (Non-canonical DER signature)'.format(block.vtx[-1].hash)]):
            self.nodes[0].p2p.send_and_ping(msg_block(block))
            assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
            self.nodes[0].p2p.sync_with_ping()

        wait_until(lambda: "reject" in self.nodes[0].p2p.last_message.keys(), lock=mininode_lock)
        with mininode_lock:
            assert self.nodes[0].p2p.last_message["reject"].code in [REJECT_INVALID, REJECT_NONSTANDARD]
            assert_equal(self.nodes[0].p2p.last_message["reject"].data, block.sha256)
            assert b'Non-canonical DER signature' in self.nodes[0].p2p.last_message["reject"].reason

        self.log.info("Test that a version 3 block with a DERSIG-compliant transaction is accepted")
        block.vtx[1] = create_transaction(self.nodes[0], self.coinbase_txids[1], self.nodeaddress, amount=1.0)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
    def run_test(self):
        self.nodes[0].add_p2p_connection(P2PDataStore())

        self.log.info("Generate blocks in the past for coinbase outputs.")
        long_past_time = int(time.time()) - 600 * 1000  # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future
        self.nodes[0].setmocktime(long_past_time - 100)  # enough so that the generated blocks will still all be before long_past_time
        self.coinbase_blocks = self.nodes[0].generate(1 + 16 + 2 * 32 + 1)  # 82 blocks generated for inputs
        self.nodes[0].setmocktime(0)  # set time back to present so yielded blocks aren't in the future as we advance last_block_time
        self.tipheight = 82  # height of the next block to build
        self.last_block_time = long_past_time
        self.tip = int(self.nodes[0].getbestblockhash(), 16)
        self.nodeaddress = self.nodes[0].getnewaddress()

        self.log.info("Test that the csv softfork is DEFINED")
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'defined')
        test_blocks = self.generate_blocks(61, 4)
        self.sync_blocks(test_blocks)

        self.log.info("Advance from DEFINED to STARTED, height = 143")
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')

        self.log.info("Fail to achieve LOCKED_IN")
        # 100 out of 144 signal bit 0. Use a variety of bits to simulate multiple parallel softforks

        test_blocks = self.generate_blocks(50, 536870913)  # 0x20000001 (signalling ready)
        test_blocks = self.generate_blocks(20, 4, test_blocks)  # 0x00000004 (signalling not)
        test_blocks = self.generate_blocks(50, 536871169, test_blocks)  # 0x20000101 (signalling ready)
        test_blocks = self.generate_blocks(24, 536936448, test_blocks)  # 0x20010000 (signalling not)
        self.sync_blocks(test_blocks)

        self.log.info("Failed to advance past STARTED, height = 287")
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')

        self.log.info("Generate blocks to achieve LOCK-IN")
        # 108 out of 144 signal bit 0 to achieve lock-in
        # using a variety of bits to simulate multiple parallel softforks
        test_blocks = self.generate_blocks(58, 536870913)  # 0x20000001 (signalling ready)
        test_blocks = self.generate_blocks(26, 4, test_blocks)  # 0x00000004 (signalling not)
        test_blocks = self.generate_blocks(50, 536871169, test_blocks)  # 0x20000101 (signalling ready)
        test_blocks = self.generate_blocks(10, 536936448, test_blocks)  # 0x20010000 (signalling not)
        self.sync_blocks(test_blocks)

        self.log.info("Advanced from STARTED to LOCKED_IN, height = 431")
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')

        # Generate 140 more version 4 blocks
        test_blocks = self.generate_blocks(140, 4)
        self.sync_blocks(test_blocks)

        # Inputs at height = 572
        #
        # Put inputs for all tests in the chain at height 572 (tip now = 571) (time increases by 600s per block)
        # Note we reuse inputs for v1 and v2 txs so must test these separately
        # 16 normal inputs
        bip68inputs = []
        for i in range(16):
            bip68inputs.append(send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress))

        # 2 sets of 16 inputs with 10 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112basicinputs = []
        for j in range(2):
            inputs = []
            for i in range(16):
                inputs.append(send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress))
            bip112basicinputs.append(inputs)

        # 2 sets of 16 varied inputs with (relative_lock_time) OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112diverseinputs = []
        for j in range(2):
            inputs = []
            for i in range(16):
                inputs.append(send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress))
            bip112diverseinputs.append(inputs)

        # 1 special input with -1 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
        bip112specialinput = send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress)

        # 1 normal input
        bip113input = send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress)

        self.nodes[0].setmocktime(self.last_block_time + 600)
        inputblockhash = self.nodes[0].generate(1)[0]  # 1 block generated for inputs to be in chain at height 572
        self.nodes[0].setmocktime(0)
        self.tip = int(inputblockhash, 16)
        self.tipheight += 1
        self.last_block_time += 600
        assert_equal(len(self.nodes[0].getblock(inputblockhash, True)["tx"]), 82 + 1)

        # 2 more version 4 blocks
        test_blocks = self.generate_blocks(2, 4)
        self.sync_blocks(test_blocks)

        self.log.info("Not yet advanced to ACTIVE, height = 574 (will activate for block 576, not 575)")
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')

        # Test both version 1 and version 2 transactions for all tests
        # BIP113 test transaction will be modified before each use to put in appropriate block time
        bip113tx_v1 = create_transaction(self.nodes[0], bip113input, self.nodeaddress, amount=Decimal("49.98"))
        bip113tx_v1.vin[0].nSequence = 0xFFFFFFFE
        bip113tx_v1.nVersion = 1
        bip113tx_v2 = create_transaction(self.nodes[0], bip113input, self.nodeaddress, amount=Decimal("49.98"))
        bip113tx_v2.vin[0].nSequence = 0xFFFFFFFE
        bip113tx_v2.nVersion = 2

        # For BIP68 test all 16 relative sequence locktimes
        bip68txs_v1 = create_bip68txs(self.nodes[0], bip68inputs, 1, self.nodeaddress)
        bip68txs_v2 = create_bip68txs(self.nodes[0], bip68inputs, 2, self.nodeaddress)
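        # The helpers are assumed to return dicts where 'sdf' marks txs with
        # SEQUENCE_LOCKTIME_DISABLE_FLAG set in nSequence and 'stf' marks txs using the
        # time-based SEQUENCE_LOCKTIME_TYPE_FLAG; the filters below key off these flags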

        # For BIP112 test:
        # 16 relative sequence locktimes of 10 against 10 OP_CSV OP_DROP inputs
        bip112txs_vary_nSequence_v1 = create_bip112txs(self.nodes[0], bip112basicinputs[0], False, 1, self.nodeaddress)
        bip112txs_vary_nSequence_v2 = create_bip112txs(self.nodes[0], bip112basicinputs[0], False, 2, self.nodeaddress)
        # 16 relative sequence locktimes of 9 against 10 OP_CSV OP_DROP inputs
        bip112txs_vary_nSequence_9_v1 = create_bip112txs(self.nodes[0], bip112basicinputs[1], False, 1, self.nodeaddress, -1)
        bip112txs_vary_nSequence_9_v2 = create_bip112txs(self.nodes[0], bip112basicinputs[1], False, 2, self.nodeaddress, -1)
        # sequence lock time of 10 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
        bip112txs_vary_OP_CSV_v1 = create_bip112txs(self.nodes[0], bip112diverseinputs[0], True, 1, self.nodeaddress)
        bip112txs_vary_OP_CSV_v2 = create_bip112txs(self.nodes[0], bip112diverseinputs[0], True, 2, self.nodeaddress)
        # sequence lock time of 9 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
        bip112txs_vary_OP_CSV_9_v1 = create_bip112txs(self.nodes[0], bip112diverseinputs[1], True, 1, self.nodeaddress, -1)
        bip112txs_vary_OP_CSV_9_v2 = create_bip112txs(self.nodes[0], bip112diverseinputs[1], True, 2, self.nodeaddress, -1)
        # -1 OP_CSV OP_DROP input
        bip112tx_special_v1 = create_bip112special(self.nodes[0], bip112specialinput, 1, self.nodeaddress)
        bip112tx_special_v2 = create_bip112special(self.nodes[0], bip112specialinput, 2, self.nodeaddress)

        self.log.info("TESTING")

        self.log.info("Pre-Soft Fork Tests. All txs should pass.")
        self.log.info("Test version 1 txs")

        success_txs = []
        # add BIP113 tx and -1 CSV tx
        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
        success_txs.append(bip113signed1)
        success_txs.append(bip112tx_special_v1)
        # add BIP 68 txs
        success_txs.extend(all_rlt_txs(bip68txs_v1))
        # add BIP 112 with seq=10 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v1))
        # try BIP 112 with seq=9 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1))
        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        self.log.info("Test version 2 txs")

        success_txs = []
        # add BIP113 tx and -1 CSV tx
        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
        success_txs.append(bip113signed2)
        success_txs.append(bip112tx_special_v2)
        # add BIP 68 txs
        success_txs.extend(all_rlt_txs(bip68txs_v2))
        # add BIP 112 with seq=10 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v2))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v2))
        # try BIP 112 with seq=9 txs
        success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2))
        success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2))
        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # 1 more version 4 block to get us to height 575 so the fork should now be active for the next block
        test_blocks = self.generate_blocks(1, 4)
        self.sync_blocks(test_blocks)
        assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'active')

        self.log.info("Post-Soft Fork Tests.")

        self.log.info("BIP 113 tests")
        # BIP 113 tests should now fail regardless of version number if nLockTime isn't satisfied by new rules
        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5  # = MTP of prior block (not <) but < time put on current block
        bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
        for bip113tx in [bip113signed1, bip113signed2]:
            self.sync_blocks([self.create_test_block([bip113tx])], success=False)
        # BIP 113 tests should now pass if the locktime is < MTP
        bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1  # < MTP of prior block
        bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
        bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1  # < MTP of prior block
        bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
        for bip113tx in [bip113signed1, bip113signed2]:
            self.sync_blocks([self.create_test_block([bip113tx])])
            self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # Next block height = 580 after 4 blocks of random version
        test_blocks = self.generate_blocks(4, 1234)
        self.sync_blocks(test_blocks)

        self.log.info("BIP 68 tests")
        self.log.info("Test version 1 txs - all should still pass")

        success_txs = []
        success_txs.extend(all_rlt_txs(bip68txs_v1))
        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        self.log.info("Test version 2 txs")

        # All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass
        bip68success_txs = [tx['tx'] for tx in bip68txs_v2 if tx['sdf']]
        self.sync_blocks([self.create_test_block(bip68success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # All txs without flag fail as we are at delta height = 8 < 10 and delta time = 8 * 600 < 10 * 512
        bip68timetxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and tx['stf']]
        for tx in bip68timetxs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        bip68heighttxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and not tx['stf']]
        for tx in bip68heighttxs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        # Advance one block to 581
        test_blocks = self.generate_blocks(1, 1234)
        self.sync_blocks(test_blocks)

        # Height txs should fail and time txs should now pass 9 * 600 > 10 * 512
        bip68success_txs.extend(bip68timetxs)
        self.sync_blocks([self.create_test_block(bip68success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        for tx in bip68heighttxs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        # Advance one block to 582
        test_blocks = self.generate_blocks(1, 1234)
        self.sync_blocks(test_blocks)

        # All BIP 68 txs should pass
        bip68success_txs.extend(bip68heighttxs)
        self.sync_blocks([self.create_test_block(bip68success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        self.log.info("BIP 112 tests")
        self.log.info("Test version 1 txs")

        # -1 OP_CSV tx should fail
        self.sync_blocks([self.create_test_block([bip112tx_special_v1])], success=False)
        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass
        success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if tx['sdf']]
        success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if tx['sdf']]
        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, version 1 txs should now fail
        fail_txs = all_rlt_txs(bip112txs_vary_nSequence_v1)
        fail_txs += all_rlt_txs(bip112txs_vary_nSequence_9_v1)
        fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if not tx['sdf']]
        fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if not tx['sdf']]
        for tx in fail_txs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        self.log.info("Test version 2 txs")

        # -1 OP_CSV tx should fail
        self.sync_blocks([self.create_test_block([bip112tx_special_v2])], success=False)

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met)
        success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if tx['sdf']]
        success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if tx['sdf']]

        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs

        # All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check
        fail_txs = all_rlt_txs(bip112txs_vary_nSequence_9_v2)
        fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if not tx['sdf']]
        for tx in fail_txs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail
        fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if tx['sdf']]
        for tx in fail_txs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        # If sequencelock types mismatch, tx should fail
        fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and tx['stf']]
        fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']]
        for tx in fail_txs:
            self.sync_blocks([self.create_test_block([tx])], success=False)

        # Remaining txs should pass, just test masking works properly
        success_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and not tx['stf']]
        success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and not tx['stf']]
        self.sync_blocks([self.create_test_block(success_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # Additional test, of checking that comparison of two time types works properly
        time_txs = []
        for tx in [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']]:
            tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME | SEQ_TYPE_FLAG
            signtx = sign_transaction(self.nodes[0], tx)
            time_txs.append(signtx)

        self.sync_blocks([self.create_test_block(time_txs)])
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
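The 'sdf'/'stf' filters above select test transactions by how their nSequence fields are composed. A minimal illustrative sketch of that BIP 68 encoding (the constants and helper below are not part of the test itself; they mirror the SEQ_TYPE_FLAG/BASE_RELATIVE_LOCKTIME names the example already uses):

SEQUENCE_LOCKTIME_DISABLE_FLAG = 1 << 31  # 'sdf': relative lock-time not enforced
SEQUENCE_LOCKTIME_TYPE_FLAG = 1 << 22     # 'stf': value counts 512-second units, else blocks
SEQUENCE_LOCKTIME_MASK = 0x0000ffff       # low 16 bits carry the lock-time value

def make_nsequence(value, sdf, stf):
    """Compose an nSequence field the way the bip68/bip112 test vectors vary it."""
    nseq = value & SEQUENCE_LOCKTIME_MASK
    if sdf:
        nseq |= SEQUENCE_LOCKTIME_DISABLE_FLAG
    if stf:
        nseq |= SEQUENCE_LOCKTIME_TYPE_FLAG
    return nseq

# e.g. a 10-block relative lock:        make_nsequence(10, sdf=False, stf=False)
# e.g. a 10 * 512 second relative lock: make_nsequence(10, sdf=False, stf=True)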
Example #38
0
    def run_test(self):
        self.address = self.nodes[0].getnewaddress()
        self.ms_address = self.nodes[0].addmultisigaddress(
            1, [self.address])['address']
        self.wit_address = self.nodes[0].getnewaddress(
            address_type='p2sh-segwit')
        self.wit_ms_address = self.nodes[0].addmultisigaddress(
            1, [self.address], '', 'p2sh-segwit')['address']

        self.coinbase_blocks = self.nodes[0].generate(2)  # Block 2
        coinbase_txid = []
        for i in self.coinbase_blocks:
            coinbase_txid.append(self.nodes[0].getblock(i)['tx'][0])

        for i in range(COINBASE_MATURITY):
            block = create_block(
                int(self.nodes[0].getbestblockhash(), 16),
                create_coinbase(self.nodes[0].getblockcount() + 1),
                int(time.time()) + 2 + i)
            block.nVersion = 4
            block.hashMerkleRoot = block.calc_merkle_root()
            block.rehash()
            block.solve()
            print(self.nodes[0].submitblock(bytes_to_hex_str(
                block.serialize())))

        # Generate the number of signalling blocks that the continuation of the test case expects
        self.nodes[0].generate(863 - COINBASE_MATURITY - 2 - 2)
        self.lastblockhash = self.nodes[0].getbestblockhash()
        self.tip = int("0x" + self.lastblockhash, 0)
        self.lastblockheight = self.nodes[0].getblockcount()
        self.lastblocktime = int(time.time()) + self.lastblockheight + 1

        self.log.info(
            "Test 1: NULLDUMMY compliant base transactions should be accepted to mempool and mined before activation [430]"
        )
        test1txs = [
            create_transaction(self.nodes[0],
                               coinbase_txid[0],
                               self.ms_address,
                               amount=49)
        ]
        txid1 = self.nodes[0].sendrawtransaction(
            bytes_to_hex_str(test1txs[0].serialize_with_witness()), True)
        test1txs.append(
            create_transaction(self.nodes[0],
                               txid1,
                               self.ms_address,
                               amount=48))
        txid2 = self.nodes[0].sendrawtransaction(
            bytes_to_hex_str(test1txs[1].serialize_with_witness()), True)
        test1txs.append(
            create_transaction(self.nodes[0],
                               coinbase_txid[1],
                               self.wit_ms_address,
                               amount=49))
        txid3 = self.nodes[0].sendrawtransaction(
            bytes_to_hex_str(test1txs[2].serialize_with_witness()), True)
        self.block_submit(self.nodes[0], test1txs, False, True)

        self.log.info(
            "Test 2: Non-NULLDUMMY base multisig transaction should not be accepted to mempool before activation"
        )
        test2tx = create_transaction(self.nodes[0],
                                     txid2,
                                     self.ms_address,
                                     amount=47)
        trueDummy(test2tx)
        assert_raises_rpc_error(
            -26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction,
            bytes_to_hex_str(test2tx.serialize_with_witness()), True)

        self.log.info(
            "Test 3: Non-NULLDUMMY base transactions should be accepted in a block before activation [431]"
        )
        self.block_submit(self.nodes[0], [test2tx], False, True)

        self.log.info(
            "Test 4: Non-NULLDUMMY base multisig transaction is invalid after activation"
        )
        test4tx = create_transaction(self.nodes[0],
                                     test2tx.hash,
                                     self.address,
                                     amount=46)
        test6txs = [CTransaction(test4tx)]
        trueDummy(test4tx)
        assert_raises_rpc_error(
            -26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction,
            bytes_to_hex_str(test4tx.serialize_with_witness()), True)
        self.block_submit(self.nodes[0], [test4tx])

        self.log.info(
            "Test 5: Non-NULLDUMMY P2WSH multisig transaction invalid after activation"
        )
        test5tx = create_transaction(self.nodes[0],
                                     txid3,
                                     self.wit_address,
                                     amount=48)
        test6txs.append(CTransaction(test5tx))
        test5tx.wit.vtxinwit[0].scriptWitness.stack[0] = b'\x01'
        assert_raises_rpc_error(
            -26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction,
            bytes_to_hex_str(test5tx.serialize_with_witness()), True)
        self.block_submit(self.nodes[0], [test5tx], True)

        self.log.info(
            "Test 6: NULLDUMMY compliant base/witness transactions should be accepted to mempool and in block after activation [432]"
        )
        for i in test6txs:
            self.nodes[0].sendrawtransaction(
                bytes_to_hex_str(i.serialize_with_witness()), True)
        self.block_submit(self.nodes[0], test6txs, True, True)
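The trueDummy helper used above is defined elsewhere; a minimal sketch of that kind of mutation, assuming the test_framework.script CScript type (the function name and exact steps here are illustrative, not the test's own helper):

from test_framework.script import CScript

def true_dummy_sketch(tx):
    # OP_CHECKMULTISIG pops one extra "dummy" stack element, which NULLDUMMY
    # requires to be an empty push; swapping it for a non-empty push violates the
    # rule while leaving the signatures valid, since the scriptSig is not signed.
    elements = list(CScript(tx.vin[0].scriptSig))  # [dummy, sig1, ..., redeem_script]
    assert elements[0] == b''                      # starts out NULLDUMMY-compliant
    elements[0] = b'\x51'                          # any non-empty value will do
    tx.vin[0].scriptSig = CScript(elements)
    tx.rehash()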
Example #39
0
    def run_test(self):
        # Start mininode connection
        self.node = self.nodes[0]
        start_p2p_connection([self.node])

        block_gen_thread = BlockGenThread([self.node],
                                          self.log,
                                          num_txs=10000,
                                          interval_fixed=0.02)
        block_gen_thread.start()
        tx_n = 500000
        batch_size = 10
        send_tps = 20000
        all_txs = []
        gas_price = 1
        account_n = 1000

        generate = False
        if generate:
            f = open("encoded_tx", "wb")
            '''Test Random Transactions'''

            self.log.info("Setting up genesis secrets")
            balance_map = {}
            nonce_map = {}
            account_i = 0
            for prikey_str in open(
                    self.conf_parameters["genesis_secrets"][1:-1],
                    "r").readlines():
                prikey = decode_hex(prikey_str[:-1])
                balance_map[prikey] = 10000000000000000000000
                nonce_map[prikey] = 0
                account_i += 1
                if account_i == account_n:
                    break

            # genesis_key = default_config["GENESIS_PRI_KEY"]
            # balance_map = {genesis_key: default_config["TOTAL_COIN"]}
            # nonce_map = {genesis_key: 0}
            # # Initialize new accounts
            # new_keys = set()
            # for _ in range(account_n):
            #     value = int(balance_map[genesis_key] / (account_n * 2))
            #     receiver_sk, _ = ec_random_keys()
            #     new_keys.add(receiver_sk)
            #     tx = create_transaction(pri_key=genesis_key, receiver=priv_to_addr(receiver_sk), value=value,
            #                             nonce=nonce_map[genesis_key], gas_price=gas_price)
            #     all_txs.append(tx)
            #     balance_map[receiver_sk] = value
            #     nonce_map[genesis_key] += 1
            #     balance_map[genesis_key] -= value + gas_price * 21000
            # wait_for_account_stable()
            # for key in new_keys:
            #     nonce_map[key] = wait_for_initial_nonce_for_privkey(self.nodes[0], key)

            self.log.info("start to generate %d transactions", tx_n)
            account_list = list(balance_map)
            for i in range(tx_n):
                if i % 1000 == 0:
                    self.log.info("generated %d tx", i)
                sender_key = random.choice(account_list)
                if sender_key not in nonce_map:
                    nonce_map[sender_key] = wait_for_initial_nonce_for_privkey(
                        self.nodes[0], sender_key)
                nonce = nonce_map[sender_key]
                value = 1
                receiver_sk = random.choice(account_list)
                balance_map[receiver_sk] += value
                # not enough transaction fee (gas_price * gas_limit) should not happen for now
                assert balance_map[sender_key] >= value + gas_price * 21000
                tx = create_transaction(pri_key=sender_key,
                                        receiver=priv_to_addr(receiver_sk),
                                        value=value,
                                        nonce=nonce,
                                        gas_price=gas_price)
                # self.log.debug("%s send %d to %s nonce=%d balance: sender=%s, receiver=%s", encode_hex(priv_to_addr(sender_key)), value, encode_hex(priv_to_addr(receiver_sk)), nonce, balance_map[sender_key], balance_map[receiver_sk])
                all_txs.append(tx)
                nonce_map[sender_key] = nonce + 1
                balance_map[sender_key] -= value + gas_price * 21000
            encoded_txs = []
            batch_tx = []
            i = 0
            for tx in all_txs:
                batch_tx.append(tx)
                i += 1
                if i % batch_size == 0:
                    encoded = rlp.encode(Transactions(transactions=batch_tx))
                    encoded_txs.append(encoded)
                    batch_tx = []
            pickle.dump(encoded_txs, f)
            pickle.dump(balance_map, f)
        else:
            f = open("encoded_tx", "rb")
            encoded_txs = pickle.load(f)
            balance_map = pickle.load(f)

        f.close()
        start_time = time.time()
        i = 0
        for encoded in encoded_txs:
            i += 1
            if i * batch_size % send_tps == 0:
                time_used = time.time() - start_time
                # Throttle to the configured send rate; never sleep a negative amount
                time.sleep(max(0.0, i * batch_size / send_tps - time_used))
            self.node.p2p.send_protocol_packet(encoded +
                                               int_to_bytes(TRANSACTIONS))
        time_used = time.time() - start_time
        self.log.info(
            f"Time used to send {tx_n} transactions in {i} iterations: {time_used}"
        )

        for k in balance_map:
            wait_until(lambda: self.check_account(k, balance_map), timeout=600)
        time_used = time.time() - start_time
        block_gen_thread.stop()
        block_gen_thread.join()
        self.log.info("Time used: %f seconds", time_used)
        self.log.info("Tx per second: %f", tx_n / time_used)
Example #40
0
 def create_spend_tx(self, scriptSig):
     self.tx2 = create_transaction(self.tx1, 0, CScript(), 0)
     self.tx2.vin[0].scriptSig = scriptSig
     self.tx2.vout[0].scriptPubKey = CScript()
     self.tx2.rehash()
Example #41
0
    def run_test(self):
        self.nodes[0].add_p2p_connection(P2PInterface())

        self.log.info("Mining %d blocks", CLTV_HEIGHT - 2)
        self.coinbase_txids = [
            self.nodes[0].getblock(b)['tx'][0]
            for b in self.nodes[0].generate(CLTV_HEIGHT - 2)
        ]
        self.nodeaddress = self.nodes[0].getnewaddress()

        self.log.info(
            "Test that an invalid-according-to-CLTV transaction can still appear in a block"
        )

        spendtx = create_transaction(self.nodes[0],
                                     self.coinbase_txids[0],
                                     self.nodeaddress,
                                     amount=1.0)
        cltv_invalidate(spendtx)
        spendtx.rehash()

        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(CLTV_HEIGHT - 1),
                             block_time)
        block.nVersion = 3
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)

        self.log.info("Test that blocks must now be at least version 4")
        tip = block.sha256
        block_time += 1
        block = create_block(tip, create_coinbase(CLTV_HEIGHT), block_time)
        block.nVersion = 3
        block.solve()
        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)

        wait_until(lambda: "reject" in self.nodes[0].p2p.last_message.keys(),
                   lock=mininode_lock)
        with mininode_lock:
            assert_equal(self.nodes[0].p2p.last_message["reject"].code,
                         REJECT_OBSOLETE)
            assert_equal(self.nodes[0].p2p.last_message["reject"].reason,
                         b'bad-version(0x00000003)')
            assert_equal(self.nodes[0].p2p.last_message["reject"].data,
                         block.sha256)
            del self.nodes[0].p2p.last_message["reject"]

        self.log.info(
            "Test that invalid-according-to-cltv transactions cannot appear in a block"
        )
        block.nVersion = 4

        spendtx = create_transaction(self.nodes[0],
                                     self.coinbase_txids[1],
                                     self.nodeaddress,
                                     amount=1.0)
        cltv_invalidate(spendtx)
        spendtx.rehash()

        # First we show that this tx is valid except for CLTV by getting it
        # rejected from the mempool for exactly that reason.
        assert_equal([{
            'txid':
            spendtx.hash,
            'allowed':
            False,
            'reject-reason':
            '64: non-mandatory-script-verify-flag (Negative locktime)'
        }],
                     self.nodes[0].testmempoolaccept(
                         rawtxs=[bytes_to_hex_str(spendtx.serialize())],
                         allowhighfees=True))

        # Now we verify that a block with this transaction is also invalid.
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)

        wait_until(lambda: "reject" in self.nodes[0].p2p.last_message.keys(),
                   lock=mininode_lock)
        with mininode_lock:
            assert self.nodes[0].p2p.last_message["reject"].code in [
                REJECT_INVALID, REJECT_NONSTANDARD
            ]
            assert_equal(self.nodes[0].p2p.last_message["reject"].data,
                         block.sha256)
            if self.nodes[0].p2p.last_message["reject"].code == REJECT_INVALID:
                # Generic rejection when a block is invalid
                assert_equal(self.nodes[0].p2p.last_message["reject"].reason,
                             b'block-validation-failed')
            else:
                assert b'Negative locktime' in self.nodes[0].p2p.last_message[
                    "reject"].reason

        self.log.info(
            "Test that a version 4 block with a valid-according-to-CLTV transaction is accepted"
        )
        spendtx = cltv_validate(self.nodes[0], spendtx, CLTV_HEIGHT - 1)
        spendtx.rehash()

        block.vtx.pop(1)
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

        self.nodes[0].p2p.send_and_ping(msg_block(block))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
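The cltv_invalidate helper called above is not shown in this example; a minimal sketch of the usual approach, assuming the standard test_framework.script opcodes, which also explains the 'Negative locktime' reject reason checked earlier:

from test_framework.script import CScript, OP_1NEGATE, OP_CHECKLOCKTIMEVERIFY, OP_DROP

def cltv_invalidate_sketch(tx):
    # Prefix the scriptSig with "-1 OP_CHECKLOCKTIMEVERIFY OP_DROP": once CLTV is
    # enforced, OP_CHECKLOCKTIMEVERIFY fails on the negative operand, producing
    # the 'Negative locktime' rejection; before enforcement it acts as a NOP.
    tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_CHECKLOCKTIMEVERIFY, OP_DROP] +
                                  list(CScript(tx.vin[0].scriptSig)))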
Example #42
0
def send_generic_input_tx(node, coinbases, address):
    return node.sendrawtransaction(ToHex(sign_transaction(node, create_transaction(node, node.getblock(coinbases.pop())['tx'][0], address, amount=Decimal("49.99")))))
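A hypothetical usage sketch for this helper (the coinbase_blocks and address names below are assumptions, not part of the snippet):

# coinbase_blocks: a list of matured block hashes; address: a wallet address on `node`.
input_txids = [send_generic_input_tx(node, coinbase_blocks, address) for _ in range(3)]
node.generate(1)  # confirm the funding transactions before building test blocks on them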
Example #43
0
    def run_test(self):
        # Setup the p2p connections and start up the network thread.
        test_node = NodeConnCB()  # connects to node0
        min_work_node = NodeConnCB()  # connects to node1

        connections = []
        connections.append(
            NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node))
        connections.append(
            NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], min_work_node))
        test_node.add_connection(connections[0])
        min_work_node.add_connection(connections[1])

        NetworkThread().start()  # Start up network handling in another thread

        # Test logic begins here
        test_node.wait_for_verack()
        min_work_node.wait_for_verack()

        # 1. Have nodes mine a block (leave IBD)
        [n.generate(1) for n in self.nodes]
        tips = [int("0x" + n.getbestblockhash(), 0) for n in self.nodes]

        # 2. Send one block that builds on each tip.
        # This should be accepted by node0
        blocks_h2 = []  # the height 2 blocks on each node's chain
        block_time = int(time.time()) + 1
        for i in range(2):
            blocks_h2.append(
                create_block(tips[i], create_coinbase(2), block_time))
            blocks_h2[i].solve()
            block_time += 1
        test_node.send_message(msg_block(blocks_h2[0]))
        min_work_node.send_message(msg_block(blocks_h2[1]))

        for x in [test_node, min_work_node]:
            x.sync_with_ping()
        assert_equal(self.nodes[0].getblockcount(), 2)
        assert_equal(self.nodes[1].getblockcount(), 1)
        self.log.info(
            "First height 2 block accepted by node0; correctly rejected by node1"
        )

        # 3. Send another block that builds on genesis.
        block_h1f = create_block(int("0x" + self.nodes[0].getblockhash(0), 0),
                                 create_coinbase(1), block_time)
        block_time += 1
        block_h1f.solve()
        test_node.send_message(msg_block(block_h1f))

        test_node.sync_with_ping()
        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_h1f.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert (tip_entry_found)
        assert_raises_rpc_error(-1, "Block not found on disk",
                                self.nodes[0].getblock, block_h1f.hash)

        # 4. Send another two blocks that build on the fork.
        block_h2f = create_block(block_h1f.sha256, create_coinbase(2),
                                 block_time)
        block_time += 1
        block_h2f.solve()
        test_node.send_message(msg_block(block_h2f))

        test_node.sync_with_ping()
        # Since the earlier block was not processed by node, the new block
        # can't be fully validated.
        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_h2f.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert (tip_entry_found)

        # But this block should be accepted by node since it has equal work.
        self.nodes[0].getblock(block_h2f.hash)
        self.log.info("Second height 2 block accepted, but not reorg'ed to")

        # 4b. Now send another block that builds on the forking chain.
        block_h3 = create_block(block_h2f.sha256, create_coinbase(3),
                                block_h2f.nTime + 1)
        block_h3.solve()
        test_node.send_message(msg_block(block_h3))

        test_node.sync_with_ping()
        # Since the earlier block was not processed by node, the new block
        # can't be fully validated.
        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_h3.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert (tip_entry_found)
        self.nodes[0].getblock(block_h3.hash)

        # But this block should be accepted by node since it has more work.
        self.nodes[0].getblock(block_h3.hash)
        self.log.info("Unrequested more-work block accepted")

        # 4c. Now mine 288 more blocks and deliver; all should be processed but
        # the last (height-too-high) on node (as long as it's not missing any headers)
        tip = block_h3
        all_blocks = []
        for i in range(288):
            next_block = create_block(tip.sha256, create_coinbase(i + 4),
                                      tip.nTime + 1)
            next_block.solve()
            all_blocks.append(next_block)
            tip = next_block

        # Now send the block at height 5 and check that it wasn't accepted (missing header)
        test_node.send_message(msg_block(all_blocks[1]))
        test_node.sync_with_ping()
        assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblock,
                                all_blocks[1].hash)
        assert_raises_rpc_error(-5, "Block not found",
                                self.nodes[0].getblockheader,
                                all_blocks[1].hash)

        # The block at height 5 should be accepted if we provide the missing header, though
        headers_message = msg_headers()
        headers_message.headers.append(CBlockHeader(all_blocks[0]))
        test_node.send_message(headers_message)
        test_node.send_message(msg_block(all_blocks[1]))
        test_node.sync_with_ping()
        self.nodes[0].getblock(all_blocks[1].hash)

        # Now send the blocks in all_blocks
        for i in range(288):
            test_node.send_message(msg_block(all_blocks[i]))
        test_node.sync_with_ping()

        # Blocks 1-287 should be accepted, block 288 should be ignored because it's too far ahead
        for x in all_blocks[:-1]:
            self.nodes[0].getblock(x.hash)
        assert_raises_rpc_error(-1, "Block not found on disk",
                                self.nodes[0].getblock, all_blocks[-1].hash)

        # 5. Test handling of unrequested block on the node that didn't process
        # Should still not be processed (even though it has a child that has more
        # work).

        # The node should have requested the blocks at some point, so
        # disconnect/reconnect first
        connections[0].disconnect_node()
        test_node.wait_for_disconnect()

        test_node = NodeConnCB()  # connects to node (not whitelisted)
        connections[0] = NodeConn('127.0.0.1', p2p_port(0), self.nodes[0],
                                  test_node)
        test_node.add_connection(connections[0])

        test_node.wait_for_verack()
        test_node.send_message(msg_block(block_h1f))

        test_node.sync_with_ping()
        assert_equal(self.nodes[0].getblockcount(), 2)
        self.log.info(
            "Unrequested block that would complete more-work chain was ignored"
        )

        # 6. Try to get node to request the missing block.
        # Poke the node with an inv for block at height 3 and see if that
        # triggers a getdata on block 2 (it should if block 2 is missing).
        with mininode_lock:
            # Clear state so we can check the getdata request
            test_node.last_message.pop("getdata", None)
            test_node.send_message(msg_inv([CInv(2, block_h3.sha256)]))

        test_node.sync_with_ping()
        with mininode_lock:
            getdata = test_node.last_message["getdata"]

        # Check that the getdata includes the right block
        assert_equal(getdata.inv[0].hash, block_h1f.sha256)
        self.log.info("Inv at tip triggered getdata for unprocessed block")

        # 7. Send the missing block for the third time (now it is requested)
        test_node.send_message(msg_block(block_h1f))

        test_node.sync_with_ping()
        assert_equal(self.nodes[0].getblockcount(), 290)
        self.nodes[0].getblock(all_blocks[286].hash)
        assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash)
        assert_raises_rpc_error(-1, "Block not found on disk",
                                self.nodes[0].getblock, all_blocks[287].hash)
        self.log.info(
            "Successfully reorged to longer chain from non-whitelisted peer")

        # 8. Create a chain which is invalid at a height longer than the
        # current chain, but which has more blocks on top of that
        block_289f = create_block(all_blocks[284].sha256, create_coinbase(289),
                                  all_blocks[284].nTime + 1)
        block_289f.solve()
        block_290f = create_block(block_289f.sha256, create_coinbase(290),
                                  block_289f.nTime + 1)
        block_290f.solve()
        block_291 = create_block(block_290f.sha256, create_coinbase(291),
                                 block_290f.nTime + 1)
        # block_291 spends a coinbase below maturity!
        block_291.vtx.append(create_transaction(block_290f.vtx[0], 0, b"42",
                                                1))
        block_291.hashMerkleRoot = block_291.calc_merkle_root()
        block_291.solve()
        block_292 = create_block(block_291.sha256, create_coinbase(292),
                                 block_291.nTime + 1)
        block_292.solve()

        # Now send all the headers on the chain and enough blocks to trigger reorg
        headers_message = msg_headers()
        headers_message.headers.append(CBlockHeader(block_289f))
        headers_message.headers.append(CBlockHeader(block_290f))
        headers_message.headers.append(CBlockHeader(block_291))
        headers_message.headers.append(CBlockHeader(block_292))
        test_node.send_message(headers_message)

        test_node.sync_with_ping()
        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_292.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert (tip_entry_found)
        assert_raises_rpc_error(-1, "Block not found on disk",
                                self.nodes[0].getblock, block_292.hash)

        test_node.send_message(msg_block(block_289f))
        test_node.send_message(msg_block(block_290f))

        test_node.sync_with_ping()
        self.nodes[0].getblock(block_289f.hash)
        self.nodes[0].getblock(block_290f.hash)

        test_node.send_message(msg_block(block_291))

        # At this point we've sent an obviously-bogus block, wait for full processing
        # without assuming whether we will be disconnected or not
        try:
            # Only wait a short while so the test doesn't take forever if we do get
            # disconnected
            test_node.sync_with_ping(timeout=1)
        except AssertionError:
            test_node.wait_for_disconnect()

            test_node = NodeConnCB()  # connects to node (not whitelisted)
            connections[0] = NodeConn('127.0.0.1', p2p_port(0), self.nodes[0],
                                      test_node)
            test_node.add_connection(connections[0])

            NetworkThread().start()  # Start up network handling in another thread
            test_node.wait_for_verack()

        # We should have failed reorg and switched back to 290 (but have block 291)
        assert_equal(self.nodes[0].getblockcount(), 290)
        assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash)
        assert_equal(self.nodes[0].getblock(block_291.hash)["confirmations"],
                     -1)

        # Now send a new header on the invalid chain, indicating we're forked off, and expect to get disconnected
        block_293 = create_block(block_292.sha256, create_coinbase(293),
                                 block_292.nTime + 1)
        block_293.solve()
        headers_message = msg_headers()
        headers_message.headers.append(CBlockHeader(block_293))
        test_node.send_message(headers_message)
        test_node.wait_for_disconnect()

        # 9. Connect node1 to node0 and ensure it is able to sync
        connect_nodes(self.nodes[0], 1)
        sync_blocks([self.nodes[0], self.nodes[1]])
        self.log.info("Successfully synced nodes 1 and 0")

        [c.disconnect_node() for c in connections]
Example #44
0
    def run_test(self):
        # Add p2p connection to node0
        node = self.nodes[0]  # convenience reference to the node
        node.add_p2p_connection(P2PDataStore())
        node.p2p.wait_for_verack()

        best_block = node.getblock(node.getbestblockhash())
        tip = int(node.getbestblockhash(), 16)
        height = best_block["height"] + 1
        block_time = best_block["time"] + 1

        self.log.info("Create a new block with an anyone-can-spend coinbase")

        height = 1
        block = create_block(tip, create_coinbase(height), block_time)
        block.solve()
        # Save the coinbase for later
        block1 = block
        tip = block.sha256
        node.p2p.send_blocks_and_test([block1], node, True)

        self.log.info("Mature the block.")
        node.generate(100)

        best_block = node.getblock(node.getbestblockhash())
        tip = int(node.getbestblockhash(), 16)
        height = best_block["height"] + 1
        block_time = best_block["time"] + 1

        # Use merkle-root malleability to generate an invalid block with
        # same blockheader.
        # Manufacture a block with 3 transactions (coinbase, spend of prior
        # coinbase, spend of that spend).  Duplicate the 3rd transaction to
        # leave merkle root and blockheader unchanged but invalidate the block.
        self.log.info("Test merkle root malleability.")

        block2 = create_block(tip, create_coinbase(height), block_time)
        block_time += 1

        # b'\x51' is OP_TRUE
        tx1 = create_transaction(block1.vtx[0], 0, b'\x51', 50 * COIN)
        tx2 = create_transaction(tx1, 0, b'\x51', 50 * COIN)

        block2.vtx.extend([tx1, tx2])
        block2.hashMerkleRoot = block2.calc_merkle_root()
        block2.rehash()
        block2.solve()
        orig_hash = block2.sha256
        block2_orig = copy.deepcopy(block2)

        # Mutate block 2
        block2.vtx.append(tx2)
        assert_equal(block2.hashMerkleRoot, block2.calc_merkle_root())
        assert_equal(orig_hash, block2.rehash())
        assert (block2_orig.vtx != block2.vtx)

        node.p2p.send_blocks_and_test([block2], node, False, False, 16,
                                      b'bad-txns-duplicate')

        self.log.info("Test very broken block.")

        block3 = create_block(tip, create_coinbase(height), block_time)
        block_time += 1
        block3.vtx[0].vout[0].nValue = 100 * COIN  # Too high!
        block3.vtx[0].sha256 = None
        block3.vtx[0].calc_sha256()
        block3.hashMerkleRoot = block3.calc_merkle_root()
        block3.rehash()
        block3.solve()

        node.p2p.send_blocks_and_test([block3], node, False, False, 16,
                                      b'bad-cb-amount')
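Why duplicating the last transaction leaves the merkle root unchanged: Bitcoin-style merkle trees pair the last element with itself when a level has an odd count, so [a, b, c] and [a, b, c, c] collapse to the same root. A self-contained sketch, independent of the test framework:

import hashlib

def dsha256(b):
    return hashlib.sha256(hashlib.sha256(b).digest()).digest()

def merkle_root(hashes):
    # Bitcoin-style merkle: the last hash is paired with itself on odd levels.
    if len(hashes) == 1:
        return hashes[0]
    if len(hashes) % 2:
        hashes = hashes + [hashes[-1]]
    parents = [dsha256(hashes[i] + hashes[i + 1]) for i in range(0, len(hashes), 2)]
    return merkle_root(parents)

txids = [dsha256(bytes([i])) for i in range(3)]  # stand-ins for coinbase, tx1, tx2
assert merkle_root(txids) == merkle_root(txids + [txids[-1]])  # duplicate last tx, same root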
Example #45
0
    def run_test(self):
        peer = self.nodes[0].add_p2p_connection(P2PInterface())

        self.test_dersig_info(is_active=False)

        self.log.info("Mining %d blocks", DERSIG_HEIGHT - 2)
        self.coinbase_txids = [
            self.nodes[0].getblock(b)['tx'][0]
            for b in self.nodes[0].generate(DERSIG_HEIGHT - 2)
        ]
        self.nodeaddress = self.nodes[0].getnewaddress()

        self.log.info(
            "Test that a transaction with non-DER signature can still appear in a block"
        )

        spendtx = create_transaction(self.nodes[0],
                                     self.coinbase_txids[0],
                                     self.nodeaddress,
                                     amount=1.0)
        unDERify(spendtx)
        spendtx.rehash()

        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
        block = create_block(int(tip, 16), create_coinbase(DERSIG_HEIGHT - 1),
                             block_time)
        block.nVersion = VB_TOP_BITS
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        block.solve()

        self.test_dersig_info(
            is_active=False
        )  # Not active as of current tip and next block does not need to obey rules
        peer.send_and_ping(msg_block(block))
        self.test_dersig_info(
            is_active=True
        )  # Not active as of current tip, but next block must obey rules
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)

        self.log.info("Test that blocks must now be at least VB_TOP_BITS")
        tip = block.sha256
        block_time += 1
        block = create_block(tip, create_coinbase(DERSIG_HEIGHT), block_time)
        block.nVersion = 2
        block.rehash()
        block.solve()

        with self.nodes[0].assert_debug_log(expected_msgs=[
                '{}, bad-version(0x00000002)'.format(block.hash)
        ]):
            peer.send_and_ping(msg_block(block))
            assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
            peer.sync_with_ping()

        self.log.info(
            "Test that transactions with non-DER signatures cannot appear in a block"
        )
        block.nVersion = VB_TOP_BITS

        spendtx = create_transaction(self.nodes[0],
                                     self.coinbase_txids[1],
                                     self.nodeaddress,
                                     amount=1.0)
        unDERify(spendtx)
        spendtx.rehash()

        # First we show that this tx is valid except for DERSIG by getting it
        # rejected from the mempool for exactly that reason.
        assert_equal([{
            'txid':
            spendtx.hash,
            'allowed':
            False,
            'reject-reason':
            'non-mandatory-script-verify-flag (Non-canonical DER signature)'
        }], self.nodes[0].testmempoolaccept(rawtxs=[spendtx.serialize().hex()],
                                            maxfeerate=0))

        # Now we verify that a block with this transaction is also invalid.
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        block.solve()

        with self.nodes[0].assert_debug_log(expected_msgs=[
                'CheckInputScripts on {} failed with non-mandatory-script-verify-flag (Non-canonical DER signature)'
                .format(block.vtx[-1].hash)
        ]):
            peer.send_and_ping(msg_block(block))
            assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
            peer.sync_with_ping()

        self.log.info(
            "Test that a version 3 block with a DERSIG-compliant transaction is accepted"
        )
        block.vtx[1] = create_transaction(self.nodes[0],
                                          self.coinbase_txids[1],
                                          self.nodeaddress,
                                          amount=1.0)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        block.solve()

        self.test_dersig_info(
            is_active=True
        )  # Not active as of current tip, but next block must obey rules
        peer.send_and_ping(msg_block(block))
        self.test_dersig_info(is_active=True)  # Active as of current tip
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
Example #46
0
def create_bip112special(node, input, txversion, address):
    tx = create_transaction(node, input, address, amount=Decimal("49.98"))
    tx.nVersion = txversion
    signtx = sign_transaction(node, tx)
    signtx.vin[0].scriptSig = CScript([-1, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
    return signtx
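A hypothetical usage sketch for this helper (node, funding_txid, and node_address are assumed names): it builds the "-1 OP_CSV" transaction the BIP 112 tests expect to be rejected once CSV is active.

special_tx = create_bip112special(node, funding_txid, txversion=2, address=node_address)
# Before activation the prepended "-1 OP_CHECKSEQUENCEVERIFY OP_DROP" is a no-op
# (OP_CSV is still OP_NOP3); afterwards OP_CSV fails on the negative operand.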
Example #47
0
    def run_test(self):
        # Prevent easysolc from configuring the root logger to print to stderr
        self.log.propagate = False

        solc = Solc()
        erc20_contract = solc.get_contract_instance(
            source=os.path.dirname(os.path.realpath(__file__)) + "/erc20.sol",
            contract_name="FixedSupplyToken")

        start_p2p_connection(self.nodes)

        self.log.info("Initializing contract")
        genesis_key = default_config["GENESIS_PRI_KEY"]
        genesis_addr = privtoaddr(genesis_key)
        nonce = 0
        gas_price = 1
        gas = 50000000
        block_gen_thread = BlockGenThread(self.nodes, self.log)
        block_gen_thread.start()
        self.tx_conf = {
            "from": Web3.toChecksumAddress(encode_hex_0x(genesis_addr)),
            "nonce": int_to_hex(nonce),
            "gas": int_to_hex(gas),
            "gasPrice": int_to_hex(gas_price)
        }
        raw_create = erc20_contract.constructor().buildTransaction(
            self.tx_conf)
        tx_data = decode_hex(raw_create["data"])
        tx_create = create_transaction(pri_key=genesis_key,
                                       receiver=b'',
                                       nonce=nonce,
                                       gas_price=gas_price,
                                       data=tx_data,
                                       gas=gas,
                                       value=0)
        self.nodes[0].p2p.send_protocol_msg(
            Transactions(transactions=[tx_create]))
        self.wait_for_tx([tx_create])
        self.log.info("Contract created, start transfering tokens")

        tx_n = 10
        self.tx_conf["to"] = Web3.toChecksumAddress(
            encode_hex_0x(sha3_256(rlp.encode([genesis_addr, nonce]))[-20:]))
        nonce += 1
        balance_map = {genesis_key: 1000000 * 10**18}
        sender_key = genesis_key
        all_txs = []
        for i in range(tx_n):
            value = int((balance_map[sender_key] -
                         ((tx_n - i) * 21000 * gas_price)) * random.random())
            receiver_sk, _ = ec_random_keys()
            balance_map[receiver_sk] = value
            tx_data = decode_hex(
                erc20_contract.functions.transfer(
                    Web3.toChecksumAddress(encode_hex(
                        privtoaddr(receiver_sk))),
                    value).buildTransaction(self.tx_conf)["data"])
            tx = create_transaction(pri_key=sender_key,
                                    receiver=decode_hex(self.tx_conf["to"]),
                                    value=0,
                                    nonce=nonce,
                                    gas=gas,
                                    gas_price=gas_price,
                                    data=tx_data)
            r = random.randint(0, self.num_nodes - 1)
            self.nodes[r].p2p.send_protocol_msg(
                Transactions(transactions=[tx]))
            nonce += 1
            balance_map[sender_key] -= value
            all_txs.append(tx)
        self.log.info("Wait for transactions to be executed")
        self.wait_for_tx(all_txs)
        self.log.info("Check final token balance")
        for sk in balance_map:
            addr = privtoaddr(sk)
            assert_equal(self.get_balance(erc20_contract, addr, nonce),
                         balance_map[sk])
        block_gen_thread.stop()
        block_gen_thread.join()
        sync_blocks(self.nodes)
        self.log.info("Pass")