def run_test(self):
    with self.run_node_with_connections(
            "Scenario 1", 0,
            ['-banscore=100000', '-genesisactivationheight=110', '-maxstdtxvalidationduration=100'],
            number_of_connections=1) as (conn, ):

        coinbase1 = make_new_block(conn)

        for _ in range(110):
            make_new_block(conn)

        tx_parent = create_transaction(coinbase1, 0, CScript(),
                                       coinbase1.vout[0].nValue - 1000, CScript([OP_TRUE]))
        tx_parent.rehash()

        tx_orphan = make_big_orphan(tx_parent, DEFAULT_MAX_TX_SIZE_POLICY_AFTER_GENESIS)
        assert_equal(len(tx_orphan.serialize()), DEFAULT_MAX_TX_SIZE_POLICY_AFTER_GENESIS)

        conn.send_message(msg_tx(tx_orphan))

        # Make sure the parent is not sent right away, so bitcoind detects an orphan
        time.sleep(1)

        conn.send_message(msg_tx(tx_parent))

        self.check_mempool(conn.rpc, [tx_parent, tx_orphan])
def get_tests(self):
    # Shorthand for functions
    block = self.chain.next_block
    node = self.nodes[0]
    self.chain.set_genesis_hash(int(node.getbestblockhash(), 16))

    block(0)
    yield self.accepted()

    test, out, _ = prepare_init_chain(self.chain, 100, 100)
    yield test

    # Create transaction with OP_RETURN in the locking script.
    tx1 = create_transaction(out[1].tx, out[1].n, b"", 100000, CScript([OP_RETURN]))
    self.test.connections[0].send_message(msg_tx(tx1))
    # Wait for transaction processing.
    sleep(1)

    # Mine block (height 102) with the new transaction.
    self.nodes[0].generate(1)

    # Obtain the newly mined block. It should contain the new transaction tx1.
    tx = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['tx']
    assert_equal(len(tx), 2)
    assert_equal(tx1.hash, tx[1])
    self.log.info("Created transaction %s at height %d",
                  tx1.hash, self.genesisactivationheight - 2)

    # Create transaction with OP_TRUE in the unlocking script that tries to spend tx1.
    tx2 = create_transaction(tx1, 0, b'\x51', 1, CScript([OP_TRUE]))
    self.test.connections[0].send_message(msg_tx(tx2))
    # Wait for transaction processing.
    sleep(1)

    # Mine block (height 103).
    self.nodes[0].generate(1)

    # Obtain the newly mined block. It should NOT contain the new transaction tx2.
    tx = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['tx']
    assert_equal(len(tx), 1)
    self.log.info("Created transaction %s at height %d that tries to spend the transaction at height %d",
                  tx2.hash, self.genesisactivationheight - 1, self.genesisactivationheight - 2)

    # Create transaction with OP_RETURN in the locking script.
    tx3 = create_transaction(out[2].tx, out[2].n, b"", 100000, CScript([OP_RETURN]))
    self.test.connections[0].send_message(msg_tx(tx3))

    # Create transaction with OP_TRUE in the unlocking script that tries to spend tx3.
    tx4 = create_transaction(tx3, 0, b'\x51', 1, CScript([OP_TRUE]))
    self.test.connections[0].send_message(msg_tx(tx4))
    # Wait for transaction processing.
    sleep(1)

    # Mine block (height 104) with the new transactions.
    self.nodes[0].generate(1)

    # Obtain the newly mined block. It should contain the new transactions tx3 and tx4.
    tx = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['tx']
    assert_equal(len(tx), 3)
    assert_equal(tx3.hash, tx[1])
    assert_equal(tx4.hash, tx[2])
    self.log.info("Created transactions %s and %s at height %d that try to spend the transaction at height %d",
                  tx3.hash, tx4.hash, self.genesisactivationheight, self.genesisactivationheight)
def run_test(self):
    test_node = TestNode()

    connections = []
    connections.append(
        NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node, "regtest", True))
    test_node.add_connection(connections[0])

    # Start up network handling in another thread
    NetworkThread().start()
    test_node.wait_for_verack()

    # Verify mininodes are connected to zcashd nodes
    peerinfo = self.nodes[0].getpeerinfo()
    versions = [x["version"] for x in peerinfo]
    assert_equal(1, versions.count(OVERWINTER_PROTO_VERSION))
    assert_equal(0, peerinfo[0]["banscore"])

    self.coinbase_blocks = self.nodes[0].generate(1)
    self.nodes[0].generate(100)
    self.nodeaddress = self.nodes[0].getnewaddress()

    # Mininodes send transaction to zcashd node.
    def setExpiryHeight(tx):
        tx.nExpiryHeight = 101

    spendtx = self.create_transaction(self.nodes[0],
                                      self.coinbase_blocks[0],
                                      self.nodeaddress,
                                      1.0,
                                      txModifier=setExpiryHeight)
    test_node.send_message(msg_tx(spendtx))

    time.sleep(3)

    # Verify test mininode has not been dropped
    # and still has a banscore of 0.
    peerinfo = self.nodes[0].getpeerinfo()
    versions = [x["version"] for x in peerinfo]
    assert_equal(1, versions.count(OVERWINTER_PROTO_VERSION))
    assert_equal(0, peerinfo[0]["banscore"])

    # Mine a block and resend the transaction
    self.nodes[0].generate(1)
    test_node.send_message(msg_tx(spendtx))

    time.sleep(3)

    # Verify test mininode has not been dropped
    # but has a banscore of 10.
    peerinfo = self.nodes[0].getpeerinfo()
    versions = [x["version"] for x in peerinfo]
    assert_equal(1, versions.count(OVERWINTER_PROTO_VERSION))
    assert_equal(10, peerinfo[0]["banscore"])

    [c.disconnect_node() for c in connections]
def get_tests(self):
    # Shorthand for functions
    block = self.chain.next_block
    node = get_rpc_proxy(self.nodes[0].url, 1, timeout=6000,
                         coveragedir=self.nodes[0].coverage_dir)
    self.chain.set_genesis_hash(int(node.getbestblockhash(), 16))

    block(0)
    yield self.accepted()

    test, out, _ = prepare_init_chain(self.chain, 200, 200)
    yield test

    txHashes = []
    for i in range(18):
        txLarge = create_transaction(
            out[i].tx, out[i].n, b"", ONE_MEGABYTE * 256,
            CScript([OP_FALSE, OP_RETURN, bytearray([42] * (ONE_MEGABYTE * 256))]))
        self.test.connections[0].send_message(msg_tx(txLarge))
        self.check_mempool(node, [txLarge], timeout=6000)
        txHashes.append([txLarge.hash, txLarge.sha256])

    txOverflow = create_transaction(
        out[18].tx, out[18].n, b"", ONE_MEGABYTE * 305,
        CScript([OP_FALSE, OP_RETURN, bytearray([42] * (ONE_MEGABYTE * 305))]))
    self.test.connections[0].send_message(msg_tx(txOverflow))
    self.check_mempool(node, [txOverflow], timeout=6000)
    txHashes.append([txOverflow.hash, txOverflow.sha256])

    txOverflow = create_transaction(
        out[19].tx, out[19].n, b"", ONE_MEGABYTE,
        CScript([OP_FALSE, OP_RETURN, bytearray([42] * ONE_MEGABYTE)]))
    self.test.connections[0].send_message(msg_tx(txOverflow))
    self.check_mempool(node, [txOverflow], timeout=6000)
    txHashes.append([txOverflow.hash, txOverflow.sha256])

    # Mine block with new transactions.
    self.log.info("BLOCK 2 - mining")
    minedBlock2 = node.generate(1)
    self.log.info("BLOCK 2 - mined")

    for txHash in txHashes:
        tx = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txHash[0]))
        tx.rehash()
        assert_equal(tx.sha256, txHash[1])
def get_tests(self):
    # Shorthand for functions
    block = self.chain.next_block
    node = get_rpc_proxy(self.nodes[0].url, 1, timeout=6000,
                         coveragedir=self.nodes[0].coverage_dir)
    self.chain.set_genesis_hash(int(node.getbestblockhash(), 16))

    # Create a new block
    block(0)
    self.chain.save_spendable_output()
    yield self.accepted()

    # Now we need that block to mature so we can spend the coinbase.
    test = TestInstance(sync_every_block=False)
    for i in range(200):
        block(5000 + i)
        test.blocks_and_transactions.append([self.chain.tip, True])
        self.chain.save_spendable_output()
    yield test

    # Collect spendable outputs now to avoid cluttering the code later on
    out = []
    for i in range(200):
        out.append(self.chain.get_spendable_output())

    txHashes = []
    for i in range(18):
        txLarge = create_transaction(
            out[i].tx, out[i].n, b"", ONE_MEGABYTE * 256,
            CScript([OP_FALSE, OP_RETURN, bytearray([42] * (ONE_MEGABYTE * 256))]))
        self.test.connections[0].send_message(msg_tx(txLarge))
        self.check_mempool(node, [txLarge])
        txHashes.append([txLarge.hash, txLarge.sha256])

    txOverflow = create_transaction(
        out[18].tx, out[18].n, b"", ONE_MEGABYTE * 305,
        CScript([OP_FALSE, OP_RETURN, bytearray([42] * (ONE_MEGABYTE * 305))]))
    self.test.connections[0].send_message(msg_tx(txOverflow))
    self.check_mempool(node, [txOverflow])
    txHashes.append([txOverflow.hash, txOverflow.sha256])

    txOverflow = create_transaction(
        out[19].tx, out[19].n, b"", ONE_MEGABYTE,
        CScript([OP_FALSE, OP_RETURN, bytearray([42] * ONE_MEGABYTE)]))
    self.test.connections[0].send_message(msg_tx(txOverflow))
    self.check_mempool(node, [txOverflow])
    txHashes.append([txOverflow.hash, txOverflow.sha256])

    # Mine block with new transactions.
    self.log.info("BLOCK 2 - mining")
    minedBlock2 = node.generate(1)
    self.log.info("BLOCK 2 - mined")

    for txHash in txHashes:
        tx = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txHash[0]))
        tx.rehash()
        assert_equal(tx.sha256, txHash[1])
def test_compactblock_reconstruction_multiple_peers(self, node, stalling_peer, delivery_peer):
    assert len(self.utxos)

    def announce_cmpct_block(node, peer):
        utxo = self.utxos.pop(0)
        block = self.build_block_with_transactions(node, utxo, 5)

        cmpct_block = HeaderAndShortIDs()
        cmpct_block.initialize_from_block(block)
        msg = msg_cmpctblock(cmpct_block.to_p2p())
        peer.send_and_ping(msg)
        with mininode_lock:
            assert "getblocktxn" in peer.last_message
        return block, cmpct_block

    block, cmpct_block = announce_cmpct_block(node, stalling_peer)

    for tx in block.vtx[1:]:
        delivery_peer.send_message(msg_tx(tx))
    delivery_peer.sync_with_ping()
    mempool = node.getrawmempool()
    for tx in block.vtx[1:]:
        assert tx.hash in mempool

    delivery_peer.send_and_ping(msg_cmpctblock(cmpct_block.to_p2p()))
    assert_equal(int(node.getbestblockhash(), 16), block.sha256)

    self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])

    # Now test that delivering an invalid compact block won't break relay
    block, cmpct_block = announce_cmpct_block(node, stalling_peer)
    for tx in block.vtx[1:]:
        delivery_peer.send_message(msg_tx(tx))
    delivery_peer.sync_with_ping()

    cmpct_block.prefilled_txn[0].tx.wit.vtxinwit = [CTxInWitness()]
    cmpct_block.prefilled_txn[0].tx.wit.vtxinwit[0].scriptWitness.stack = [ser_uint256(0)]
    cmpct_block.use_witness = True
    delivery_peer.send_and_ping(msg_cmpctblock(cmpct_block.to_p2p()))
    assert int(node.getbestblockhash(), 16) != block.sha256

    msg = msg_blocktxn()
    msg.block_transactions.blockhash = block.sha256
    msg.block_transactions.transactions = block.vtx[1:]
    stalling_peer.send_and_ping(msg)
    assert_equal(int(node.getbestblockhash(), 16), block.sha256)
def get_tests(self):
    # Shorthand for functions
    block = self.chain.next_block
    node = self.nodes[0]
    self.chain.set_genesis_hash(int(node.getbestblockhash(), 16))

    # Create a new block
    block(0)
    self.chain.save_spendable_output()
    yield self.accepted()

    # Now we need that block to mature so we can spend the coinbase.
    test = TestInstance(sync_every_block=False)
    for i in range(100):
        block(5000 + i)
        test.blocks_and_transactions.append([self.chain.tip, True])
        self.chain.save_spendable_output()
    yield test

    # Collect spendable outputs now to avoid cluttering the code later on
    out = []
    for i in range(100):
        out.append(self.chain.get_spendable_output())

    # Create transaction with OP_RETURN in the locking script.
    tx1 = create_transaction(out[0].tx, out[0].n, b'', 100000, CScript([OP_RETURN]))
    self.test.connections[0].send_message(msg_tx(tx1))
    # Wait for transaction processing.
    sleep(1)

    # Generate an empty block, height is 102
    block(1, spend=out[1])
    yield self.accepted()

    tx2 = create_transaction(tx1, 0, b'\x51', 1, CScript([OP_TRUE]))
    self.test.connections[0].send_message(msg_tx(tx2))
    # Wait for transaction processing.
    sleep(1)

    # Mine block (height 103) with new transactions.
    self.nodes[0].generate(1)
    tx = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['tx']
    assert_equal(len(tx), 3)
    assert_equal(tx1.hash, tx[1])
    assert_equal(tx2.hash, tx[2])
def run_scenario2(self, conn, spend, num_txs_to_create, locking_script,
                  additional_txs=[], shuffle_txs=False, money_to_spend=2000000, timeout=60):
    # A handler to catch any reject messages.
    # - it is expected to get only 'too-long-validation-time' reject msgs.
    rejected_txs = []

    def on_reject(conn, msg):
        assert_equal(msg.reason, b'too-long-validation-time')
        rejected_txs.append(msg)
    conn.cb.on_reject = on_reject

    # Create and send tx chains with non-std outputs.
    # - one tx with vout_size=num_txs_to_create outpoints will be created
    txchains = self.generate_and_send_txchains_n(conn, 1, 1, spend, locking_script,
                                                 money_to_spend, num_txs_to_create, timeout)

    # Check if required transactions are accepted by the mempool.
    self.check_mempool(conn.rpc, txchains, timeout)

    # Create a new block
    # - having an empty mempool (before submitting non-std txs) will simplify further checks.
    conn.rpc.generate(1)

    # Create and send transactions spending non-std outputs.
    nonstd_txs = self.generate_transactons(txchains, CScript([OP_TRUE]), locking_script)
    all_txs = nonstd_txs + additional_txs
    if shuffle_txs:
        random.shuffle(all_txs)
    for tx in all_txs:
        conn.send_message(msg_tx(tx))

    # Check if the validation queues are empty.
    conn.rpc.waitforptvcompletion()

    return nonstd_txs + additional_txs, rejected_txs
def run_scenario1(self, conn, num_of_chains, chain_length, spend,
                  allowhighfees=False, dontcheckfee=False, timeout=30):
    # Create tx chains.
    txchains = self.get_txchains_n(num_of_chains, chain_length, spend)

    # Send txns, one by one, through p2p interface.
    for tx in range(len(txchains)):
        conn.send_message(msg_tx(txchains[tx]))

    # Check if there is an expected number of transactions in the validation queues
    # - this scenario relies on ptv delayed processing
    # - ptv is required to be paused
    wait_until(lambda: conn.rpc.getblockchainactivity()["transactions"] == num_of_chains * chain_length,
               timeout=timeout)

    # No transactions should be in the mempool.
    assert_equal(conn.rpc.getmempoolinfo()['size'], 0)

    # Resubmit txns through rpc interface
    # - there should be num_of_chains*chain_length txns detected as known transactions
    # - due to the fact that all were already received via p2p interface
    for tx in range(len(txchains)):
        assert_raises_rpc_error(-26, "txn-already-known", conn.rpc.sendrawtransaction,
                                ToHex(txchains[tx]), allowhighfees, dontcheckfee)

    # No transactions should be in the mempool.
    assert_equal(conn.rpc.getmempoolinfo()['size'], 0)

    return txchains
def _process_p2p_rejects(self, connection, to_reject, reasons, test_label, height_label):
    rejects = []

    def on_reject(_, msg):
        rejects.append(msg)

    with connection.cb.temporary_override_callback(on_reject=on_reject):
        for tx, reason in zip(to_reject, reasons):
            self.log.info(
                f"Sending and processing the rejected tx {loghash(tx.hash)} with expected reason {reason}")
            del rejects[:]
            connection.send_message(msg_tx(tx))
            wait_until(
                lambda: (len(rejects) == 1) and rejects[0].data == tx.sha256,
                timeout=30,
                check_interval=0.2,
                label=f"Waiting for tx to be rejected. Reason {reason} At {test_label} {height_label} tx:{tx.hash}")
            if reason:
                assert rejects[0].reason == reason, \
                    f"Mismatching rejection reason: got {rejects[0].reason}, expected {reason}"
            self.log.info(
                f"Tx {loghash(tx.hash)} is rejected as expected for reason {reason}")
def run_scenario3(self, conn, num_of_chains, chain_length, spend,
                  allowhighfees=False, dontcheckfee=False, timeout=30):
    # Create and send tx chains.
    txchains = self.get_txchains_n(num_of_chains, chain_length, spend)

    # Prepare inputs for sendrawtransactions
    rpc_txs_bulk_input = []
    for tx in range(len(txchains)):
        # Collect txn input data for bulk submit through rpc interface.
        rpc_txs_bulk_input.append({
            'hex': ToHex(txchains[tx]),
            'allowhighfees': allowhighfees,
            'dontcheckfee': dontcheckfee
        })
        # Send txns, one by one, through p2p interface.
        conn.send_message(msg_tx(txchains[tx]))

    # Check if there is an expected number of transactions in the validation queues
    # - this scenario relies on ptv delayed processing
    wait_until(lambda: conn.rpc.getblockchainactivity()["transactions"] == num_of_chains * chain_length,
               timeout=timeout)

    # Submit a batch of txns through rpc interface.
    rejected_txns = conn.rpc.sendrawtransactions(rpc_txs_bulk_input)

    # There should be num_of_chains * chain_length rejected transactions.
    # - there are num_of_chains*chain_length known transactions
    # - due to the fact that all were received through the p2p interface
    # - all are waiting in the ptv queues
    assert_equal(len(rejected_txns['known']), num_of_chains * chain_length)

    # No transactions should be in the mempool.
    assert_equal(conn.rpc.getmempoolinfo()['size'], 0)
def run_test(self):
    with self.run_node_with_connections(
            "Scenario 1", 0,
            ['-acceptnonstdtxn=1', '-genesisactivationheight=10'],
            number_of_connections=1) as (conn, ):

        coinbase_tx, coinbase_key = make_coinbase(conn)
        conn.rpc.generate(100)

        tx_data = spend_tx_to_data(coinbase_tx, coinbase_key)
        conn.send_message(msg_tx(tx_data))
        conn.cb.sync_with_ping()

        url = urllib.parse.urlparse(self.nodes[0].url)
        json_mempool = json.loads(
            http_get_call(url.hostname, url.port, '/rest/mempool/contents.json'))
        json_tx = json.loads(
            http_get_call(url.hostname, url.port,
                          f'/rest/getutxos/checkmempool/{tx_data.hash}-0.json'))

        assert len(json_mempool) == 1, f"Only one tx should be in mempool. Found {len(json_mempool)}"
        assert tx_data.hash in json_mempool, "Our tx should be in mempool"
        assert json_tx['utxos'][0]['scriptPubKey']['hex'] == bytes_to_hex_str(OP_TRUE_OP_RETURN_SCRIPT)
def send_txn(self, rpcsend, conn, tx):
    # Send a transaction either via the p2p connection or the given rpc interface.
    if conn is not None:
        conn.send_message(msg_tx(tx))
    elif rpcsend is not None:
        self.rpc_send_txn(rpcsend, tx)
    else:
        raise Exception("Unspecified interface!")
def run_scenario3(self, conn, spend, num_txs_to_create, locking_script, num_ds_to_create=0,
                  shuffle_txs=False, money_to_spend=2000000, timeout=60):
    all_nonstd_txs = []
    all_ds_txs = []

    # Create the set of required txs.
    for tx in spend:
        nonstd_txs, ds_txs, rejected_txs = self.run_scenario2(
            conn, [tx], num_txs_to_create, locking_script, num_ds_to_create,
            [], shuffle_txs, False, money_to_spend, timeout)
        all_nonstd_txs += nonstd_txs
        all_ds_txs += ds_txs

    all_txs = all_nonstd_txs + all_ds_txs

    # Shuffle txs if it is required
    if shuffle_txs:
        random.shuffle(all_txs)

    # Send txs
    for tx in all_txs:
        conn.send_message(msg_tx(tx))

    # Return the ds set if it was required to be created.
    if len(all_ds_txs):
        return all_nonstd_txs, all_ds_txs, rejected_txs

    return all_nonstd_txs, rejected_txs
def assert_rejected_transaction(self, out):
    # The node should reject a transaction whose scriptSig is not push-only.
    def on_reject(conn, msg):
        assert_equal(msg.reason, b'scriptsig-not-pushonly')

    transaction_op_add = create_transaction(out.tx, out.n, CScript([1, 1, OP_ADD]),
                                            100000, CScript([OP_TRUE]))

    self.test.connections[0].cb.on_reject = on_reject
    self.test.connections[0].send_message(msg_tx(transaction_op_add))
    self.test.connections[0].cb.wait_for_reject()
def submit_to_mempool(conn, *txs_lists):
    txs = list(splice(*txs_lists))
    expected_mempool_size = conn.rpc.getmempoolinfo()["size"] + len(txs)
    for tx in txs:
        conn.send_message(msg_tx(tx))
    # All planned transactions should be accepted into the mempool
    wait_until(lambda: conn.rpc.getmempoolinfo()["size"] == expected_mempool_size)
def run_test(self):
    with self.run_node_with_connections(
            "Scenario 1", 0, ['-acceptnonstdtxn=1'],
            number_of_connections=1) as (conn, ):

        coinbase_tx, coinbase_key = make_coinbase(conn)
        conn.rpc.generate(100)

        sep_tx, sep_keys = make_separator_tx(coinbase_tx, coinbase_key, 5)
        conn.send_message(msg_tx(sep_tx))
        sleep(1)
        conn.rpc.generate(10)

        tx, _ = spend_separator_tx(sep_tx, sep_keys)
        conn.send_message(msg_tx(tx))
        wait_until(lambda: len(conn.rpc.getrawmempool()) == 1, timeout=5)
def run_scenario1(self, conn, num_of_chains, chain_length, spend, timeout):
    # Create and send tx chains.
    txchains = self.get_txchains_n(num_of_chains, chain_length, spend)
    for tx in range(len(txchains)):
        conn.send_message(msg_tx(txchains[tx]))

    # Check if the validation queues are empty.
    wait_until(lambda: self.nodes[0].rpc.getblockchainactivity()["transactions"] == 0, timeout=timeout)

    # Check if required transactions are accepted by the mempool.
    self.check_mempool(conn.rpc, txchains, timeout)
def generate_and_send_txchains_n(self, conn, num_of_chains, chain_length, spend, locking_script,
                                 money_to_spend=5000000000, factor=10, timeout=60):
    # Create and send txs. In this case there will be num_txs_to_create txs of chain length equal to 1.
    txchains = self.get_txchains_n(num_of_chains, chain_length, spend, CScript(),
                                   locking_script, money_to_spend, factor)
    for tx in range(len(txchains)):
        conn.send_message(msg_tx(txchains[tx]))

    # Check if the validation queues are empty.
    wait_until(lambda: conn.rpc.getblockchainactivity()["transactions"] == 0, timeout=timeout)

    return txchains
def generate_and_send_txchains_n(self, conn, num_of_chains, chain_length, spend, locking_script,
                                 money_to_spend=2000000, vout_size=10, timeout=60):
    # Create and send txs. In this case there will be num_txs_to_create txs of chain length equal to 1.
    txchains = self.get_txchains_n(num_of_chains, chain_length, spend, CScript(),
                                   locking_script, money_to_spend, vout_size)
    for tx in range(len(txchains)):
        conn.send_message(msg_tx(txchains[tx]))

    # Check if the validation queues are empty.
    wait_for_ptv_completion(conn, num_of_chains * chain_length, timeout=timeout)

    return txchains
def run_scenario1(self, conn, num_of_chains, chain_length, spend, timeout):
    # Create and send tx chains.
    txchains = self.get_txchains_n(num_of_chains, chain_length, spend)
    for tx in range(len(txchains)):
        conn.send_message(msg_tx(txchains[tx]))

    # Check if the validation queues are empty.
    wait_for_ptv_completion(conn, num_of_chains * chain_length, timeout=timeout)

    # Check if required transactions are accepted by the mempool.
    self.check_mempool(conn.rpc, txchains, timeout)
def run_scenario2(self, conn, spend, num_txs_to_create, locking_script, num_ds_to_create=0,
                  additional_txs=[], shuffle_txs=False, send_txs=True,
                  money_to_spend=2000000, timeout=60):
    # A handler to catch reject messages.
    rejected_txs = []

    def on_reject(conn, msg):
        rejected_txs.append(msg)
        # A double spend reject message is the expected one to occur.
        assert_equal(msg.reason, b'txn-double-spend-detected')
    conn.cb.on_reject = on_reject

    # Create and send tx chains with non-std outputs.
    # - one tx with vout_size=num_txs_to_create outpoints will be created
    txchains = self.generate_and_send_txchains_n(conn, 1, 1, spend, locking_script,
                                                 money_to_spend, num_txs_to_create, timeout)

    # Check if required transactions are accepted by the mempool.
    self.check_mempool(conn.rpc, txchains, timeout)

    # Create a new block
    # - having an empty mempool (before submitting non-std txs) will simplify further checks.
    conn.rpc.generate(1)

    # Create and send transactions spending non-std outputs.
    nonstd_txs, ds_txs = self.generate_transactons(txchains, CScript([OP_TRUE]),
                                                   locking_script, num_ds_to_create)
    all_txs = nonstd_txs + ds_txs + additional_txs

    # Shuffle txs if it is required
    if shuffle_txs:
        random.shuffle(all_txs)

    # Send txs if it is required
    if send_txs:
        for tx in all_txs:
            conn.send_message(msg_tx(tx))

    # Return the ds set if it was requested.
    if len(ds_txs):
        return nonstd_txs + additional_txs, ds_txs, rejected_txs

    return nonstd_txs + additional_txs, rejected_txs
def _process_p2p_accepts(self, connection, to_accept, test_label, height_label):
    for tx in to_accept:
        self.log.info(f"Sending and processing the accepted tx {tx.hash}")
        connection.send_message(msg_tx(tx))

    def tt():
        mempool = connection.rpc.getrawmempool()
        return all((t.hash in mempool) for t in to_accept)

    wait_until(tt, timeout=10, check_interval=0.2,
               label=f"Waiting for txs to be accepted. At {test_label} {height_label} "
                     f"tx:{','.join(tx.hash[:8] + '...' for tx in to_accept)}")
def send_transaction(self, testnode, block, address, expiry_height):
    tx = create_transaction(self.nodes[0], block, address, 10.0, expiry_height)
    testnode.send_message(msg_tx(tx))

    # Sync up with node after p2p messages delivered
    testnode.sync_with_ping()

    # Sync nodes 0 and 1
    sync_blocks(self.nodes[:2])
    sync_mempools(self.nodes[:2])

    return tx
def assert_rejected_transaction(self, out):
    def on_reject(conn, msg):
        assert_equal(
            msg.reason,
            b'mandatory-script-verify-flag-failed (Only non-push operators allowed in signatures)')

    transaction_op_add = create_transaction(
        out.tx, out.n, CScript([1, 1, OP_ADD, OP_DROP]), 100000, CScript([OP_TRUE]))

    self.test.connections[0].cb.on_reject = on_reject
    self.test.connections[0].send_message(msg_tx(transaction_op_add))
    self.test.connections[0].cb.wait_for_reject()
def create_and_send_transactions(self, conn, spendtx, num_of_transactions, money_to_spend=5000000000):
    for i in range(0, num_of_transactions):
        money_to_spend = money_to_spend - 500000000  # Large fee required for big txns
        tx = create_tx(spendtx, 0, money_to_spend, script=CScript([OP_DROP, OP_TRUE]))
        tx.vout.append(CTxOut(0, CScript([OP_FALSE, OP_RETURN,
                                          bytearray([0x00] * (ONE_MEGABYTE * 880))])))
        self.sign_tx(tx, spendtx, 0)
        tx.rehash()
        conn.send_message(msg_tx(tx))

        wait_until(lambda: tx.hash in conn.rpc.getrawmempool(),
                   timeout=int(360 * self.options.timeoutfactor))
        logger.info("Submitted txn {} of {}".format(i + 1, num_of_transactions))
        assert conn.rpc.getmempoolinfo()['size'] == i + 1

        spendtx = tx
def get_tests(self):
    # Shorthand for functions
    block = self.chain.next_block
    node = self.nodes[0]
    self.chain.set_genesis_hash(int(node.getbestblockhash(), 16))

    block(0)
    yield self.accepted()

    test, out, _ = prepare_init_chain(self.chain, 100, 100)
    yield test

    # Create transaction with OP_RETURN in the locking script.
    tx1 = create_transaction(out[0].tx, out[0].n, b'', 100000, CScript([OP_RETURN]))
    self.test.connections[0].send_message(msg_tx(tx1))
    # Wait for transaction processing.
    sleep(1)

    # Generate an empty block, height is 102
    block(1, spend=out[1])
    yield self.accepted()

    tx2 = create_transaction(tx1, 0, b'\x51', 1, CScript([OP_TRUE]))
    self.test.connections[0].send_message(msg_tx(tx2))
    # Wait for transaction processing.
    sleep(1)

    # Mine block (height 103) with new transactions.
    self.nodes[0].generate(1)
    tx = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['tx']
    assert_equal(len(tx), 3)
    assert_equal(tx1.hash, tx[1])
    assert_equal(tx2.hash, tx[2])
def generate_and_send_txchains_n(self, conn, num_of_chains, chain_length, spend, locking_script,
                                 money_to_spend=5000000000, factor=10, timeout=60):
    # Create and send txs. In this case there will be num_txs_to_create txs of chain length equal to 1.
    txchains = self.get_txchains_n(num_of_chains, chain_length, spend, CScript(),
                                   locking_script, money_to_spend, factor)
    for tx in range(len(txchains)):
        conn.send_message(msg_tx(txchains[tx]))

    return txchains
def create_fund_txn(self, conn, noutput, tx_fee, locking_script, pubkey=None):
    # Create a new block with a coinbase
    last_block_info = conn.rpc.getblock(conn.rpc.getbestblockhash())
    coinbase = create_coinbase(height=last_block_info["height"] + 1, pubkey=pubkey)
    new_block = create_block(int(last_block_info["hash"], 16),
                             coinbase=coinbase,
                             nTime=last_block_info["time"] + 1)
    new_block.nVersion = last_block_info["version"]
    new_block.solve()

    conn.send_message(msg_block(new_block))
    wait_until(lambda: conn.rpc.getbestblockhash() == new_block.hash, check_interval=0.3)

    # Mature the coinbase
    conn.rpc.generate(100)

    # Create and send a funding txn
    funding_tx = self.create_tx([(coinbase, 0)], 2, 1.5, locking_script)
    conn.send_message(msg_tx(funding_tx))
    check_mempool_equals(conn.rpc, [funding_tx])
    conn.rpc.generate(1)

    # Create a new txn which pays the specified tx_fee
    new_tx = self.create_tx([(funding_tx, 0)], noutput, tx_fee, locking_script)

    last_block_info = conn.rpc.getblock(conn.rpc.getbestblockhash())
    new_block = create_block(int(last_block_info["hash"], 16),
                             coinbase=create_coinbase(height=last_block_info["height"] + 1),
                             nTime=last_block_info["time"] + 1)
    new_block.nVersion = last_block_info["version"]
    new_block.vtx.append(new_tx)
    new_block.hashMerkleRoot = new_block.calc_merkle_root()
    new_block.calc_sha256()
    new_block.solve()

    conn.send_message(msg_block(new_block))
    wait_until(lambda: conn.rpc.getbestblockhash() == new_block.hash, check_interval=0.3)

    return new_tx
def run_scenario3(self, conn, spend, num_txs_to_create, locking_script, num_ds_to_create=0,
                  shuffle_txs=False, money_to_spend=2000000, timeout=60):
    all_nonstd_txs = []
    all_ds_txs = []

    # Create the set of required txs.
    for tx in spend:
        nonstd_txs, ds_txs, rejected_txs = self.run_scenario2(
            conn, [tx], num_txs_to_create, locking_script, num_ds_to_create,
            [], shuffle_txs, False, money_to_spend, timeout)
        all_nonstd_txs += nonstd_txs
        all_ds_txs += ds_txs

    all_txs = all_nonstd_txs + all_ds_txs

    # Shuffle txs if it is required
    if shuffle_txs:
        random.shuffle(all_txs)

    # Send txs
    for tx in all_txs:
        conn.send_message(msg_tx(tx))

    # Check if the validation queues are empty.
    wait_until(lambda: conn.rpc.getblockchainactivity()["transactions"] == 0, timeout=timeout)

    # Return the ds set if it was required to be created.
    if len(all_ds_txs):
        return all_nonstd_txs, all_ds_txs, rejected_txs

    return all_nonstd_txs, rejected_txs