def send_block(self, node, txs, accept=False):
    """Build a solved block on self.tip carrying txs, send it to node,
    and check whether it was accepted.

    When accept is True the node's tip must advance to the new block and
    the tracked chain state (tip/hash/time/height) is updated; otherwise
    the tip must remain at self.lastblockhash.
    """
    proposer = self.nodes[0]
    tip_snapshot_hash = get_tip_snapshot_meta(proposer).hash
    stake = get_unspent_coins(proposer, 1)[0]
    unsigned_cb = create_coinbase(self.lastblockheight + 1, stake, tip_snapshot_hash)
    block = create_block(self.tip, unsigned_cb, self.lastblocktime + 1)
    block.vtx[0] = sign_coinbase(proposer, block.vtx[0])
    block.nVersion = 4
    for transaction in txs:
        transaction.rehash()
        block.vtx.append(transaction)
    block.ensure_ltor()
    block.compute_merkle_trees()
    block.solve()
    node.p2p.send_and_ping(msg_block(block))
    if not accept:
        # Rejected blocks must leave the node's tip untouched.
        assert_equal(node.getbestblockhash(), self.lastblockhash)
    else:
        assert_equal(node.getbestblockhash(), block.hash)
        self.tip = block.sha256
        self.lastblockhash = block.hash
        self.lastblocktime += 1
        self.lastblockheight += 1
def get_coinbase(self, height, **kwargs):
    """Create and sign a coinbase for the given height, consuming one of
    the tracked spendable UTXOs as stake.

    Extra keyword arguments are forwarded to create_coinbase. The signed
    coinbase is remembered in self.prev_coinbase and returned.
    """
    prev_snapshot_hash = self.get_hash(height - 1)
    stake_utxo = self.get_spendable_utxo(height)
    # Move the staked output from the available set to the spent set.
    self.available_outputs.remove(stake_utxo)
    self.spent_outputs.append(stake_utxo)
    stake = {
        'txid': hex(stake_utxo.outpoint.hash),
        'vout': stake_utxo.outpoint.n,
        'amount': stake_utxo.txOut.nValue / UNIT,
    }
    signed_cb = sign_coinbase(
        self.node,
        create_coinbase(height, stake, prev_snapshot_hash, **kwargs))
    self.prev_coinbase = signed_cb
    return signed_cb
def solve_and_send_block(prevhash, height, time):
    """Build a solved block at the given height on prevhash, send it to
    the node over p2p, and return it.

    NOTE: `node` comes from the enclosing scope, and the `time` parameter
    shadows the stdlib module name inside this function.
    """
    tip_snapshot_hash = get_tip_snapshot_meta(node).hash
    staked_coin = get_unspent_coins(node, 1)[0]
    signed_cb = sign_coinbase(
        node, create_coinbase(height, staked_coin, tip_snapshot_hash))
    block = create_block(prevhash, signed_cb, time)
    block.solve()
    node.p2p.send_message(msg_block(block))
    node.p2p.sync_with_ping()
    return block
def build_chain(self, nblocks, prev_hash, prev_height, prev_median_time,
                unspent_outputs, snapshot_meta):
    """Return a list of nblocks solved blocks extending prev_hash.

    Block i stakes unspent_outputs[i]; the snapshot meta is advanced for
    each coinbase so every block commits to a consistent snapshot hash.
    The blocks are built but not submitted.
    """
    node = self.nodes[0]
    chain = []
    tip_hash = prev_hash
    height = prev_height
    median_time = prev_median_time
    meta = snapshot_meta
    for i in range(nblocks):
        signed_cb = sign_coinbase(
            node, create_coinbase(height + 1, unspent_outputs[i], meta.hash))
        new_time = median_time + 1
        block = create_block(int(tip_hash, 16), signed_cb, new_time)
        block.solve()
        chain.append(block)
        # Advance our view of the chain so the next block builds on this one.
        tip_hash = block.hash
        meta = update_snapshot_with_tx(node, meta, height + 1, signed_cb)
        height += 1
        median_time = new_time
    return chain
def create_block(self):
    """Build, solve, and return a block on top of node0's current tip.

    This method shadows the module-level create_block helper, which it
    still uses internally to construct the block.
    """
    proposer = self.nodes[0]
    staked_coin = proposer.listunspent()[0]
    best_hash = proposer.getblockchaininfo()['bestblockhash']
    best_header = proposer.getblockheader(best_hash)
    tip_snapshot_hash = get_tip_snapshot_meta(proposer).hash
    signed_cb = sign_coinbase(
        proposer,
        create_coinbase(best_header["height"] + 1, staked_coin,
                        tip_snapshot_hash))
    block = create_block(int(best_hash, 16), signed_cb,
                         best_header["mediantime"] + 1)
    block.solve()
    block.rehash()
    return block
def build_block_with_immature_stake(node):
    """Build (but do not solve or submit) a block staking node's most
    recently created — and therefore still immature — coin.

    Returns the block so callers can verify that it is rejected.
    """
    height = node.getblockcount()
    stakes = node.listunspent()
    # Take the latest, immature stake: fewest confirmations = newest coin.
    stake = min(stakes, key=lambda x: x['confirmations'])
    snapshot_meta = get_tip_snapshot_meta(node)
    coinbase = sign_coinbase(
        node, create_coinbase(height, stake, snapshot_meta.hash))
    tip = int(node.getbestblockhash(), 16)
    # BUG FIX: this is a free function, so the original reference to
    # self.nodes[0] raised NameError; query the `node` argument instead.
    block_time = node.getblock(node.getbestblockhash())['time'] + 1
    block = create_block(tip, coinbase, block_time)
    return block
def create_test_block(self, coin, txs, version=536870912):
    """Assemble and solve a block on self.tip staking `coin` and carrying
    the transactions in `txs`.

    Side effect: advances self.tip_snapshot_meta to account for the new
    coinbase. The block itself is returned, not submitted.
    """
    node = self.nodes[0]
    next_height = self.tipheight + 1
    signed_cb = sign_coinbase(
        node,
        create_coinbase(next_height, coin, self.tip_snapshot_meta.hash))
    block = create_block(self.tip, signed_cb, self.last_block_time + 600)
    block.nVersion = version
    block.vtx.extend(txs)
    block.ensure_ltor()
    block.compute_merkle_trees()
    block.solve()
    self.tip_snapshot_meta = update_snapshot_with_tx(
        node, self.tip_snapshot_meta, next_height, signed_cb)
    return block
def generate_blocks(self, coins, number, version, test_blocks=None):
    """Create `number` solved blocks with the given nVersion, each staking
    one coin popped from `coins`.

    Appends a [block, True] entry per block to test_blocks and returns it.
    Updates self.tip / self.height / self.last_block_time / self.snapshot_meta
    as the chain grows.

    BUG FIX: the original declared test_blocks=[] — a mutable default that
    silently accumulated blocks across calls. A None sentinel preserves the
    interface while giving each call a fresh list.
    """
    if test_blocks is None:
        test_blocks = []
    for _ in range(number):
        coin = coins.pop()
        coinbase = sign_coinbase(
            self.nodes[0],
            create_coinbase(self.height, coin, self.snapshot_meta.hash))
        block = create_block(self.tip, coinbase, self.last_block_time + 1)
        block.nVersion = version
        block.solve()
        test_blocks.append([block, True])
        self.last_block_time += 1
        # Keep the snapshot meta in sync with the coinbase we just staked.
        self.snapshot_meta = update_snapshot_with_tx(
            self.nodes[0], self.snapshot_meta, self.height, coinbase)
        self.tip = block.sha256
        self.height += 1
    return test_blocks
def submit_block_with_tx(node, tx):
    """Mine a block containing the raw transaction hex `tx` on node's tip,
    submit it over p2p, assert it becomes the new tip, and return it."""
    parsed_tx = CTransaction()
    parsed_tx.deserialize(io.BytesIO(hex_str_to_bytes(tx)))

    best_hash = node.getbestblockhash()
    next_height = node.getblockcount() + 1
    new_time = node.getblockheader(best_hash)["mediantime"] + 1
    tip_snapshot_hash = get_tip_snapshot_meta(node).hash
    staked_coin = node.listunspent()[0]
    signed_cb = sign_coinbase(
        node, create_coinbase(next_height, staked_coin, tip_snapshot_hash))

    block = create_block(int(best_hash, 16), signed_cb, new_time)
    block.vtx.append(parsed_tx)
    block.rehash()
    block.compute_merkle_trees()
    block.solve()
    node.p2p.send_and_ping(msg_block(block))
    assert_equal(node.getbestblockhash(), block.hash)
    return block
def build_block_on_tip(self, node, txs=None):
    """Build and solve (but do not send) a version-4 block on node's
    current tip, optionally including the transactions in `txs`.

    BUG FIX: the original used the mutable default txs=[]; a None sentinel
    avoids the shared-default pitfall. Behavior is unchanged for all
    callers, since `if txs:` treats None and [] identically.
    """
    height = node.getblockcount()
    tip = node.getbestblockhash()
    mtp = node.getblockheader(tip)['mediantime']
    meta = get_tip_snapshot_meta(node)
    coin = get_unspent_coins(node, 1)[0]
    block = create_block(
        int(tip, 16),
        sign_coinbase(node, create_coinbase(height + 1, coin, meta.hash)),
        mtp + 1)
    block.nVersion = 4
    if txs:
        block.vtx.extend(txs)
        # Transactions must be in lexicographic order before hashing.
        block.ensure_ltor()
    block.compute_merkle_trees()
    block.solve()
    return block
def build_block_with_remote_stake(node):
    """Build (unsolved) a block whose coinbase stakes a coin reported by
    liststakeablecoins, passing that coin's scriptPubKey explicitly."""
    current_height = node.getblockcount()
    meta = get_tip_snapshot_meta(node)
    stakeable = node.liststakeablecoins()['stakeable_coins'][0]['coin']
    raw_spk = hex_str_to_bytes(stakeable['script_pub_key']['hex'])
    staked_coin = {
        'txid': stakeable['out_point']['txid'],
        'vout': stakeable['out_point']['n'],
        'amount': stakeable['amount'],
    }
    prev_tip = int(node.getbestblockhash(), 16)
    next_time = node.getblock(node.getbestblockhash())['time'] + 1
    signed_cb = sign_coinbase(
        node,
        create_coinbase(current_height, staked_coin, meta.hash,
                        raw_script_pubkey=raw_spk))
    return create_block(prev_tip, signed_cb, next_time)
def get_empty_block(self, sync_height):
    """Return a solved block (coinbase only) built on the tip once all
    nodes have synced to sync_height."""
    sync_blocks(self.nodes, height=sync_height)
    proposer = self.nodes[0]
    prev_hash = uint256_from_str(unhexlify(proposer.getbestblockhash())[::-1])
    tip_snapshot_hash = get_tip_snapshot_meta(proposer).hash
    # Derive the timestamp from the newest spendable output when we have
    # one, otherwise fall back to wall-clock time.
    if self.spendable_outputs:
        new_time = self.spendable_outputs[-1].nTime + 1
    else:
        new_time = int(time_time()) + 2
    signed_cb = sign_coinbase(proposer, create_coinbase(
        height=sync_height + 1,
        stake=proposer.listunspent()[0],
        snapshot_hash=tip_snapshot_hash))
    block = create_block(hashprev=prev_hash, coinbase=signed_cb,
                         nTime=new_time)
    block.solve()
    return block
def send_blocks_with_version(self, peer, numblocks, version):
    """Send numblocks blocks with the given nVersion to peer, waiting for
    the node to connect each block before building the next on top."""
    node = self.nodes[0]
    best_hash = node.getbestblockhash()
    height = node.getblockcount()
    next_time = node.getblockheader(best_hash)["time"] + 1
    prev_hash = int(best_hash, 16)
    meta = get_tip_snapshot_meta(node)
    for _ in range(numblocks):
        staked_coin = node.listunspent()[0]
        signed_cb = sign_coinbase(
            node, create_coinbase(height + 1, staked_coin, meta.hash))
        block = create_block(prev_hash, signed_cb, next_time)
        block.nVersion = version
        block.solve()
        peer.send_message(msg_block(block))
        # Block until the node has connected this block; the next block
        # must stake a coin that exists in the post-connect state.
        node.waitforblockheight(height + 1, 10000)
        next_time += 1
        height += 1
        prev_hash = block.sha256
        meta = get_tip_snapshot_meta(node)
    peer.sync_with_ping()
def run_test(self):
    """Main test logic.

    Walkthrough test: builds blocks manually with mininode/blocktools,
    sends them to node0 over p2p, then verifies node2 propagates every
    block exactly once.
    """
    # Create P2P connections to two of the nodes
    self.nodes[0].add_p2p_connection(BaseNode())

    # Start up network handling in another thread. This needs to be called
    # after the P2P connections have been created.
    network_thread_start()

    # wait_for_verack ensures that the P2P connection is fully up.
    self.nodes[0].p2p.wait_for_verack()

    self.setup_stake_coins(self.nodes[0])

    # Generating a block on one of the nodes will get us out of IBD
    blocks = [int(self.nodes[0].generate(nblocks=1)[0], 16)]
    self.sync_all([self.nodes[0:1]])

    # Notice above how we called an RPC by calling a method with the same
    # name on the node object. Notice also how we used a keyword argument
    # to specify a named RPC argument. Neither of those are defined on the
    # node object. Instead there's some __getattr__() magic going on under
    # the covers to dispatch unrecognised attribute calls to the RPC
    # interface.

    # Logs are nice. Do plenty of them. They can be used in place of comments for
    # breaking the test into sub-sections.
    self.log.info("Starting test!")

    self.log.info("Calling a custom function")
    custom_function()

    self.log.info("Calling a custom method")
    self.custom_method()

    self.log.info("Create some blocks")
    self.tip = int(self.nodes[0].getbestblockhash(), 16)
    self.block_time = self.nodes[0].getblock(
        self.nodes[0].getbestblockhash())['time'] + 1
    height = self.nodes[0].getblockcount()
    snapshot_meta = get_tip_snapshot_meta(self.nodes[0])
    stakes = self.nodes[0].listunspent()

    # Build one block per available stake, chained on self.tip.
    for stake in stakes:
        # Use the mininode and blocktools functionality to manually build a block
        # Calling the generate() rpc is easier, but this allows us to exactly
        # control the blocks and transactions.
        coinbase = sign_coinbase(
            self.nodes[0],
            create_coinbase(height, stake, snapshot_meta.hash))
        block = create_block(self.tip, coinbase, self.block_time)
        # Wait until the active chain picks up the previous block
        wait_until(lambda: self.nodes[0].getblockcount() == height,
                   timeout=5)
        snapshot_meta = update_snapshot_with_tx(self.nodes[0], snapshot_meta,
                                                height + 1, coinbase)
        block.solve()
        block_message = msg_block(block)
        # Send message is used to send a P2P message to the node over our P2PInterface
        self.nodes[0].p2p.send_message(block_message)
        self.tip = block.sha256
        blocks.append(self.tip)
        self.block_time += 1
        height += 1

    self.log.info(
        "Wait for node1 to reach current tip (height %d) using RPC" % height)
    self.nodes[1].waitforblockheight(height)

    self.log.info("Connect node2 and node1")
    connect_nodes(self.nodes[1], 2)

    self.log.info("Add P2P connection to node2")
    # We can't add additional P2P connections once the network thread has
    # started. Disconnect the connection to node0, wait for the network
    # thread to terminate, then connect to node2. This is specific to the
    # current implementation of the network thread and may be improved in
    # future.
    self.nodes[0].disconnect_p2ps()
    network_thread_join()

    self.nodes[2].add_p2p_connection(BaseNode())
    network_thread_start()
    self.nodes[2].p2p.wait_for_verack()

    self.log.info(
        "Wait for node2 reach current tip. Test that it has propagated all the blocks to us"
    )
    getdata_request = msg_getdata()
    for block in blocks:
        # CInv type 2 = MSG_BLOCK
        getdata_request.inv.append(CInv(2, block))
    self.nodes[2].p2p.send_message(getdata_request)

    # wait_until() will loop until a predicate condition is met. Use it to
    # test properties of the P2PInterface objects.
    wait_until(lambda: sorted(blocks) == sorted(
        list(self.nodes[2].p2p.block_receive_map.keys())),
               timeout=5,
               lock=mininode_lock)

    self.log.info("Check that each block was received only once")
    # The network thread uses a global lock on data access to the
    # P2PConnection objects when sending and receiving messages. The test
    # thread should acquire the global lock before accessing any
    # P2PConnection data to avoid locking and synchronization issues.
    # Note wait_until() acquires this global lock when testing the
    # predicate.
    with mininode_lock:
        for block in self.nodes[2].p2p.block_receive_map.values():
            assert_equal(block, 1)
def test_sequence_lock_unconfirmed_inputs(self):
    """Sequence-locked spends of unconfirmed inputs must be rejected from
    the mempool, and mempool/BIP68 state must stay consistent across
    mining and reorgs."""
    # Store height so we can easily reset the chain at the end of the test
    cur_height = self.nodes[0].getblockcount()

    # Create a mempool tx.
    txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 2)
    tx1 = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txid))
    tx1.rehash()

    # Anyone-can-spend mempool tx.
    # Sequence lock of 0 should pass.
    tx2 = CTransaction()
    tx2.nVersion = 2
    tx2.vin = [CTxIn(COutPoint(tx1.sha256, 0), nSequence=0)]
    tx2.vout = [
        CTxOut(int(tx1.vout[0].nValue - self.relayfee * UNIT),
               CScript([b'a']))
    ]
    tx2_raw = self.nodes[0].signrawtransactionwithwallet(ToHex(tx2))["hex"]
    tx2 = FromHex(tx2, tx2_raw)
    tx2.rehash()

    self.nodes[0].sendrawtransaction(tx2_raw)

    # Create a spend of the 0th output of orig_tx with a sequence lock
    # of 1, and test what happens when submitting.
    # orig_tx.vout[0] must be an anyone-can-spend output
    def test_nonzero_locks(orig_tx, node, relayfee, use_height_lock):
        sequence_value = 1
        if not use_height_lock:
            # Switch the relative lock from height-based to time-based.
            sequence_value |= SEQUENCE_LOCKTIME_TYPE_FLAG

        tx = CTransaction()
        tx.nVersion = 2
        tx.vin = [
            CTxIn(COutPoint(orig_tx.sha256, 0), nSequence=sequence_value)
        ]
        tx.vout = [
            CTxOut(int(orig_tx.vout[0].nValue - relayfee * UNIT),
                   CScript([b'a' * 35]))
        ]
        tx.rehash()

        if (orig_tx.hash in node.getrawmempool()):
            # sendrawtransaction should fail if the tx is in the mempool
            assert_raises_rpc_error(-26, NOT_FINAL_ERROR,
                                    node.sendrawtransaction, ToHex(tx))
        else:
            # sendrawtransaction should succeed if the tx is not in the mempool
            node.sendrawtransaction(ToHex(tx))

        return tx

    test_nonzero_locks(tx2, self.nodes[0], self.relayfee,
                       use_height_lock=True)
    test_nonzero_locks(tx2, self.nodes[0], self.relayfee,
                       use_height_lock=False)

    # Now mine some blocks, but make sure tx2 doesn't get mined.
    # Use prioritisetransaction to lower the effective feerate to 0
    self.nodes[0].prioritisetransaction(
        txid=tx2.hash, fee_delta=int(-self.relayfee * UNIT))
    cur_time = int(time.time())
    for i in range(10):
        self.nodes[0].setmocktime(cur_time + 600)
        self.nodes[0].generate(1)
        cur_time += 600

    assert tx2.hash in self.nodes[0].getrawmempool()

    tip_snapshot_meta = get_tip_snapshot_meta(self.nodes[0])

    test_nonzero_locks(tx2, self.nodes[0], self.relayfee,
                       use_height_lock=True)
    test_nonzero_locks(tx2, self.nodes[0], self.relayfee,
                       use_height_lock=False)

    # Mine tx2, and then try again
    self.nodes[0].prioritisetransaction(
        txid=tx2.hash, fee_delta=int(self.relayfee * UNIT))

    # Advance the time on the node so that we can test timelocks
    self.nodes[0].setmocktime(cur_time + 600)
    self.nodes[0].generate(1)
    assert tx2.hash not in self.nodes[0].getrawmempool()

    # Now that tx2 is not in the mempool, a sequence locked spend should
    # succeed
    tx3 = test_nonzero_locks(tx2, self.nodes[0], self.relayfee,
                             use_height_lock=False)
    assert tx3.hash in self.nodes[0].getrawmempool()

    self.nodes[0].generate(1)
    assert tx3.hash not in self.nodes[0].getrawmempool()

    # One more test, this time using height locks
    tx4 = test_nonzero_locks(tx3, self.nodes[0], self.relayfee,
                             use_height_lock=True)
    assert tx4.hash in self.nodes[0].getrawmempool()

    # Now try combining confirmed and unconfirmed inputs
    tx5 = test_nonzero_locks(tx4, self.nodes[0], self.relayfee,
                             use_height_lock=True)
    assert tx5.hash not in self.nodes[0].getrawmempool()

    utxos = self.nodes[0].listunspent()
    tx5.vin.append(
        CTxIn(COutPoint(int(utxos[0]["txid"], 16), utxos[0]["vout"]),
              nSequence=1))
    tx5.vout[0].nValue += int(utxos[0]["amount"] * UNIT)
    raw_tx5 = self.nodes[0].signrawtransactionwithwallet(ToHex(tx5))["hex"]

    assert_raises_rpc_error(-26, NOT_FINAL_ERROR,
                            self.nodes[0].sendrawtransaction, raw_tx5)

    # Test mempool-BIP68 consistency after reorg
    #
    # State of the transactions in the last blocks:
    # ... -> [ tx2 ] -> [ tx3 ]
    #         tip-1        tip
    # And currently tx4 is in the mempool.
    #
    # If we invalidate the tip, tx3 should get added to the mempool, causing
    # tx4 to be removed (fails sequence-lock).
    self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
    assert tx4.hash not in self.nodes[0].getrawmempool()
    assert tx3.hash in self.nodes[0].getrawmempool()

    # Now mine 2 empty blocks to reorg out the current tip (labeled tip-1 in
    # diagram above).
    # This would cause tx2 to be added back to the mempool, which in turn causes
    # tx3 to be removed.
    tip = int(
        self.nodes[0].getblockhash(self.nodes[0].getblockcount() - 1), 16)
    height = self.nodes[0].getblockcount()

    # Let's get the available stake that is not already used
    # We must exclude tx2 outputs from the list since any stake referred to them will fail
    # In order to do that, we limit outputs with the number of minimum confirmations (minconf = 2)
    avail_stake = [
        x for x in self.nodes[0].listunspent(2) if x['txid'] != tx1.hash
    ]
    for i in range(2):
        stake = avail_stake.pop()
        coinbase = sign_coinbase(
            self.nodes[0],
            create_coinbase(height, stake, tip_snapshot_meta.hash))
        block = create_block(tip, coinbase, cur_time)
        block.nVersion = 3
        block.solve()
        tip = block.sha256
        tip_snapshot_meta = update_snapshot_with_tx(
            self.nodes[0], tip_snapshot_meta, height, coinbase)
        height += 1
        self.nodes[0].p2p.send_and_ping(msg_block(block))
        cur_time += 1

    # sync as the reorg is happening
    self.nodes[0].p2p.sync_with_ping()

    mempool = self.nodes[0].getrawmempool()
    assert tx3.hash not in mempool
    assert tx2.hash in mempool

    # Reset the chain and get rid of the mocktimed-blocks
    self.nodes[0].setmocktime(0)
    self.nodes[0].invalidateblock(
        self.nodes[0].getblockhash(cur_height + 1))
    self.nodes[0].generate(10)
def create_coinbase(self, height, snapshot_hash, coin=None):
    """Return a signed coinbase for the given height.

    When no coin is supplied, one unspent coin is fetched from node0.
    This method shadows the module-level create_coinbase helper, to which
    it delegates the actual construction.
    """
    node = self.nodes[0]
    stake = get_unspent_coins(node, 1)[0] if coin is None else coin
    return sign_coinbase(node, create_coinbase(height, stake, snapshot_hash))
def run_test(self):
    """CLTV test: a CLTV-invalid transaction may enter the mempool but
    must be rejected inside a version-4 block; a corrected transaction
    in the same block must be accepted."""
    self.setup_stake_coins(self.nodes[0])
    self.nodes[0].add_p2p_connection(P2PInterface())

    network_thread_start()

    # wait_for_verack ensures that the P2P connection is fully up.
    self.nodes[0].p2p.wait_for_verack()

    self.log.info("Mining one block")
    self.coinbase_blocks = self.nodes[0].generate(1)
    self.nodeaddress = self.nodes[0].getnewaddress()

    self.log.info(
        "Test that invalid-according-to-cltv transactions cannot appear in a block"
    )
    spendtx = create_transaction(self.nodes[0], self.coinbase_blocks[0],
                                 self.nodeaddress, 1.0)
    cltv_invalidate(spendtx)
    spendtx.rehash()

    # First we show that this tx is valid except for CLTV by getting it
    # accepted to the mempool (which we can achieve with
    # -promiscuousmempoolflags).
    self.nodes[0].p2p.send_and_ping(msg_tx(spendtx))
    assert spendtx.hash in self.nodes[0].getrawmempool()

    # Build a version-4 block containing the CLTV-invalid transaction.
    tip = self.nodes[0].getbestblockhash()
    block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
    snapshot_hash = get_tip_snapshot_meta(self.nodes[0]).hash
    coin = get_unspent_coins(self.nodes[0], 1)[0]
    coinbase = sign_coinbase(self.nodes[0],
                             create_coinbase(1, coin, snapshot_hash))
    block = create_block(int(tip, 16), coinbase, block_time)
    block.nVersion = 4
    block.vtx.append(spendtx)
    block.compute_merkle_trees()
    block.solve()

    self.nodes[0].p2p.send_and_ping(msg_block(block))
    # The tip must not move: the block was rejected.
    assert_equal(self.nodes[0].getbestblockhash(), tip)

    wait_until(lambda: "reject" in self.nodes[0].p2p.last_message.keys(),
               lock=mininode_lock)
    with mininode_lock:
        assert self.nodes[0].p2p.last_message["reject"].code in [
            REJECT_INVALID, REJECT_NONSTANDARD
        ]
        assert_equal(self.nodes[0].p2p.last_message["reject"].data,
                     block.sha256)
        if self.nodes[0].p2p.last_message["reject"].code == REJECT_INVALID:
            # Generic rejection when a block is invalid
            assert_equal(self.nodes[0].p2p.last_message["reject"].reason,
                         b'block-validation-failed')
        else:
            assert b'Negative locktime' in self.nodes[0].p2p.last_message[
                "reject"].reason

    self.log.info(
        "Test that a version 4 block with a valid-according-to-CLTV transaction is accepted"
    )
    spendtx = cltv_validate(self.nodes[0], spendtx, 0)
    spendtx.rehash()

    # Swap the invalid tx for the valid one and re-solve the same block.
    block.vtx.pop(1)
    block.vtx.append(spendtx)
    block.compute_merkle_trees()
    block.solve()

    self.nodes[0].p2p.send_and_ping(msg_block(block))
    assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
def run_test(self):
    """DERSIG test: a non-DER-signature transaction must be rejected from
    the mempool and from a version-3 block; a compliant replacement in the
    same block must be accepted."""
    self.setup_stake_coins(self.nodes[0])
    self.nodes[0].add_p2p_connection(P2PInterface())

    self.log.info("Mining one block")
    self.coinbase_txids = [
        self.nodes[0].getblock(b)['tx'][0]
        for b in self.nodes[0].generate(1)
    ]
    self.nodeaddress = self.nodes[0].getnewaddress()

    self.log.info(
        "Test that transactions with non-DER signatures cannot appear in a block"
    )
    spendtx = create_transaction(self.nodes[0], self.coinbase_txids[0],
                                 self.nodeaddress, amount=1.0)
    unDERify(spendtx)
    spendtx.rehash()

    # First we show that this tx is valid except for DERSIG by getting it
    # rejected from the mempool for exactly that reason.
    assert_equal([{
        'txid': spendtx.hash,
        'allowed': False,
        'reject-reason':
        '64: non-mandatory-script-verify-flag (Non-canonical DER signature)'
    }], self.nodes[0].testmempoolaccept(
        rawtxs=[bytes_to_hex_str(spendtx.serialize())], allowhighfees=True))

    # Now we verify that a block with this transaction is also invalid.
    tip = self.nodes[0].getbestblockhash()
    block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
    snapshot_hash = get_tip_snapshot_meta(self.nodes[0]).hash
    coin = get_unspent_coins(self.nodes[0], 1)[0]
    coinbase = sign_coinbase(self.nodes[0],
                             create_coinbase(1, coin, snapshot_hash))
    block = create_block(int(tip, 16), coinbase, block_time)
    block.nVersion = 3
    block.vtx.append(spendtx)
    block.compute_merkle_trees()
    block.solve()

    self.nodes[0].p2p.send_and_ping(msg_block(block))
    # The tip must not move: the block was rejected.
    assert_equal(int(self.nodes[0].getbestblockhash(), 16), int(tip, 16))

    wait_until(lambda: "reject" in self.nodes[0].p2p.last_message.keys(),
               lock=mininode_lock)
    with mininode_lock:
        # We can receive different reject messages depending on whether
        # unit-e is running with multiple script check threads. If script
        # check threads are not in use, then transaction script validation
        # happens sequentially, and unit-e produces more specific reject
        # reasons.
        assert self.nodes[0].p2p.last_message["reject"].code in [
            REJECT_INVALID, REJECT_NONSTANDARD
        ]
        assert_equal(self.nodes[0].p2p.last_message["reject"].data,
                     block.sha256)
        if self.nodes[0].p2p.last_message["reject"].code == REJECT_INVALID:
            # Generic rejection when a block is invalid
            reject_reason = self.nodes[0].p2p.last_message["reject"].reason
            assert_equal(reject_reason, b'block-validation-failed')
        else:
            assert b'Non-canonical DER signature' in self.nodes[
                0].p2p.last_message["reject"].reason

    self.log.info(
        "Test that a version 3 block with a DERSIG-compliant transaction is accepted"
    )
    # Replace the offending tx with a freshly signed (canonical) one.
    block.vtx[1] = create_transaction(self.nodes[0],
                                      self.coinbase_txids[0],
                                      self.nodeaddress, amount=1.0)
    block.compute_merkle_trees()
    block.solve()

    self.nodes[0].p2p.send_and_ping(msg_block(block))
    assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
def run_test(self):
    """Invalid-transaction handling: rejected txs disconnect the sender,
    orphan transactions are resolved when their parent arrives, and with
    BIP61 disabled the node disconnects without sending a reject message."""
    node = self.nodes[0]  # convenience reference to the node

    self.setup_stake_coins(self.nodes[0])

    self.bootstrap_p2p()  # Add one p2p connection to the node

    best_block = self.nodes[0].getbestblockhash()
    tip = int(best_block, 16)
    best_block_time = self.nodes[0].getblock(best_block)['time']
    block_time = best_block_time + 1

    self.log.info("Create a new block with an anyone-can-spend coinbase.")
    height = 1
    snapshot_hash = get_tip_snapshot_meta(self.nodes[0]).hash
    coin = get_unspent_coins(self.nodes[0], 1)[0]
    coinbase = sign_coinbase(self.nodes[0],
                             create_coinbase(height, coin, snapshot_hash))
    block = create_block(tip, coinbase, block_time)
    block_time += 1
    block.solve()
    # Save the coinbase for later
    block1 = block
    tip = block.sha256
    node.p2p.send_blocks_and_test([block], node, success=True)

    self.log.info("Mature the block.")
    blocks = []
    snapshot_meta = get_tip_snapshot_meta(self.nodes[0])
    # Each block stakes the previous coinbase's stake output (vout 1) and
    # folds the resulting UTXO changes into the snapshot hash.
    for i in range(100):
        prev_coinbase = coinbase
        height += 1
        stake = {
            'txid': prev_coinbase.hash,
            'vout': 1,
            'amount': prev_coinbase.vout[1].nValue / UNIT
        }
        coinbase = sign_coinbase(
            self.nodes[0],
            create_coinbase(height, stake, snapshot_meta.hash))
        block = create_block(tip, coinbase, block_time)
        block.solve()
        tip = block.sha256
        block_time += 1
        blocks.append(block)

        input_utxo = UTXO(height - 1, TxType.COINBASE,
                          coinbase.vin[1].prevout, prev_coinbase.vout[1])
        output_reward = UTXO(height, TxType.COINBASE,
                             COutPoint(coinbase.sha256, 0),
                             coinbase.vout[0])
        output_stake = UTXO(height, TxType.COINBASE,
                            COutPoint(coinbase.sha256, 1),
                            coinbase.vout[1])
        snapshot_meta = calc_snapshot_hash(self.nodes[0], snapshot_meta,
                                           height, [input_utxo],
                                           [output_reward, output_stake],
                                           coinbase)

    node.p2p.send_blocks_and_test(blocks, node, success=True)

    # b'\x64' is OP_NOTIF
    # Transaction will be rejected with code 16 (REJECT_INVALID)
    # and we get disconnected immediately
    self.log.info('Test a transaction that is rejected')
    tx1 = create_tx_with_script(coinbase, 1,
                                script_sig=b'\x64' * 35,
                                amount=50 * UNIT - 12000)
    node.p2p.send_txs_and_test([tx1], node, success=False,
                               expect_disconnect=True)

    # Make two p2p connections to provide the node with orphans
    # * p2ps[0] will send valid orphan txs (one with low fee)
    # * p2ps[1] will send an invalid orphan tx (and is later disconnected for that)
    self.reconnect_p2p(num_connections=2)

    self.log.info('Test orphan transaction handling ... ')

    # Create a root transaction that we withhold until all dependend transactions
    # are sent out and in the orphan cache
    SCRIPT_PUB_KEY_OP_TRUE = b'\x51\x75' * 15 + b'\x51'
    tx_withhold = CTransaction()
    tx_withhold.vin.append(
        CTxIn(outpoint=COutPoint(block1.vtx[0].sha256, 0)))
    tx_withhold.vout.append(
        CTxOut(nValue=PROPOSER_REWARD * UNIT - 12000,
               scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
    tx_withhold.calc_sha256()

    # Our first orphan tx with some outputs to create further orphan txs
    tx_orphan_1 = CTransaction()
    tx_orphan_1.vin.append(
        CTxIn(outpoint=COutPoint(tx_withhold.sha256, 0)))
    tx_orphan_1.vout = [
        CTxOut(nValue=1 * UNIT, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE)
    ] * 3
    tx_orphan_1.calc_sha256()

    # A valid transaction with low fee
    tx_orphan_2_no_fee = CTransaction()
    tx_orphan_2_no_fee.vin.append(
        CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 0)))
    tx_orphan_2_no_fee.vout.append(
        CTxOut(nValue=1 * UNIT, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))

    # A valid transaction with sufficient fee
    tx_orphan_2_valid = CTransaction()
    tx_orphan_2_valid.vin.append(
        CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 1)))
    tx_orphan_2_valid.vout.append(
        CTxOut(nValue=1 * UNIT - 12000,
               scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
    tx_orphan_2_valid.calc_sha256()

    # An invalid transaction with negative fee
    tx_orphan_2_invalid = CTransaction()
    tx_orphan_2_invalid.vin.append(
        CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 2)))
    tx_orphan_2_invalid.vout.append(
        CTxOut(nValue=Decimal('1.1') * UNIT,
               scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))

    self.log.info('Send the orphans ... ')
    # Send valid orphan txs from p2ps[0]
    node.p2p.send_txs_and_test(
        [tx_orphan_1, tx_orphan_2_no_fee, tx_orphan_2_valid], node,
        success=False)
    # Send invalid tx from p2ps[1]
    node.p2ps[1].send_txs_and_test([tx_orphan_2_invalid], node,
                                   success=False)

    assert_equal(0, node.getmempoolinfo()['size'])  # Mempool should be empty
    assert_equal(2, len(node.getpeerinfo()))  # p2ps[1] is still connected

    self.log.info('Send the withhold tx ... ')
    node.p2p.send_txs_and_test([tx_withhold], node, success=True)

    # Transactions that should end up in the mempool
    expected_mempool = {
        t.hash
        for t in [
            tx_withhold,  # The transaction that is the root for all orphans
            tx_orphan_1,  # The orphan transaction that splits the coins
            tx_orphan_2_valid,  # The valid transaction (with sufficient fee)
        ]
    }
    # Transactions that do not end up in the mempool
    # tx_orphan_no_fee, because it has too low fee (p2ps[0] is not disconnected for relaying that tx)
    # tx_orphan_invaid, because it has negative fee (p2ps[1] is disconnected for relaying that tx)
    wait_until(lambda: 1 == len(node.getpeerinfo()),
               timeout=12)  # p2ps[1] is no longer connected
    assert_equal(expected_mempool, set(node.getrawmempool()))

    # restart node with sending BIP61 messages disabled, check that it
    # disconnects without sending the reject message
    self.log.info(
        'Test a transaction that is rejected, with BIP61 disabled')
    self.restart_node(0, ['-enablebip61=0', '-persistmempool=0'])
    self.reconnect_p2p(num_connections=1)
    with node.assert_debug_log(expected_msgs=[
            "{} from peer=0 was not accepted: mandatory-script-verify-flag-failed (Invalid OP_IF construction) (code 16)"
            .format(tx1.hash),
            "disconnecting peer=0",
    ]):
        node.p2p.send_txs_and_test([tx1], node, success=False,
                                   expect_disconnect=True)
    # send_txs_and_test will have waited for disconnect, so we can safely
    # check that no reject has been received
    assert_equal(node.p2p.reject_code_received, None)
def run_test(self):
    """assumevalid test: nodes started with -assumevalid skip script
    verification up to the given block, so they accept a chain containing
    a transaction with a bad (null) signature, while a fully validating
    node rejects it."""
    # Create a block with 2500 stakeable outputs
    self.build_coins_to_stake()

    # Propagate it to nodes 1 and 2 and stop them for now
    self.sync_first_block()

    # Key Management for node 0
    keytool = KeyTool.for_node(self.nodes[0])

    # Connect to node0
    p2p0 = self.nodes[0].add_p2p_connection(BaseNode())

    # Build the blockchain
    self.tip = int(self.nodes[0].getbestblockhash(), 16)
    self.block_time = self.nodes[0].getblock(
        self.nodes[0].getbestblockhash())['time'] + 1

    self.blocks = []

    # Get a pubkey for the coinbase TXO
    coinbase_key = keytool.make_privkey()
    coinbase_pubkey = bytes(coinbase_key.get_pubkey())

    keytool.upload_key(coinbase_key)

    self.log.info(
        "Create the first block with a coinbase output to our key")
    height = 2
    snapshot_meta = get_tip_snapshot_meta(self.nodes[0])
    coin = self.get_coin_to_stake()
    coinbase = sign_coinbase(
        self.nodes[0],
        create_coinbase(height, coin, snapshot_meta.hash, coinbase_pubkey))
    block = create_block(self.tip, coinbase, self.block_time)
    self.blocks.append(block)
    self.block_time += 1
    block.solve()
    # Save the coinbase for later
    self.block1 = block
    self.tip = block.sha256

    # Remember this coinbase's reward output; it is spent (with a null
    # signature) in block 102 below.
    utxo1 = UTXO(height, TxType.COINBASE, COutPoint(coinbase.sha256, 0),
                 coinbase.vout[0])
    snapshot_meta = update_snapshot_with_tx(self.nodes[0], snapshot_meta,
                                            height, coinbase)
    height += 1

    self.log.info(
        "Bury the block 100 deep so the coinbase output is spendable")
    for i in range(100):
        coin = self.get_coin_to_stake()
        coinbase = sign_coinbase(
            self.nodes[0],
            create_coinbase(height, coin, snapshot_meta.hash,
                            coinbase_pubkey))
        block = create_block(self.tip, coinbase, self.block_time)
        block.solve()
        self.blocks.append(block)
        self.tip = block.sha256
        self.block_time += 1
        snapshot_meta = update_snapshot_with_tx(self.nodes[0],
                                                snapshot_meta, height,
                                                coinbase)
        height += 1

    self.log.info(
        "Create a transaction spending the coinbase output with an invalid (null) signature"
    )
    tx = CTransaction()
    tx.vin.append(
        CTxIn(COutPoint(self.block1.vtx[0].sha256, 0), scriptSig=b""))
    tx.vout.append(
        CTxOut((PROPOSER_REWARD - 1) * 100000000, CScript([OP_TRUE])))
    tx.calc_sha256()

    coin = self.get_coin_to_stake()
    coinbase = sign_coinbase(
        self.nodes[0],
        create_coinbase(height, coin, snapshot_meta.hash, coinbase_pubkey))
    block102 = create_block(self.tip, coinbase, self.block_time)
    self.block_time += 1
    block102.vtx.extend([tx])
    block102.compute_merkle_trees()
    block102.rehash()
    block102.solve()
    self.blocks.append(block102)
    self.tip = block102.sha256
    self.block_time += 1

    snapshot_meta = update_snapshot_with_tx(self.nodes[0], snapshot_meta,
                                            height, coinbase)
    # Account for tx: it consumes utxo1 and creates utxo2.
    utxo2 = UTXO(height, tx.get_type(), COutPoint(tx.sha256, 0),
                 tx.vout[0])
    snapshot_meta = calc_snapshot_hash(self.nodes[0], snapshot_meta,
                                       height, [utxo1], [utxo2])
    height += 1

    self.log.info("Bury the assumed valid block 2100 deep")
    for i in range(2100):
        coin = self.get_coin_to_stake()
        coinbase = sign_coinbase(
            self.nodes[0],
            create_coinbase(height, coin, snapshot_meta.hash,
                            coinbase_pubkey))
        block = create_block(self.tip, coinbase, self.block_time)
        block.nVersion = 4
        block.solve()
        self.blocks.append(block)
        self.tip = block.sha256
        self.block_time += 1
        snapshot_meta = update_snapshot_with_tx(self.nodes[0],
                                                snapshot_meta, height,
                                                coinbase)
        height += 1

    self.nodes[0].disconnect_p2ps()

    self.log.info(
        "Start node1 and node2 with assumevalid so they accept a block with a bad signature."
    )
    self.start_node(1, extra_args=["-assumevalid=" + hex(block102.sha256)])
    self.start_node(2, extra_args=["-assumevalid=" + hex(block102.sha256)])

    p2p0 = self.nodes[0].add_p2p_connection(BaseNode())
    p2p1 = self.nodes[1].add_p2p_connection(BaseNode())
    p2p2 = self.nodes[2].add_p2p_connection(BaseNode())

    # send header lists to all three nodes
    p2p0.send_header_for_blocks(self.blocks[0:2000])
    p2p0.send_header_for_blocks(self.blocks[2000:])
    p2p1.send_header_for_blocks(self.blocks[0:2000])
    p2p1.send_header_for_blocks(self.blocks[2000:])
    # node2 only gets headers up to block 200, so it cannot assume
    # validity of block102 and must validate it.
    p2p2.send_header_for_blocks(self.blocks[0:200])

    self.log.info("Send blocks to node0. Block 103 will be rejected.")
    self.send_blocks_until_disconnected(p2p0)
    self.assert_blockchain_height(self.nodes[0], 102)

    self.log.info("Send all blocks to node1. All blocks will be accepted.")
    for i in range(2202):
        p2p1.send_message(msg_block(self.blocks[i]))
    # Syncing 2200 blocks can take a while on slow systems. Give it plenty
    # of time to sync.
    p2p1.sync_with_ping(120)
    assert_equal(
        self.nodes[1].getblock(self.nodes[1].getbestblockhash())['height'],
        2203)

    self.log.info("Send blocks to node2. Block 102 will be rejected.")
    self.send_blocks_until_disconnected(p2p2)
    self.assert_blockchain_height(self.nodes[2], 102)
def run_test(self):
    """Exercise acceptance of unrequested blocks delivered over P2P.

    Walks through nine numbered scenarios (see the self.log.info calls):
    blocks building on each node's tip, fork blocks that arrive unrequested,
    a 288-block chain delivered out of order, reorg to a longer chain, an
    invalid fork (immature coinbase spend) that must fail reorg, and finally
    syncing node1 to node0.

    Uses two peers: test_node -> node0 (not whitelisted) and
    min_work_node -> node1 (whitelisted).
    """
    self.setup_stake_coins(*self.nodes)

    # Setup the p2p connections
    # test_node connects to node0 (not whitelisted)
    test_node = self.nodes[0].add_p2p_connection(P2PInterface())
    # min_work_node connects to node1 (whitelisted)
    min_work_node = self.nodes[1].add_p2p_connection(P2PInterface())

    # Seed a UTXO manager with the genesis-era coin so we can build a fork
    # from genesis without asking the node for spendable outputs.
    fork_snapshot_meta = get_tip_snapshot_meta(self.nodes[0])
    utxo_manager = UTXOManager(self.nodes[0], fork_snapshot_meta)
    genesis_coin = get_unspent_coins(self.nodes[0], 1)[0]
    genesis_txout = CTxOut(
        int(genesis_coin['amount'] * UNIT),
        CScript(hex_str_to_bytes(genesis_coin['scriptPubKey'])))
    genesis_utxo = [
        UTXO(
            0, TxType.COINBASE,
            COutPoint(int(genesis_coin['txid'], 16), genesis_coin['vout']),
            genesis_txout)
    ]
    utxo_manager.available_outputs = genesis_utxo

    self.log.info("1. Have nodes mine a block (leave IBD)")
    [n.generate(1) for n in self.nodes]
    tips = [int("0x" + n.getbestblockhash(), 0) for n in self.nodes]
    tip_snapshot_meta = get_tip_snapshot_meta(self.nodes[0])

    self.log.info(
        "2. Send one block that builds on each tip. This should be accepted by node0."
    )
    blocks_h2 = []  # the height 2 blocks on each node's chain
    block_time = int(time.time()) + 1
    # NOTE(review): the same coin stakes both height-2 blocks; presumably
    # acceptable because the two blocks live on independent chains — confirm.
    coin = get_unspent_coins(self.nodes[0], 1)[0]
    for i in range(2):
        coinbase = sign_coinbase(
            self.nodes[0], create_coinbase(2, coin, tip_snapshot_meta.hash))
        blocks_h2.append(create_block(tips[i], coinbase, block_time))
        blocks_h2[i].solve()
        block_time += 1
    test_node.send_message(msg_block(blocks_h2[0]))
    min_work_node.send_message(msg_block(blocks_h2[1]))

    for x in [test_node, min_work_node]:
        x.sync_with_ping()
    # node0 accepts its block; node1 (min work) rejects, staying at height 1.
    assert_equal(self.nodes[0].getblockcount(), 2)
    assert_equal(self.nodes[1].getblockcount(), 1)
    self.log.info(
        "First height 2 block accepted by node0; correctly rejected by node1"
    )

    self.log.info("3. Send another block that builds on genesis.")
    coinbase = utxo_manager.get_coinbase(1, n_pieces=300)
    block_h1f = create_block(int("0x" + self.nodes[0].getblockhash(0), 0),
                             coinbase, block_time)
    block_time += 1
    block_h1f.solve()
    test_node.send_message(msg_block(block_h1f))
    utxo_manager.process(coinbase, 1)

    test_node.sync_with_ping()
    # The fork block is known only by its header ("headers-only") and its
    # data must not be on disk.
    tip_entry_found = False
    for x in self.nodes[0].getchaintips():
        if x['hash'] == block_h1f.hash:
            assert_equal(x['status'], "headers-only")
            tip_entry_found = True
    assert tip_entry_found
    assert_raises_rpc_error(-1, "Block not found on disk",
                            self.nodes[0].getblock, block_h1f.hash)

    self.log.info("4. Send another two block that build on the fork.")
    coinbase = utxo_manager.get_coinbase(2)
    block_h2f = create_block(block_h1f.sha256, coinbase, block_time)
    block_time += 1
    block_h2f.solve()
    test_node.send_message(msg_block(block_h2f))
    utxo_manager.process(coinbase, 2)

    test_node.sync_with_ping()
    # Since the earlier block was not processed by node, the new block
    # can't be fully validated.
    tip_entry_found = False
    for x in self.nodes[0].getchaintips():
        if x['hash'] == block_h2f.hash:
            assert_equal(x['status'], "headers-only")
            tip_entry_found = True
    assert tip_entry_found

    # But this block should be accepted by node since it has equal work.
    self.nodes[0].getblock(block_h2f.hash)
    self.log.info("Second height 2 block accepted, but not reorg'ed to")

    self.log.info(
        "4b. Now send another block that builds on the forking chain.")
    coinbase = utxo_manager.get_coinbase(3)
    block_h3 = create_block(block_h2f.sha256, coinbase, block_h2f.nTime + 1)
    block_h3.solve()
    test_node.send_message(msg_block(block_h3))
    utxo_manager.process(coinbase, 3)

    test_node.sync_with_ping()
    # Since the earlier block was not processed by node, the new block
    # can't be fully validated.
    tip_entry_found = False
    for x in self.nodes[0].getchaintips():
        if x['hash'] == block_h3.hash:
            assert_equal(x['status'], "headers-only")
            tip_entry_found = True
    assert tip_entry_found
    # NOTE(review): getblock(block_h3.hash) is called twice in a row here;
    # the first call looks redundant — confirm and consider removing.
    self.nodes[0].getblock(block_h3.hash)

    # But this block should be accepted by node since it has more work.
    self.nodes[0].getblock(block_h3.hash)
    self.log.info("Unrequested more-work block accepted")

    self.log.info("4c. Now mine 288 more blocks and deliver")
    # all should be processed but
    # the last (height-too-high) on node (as long as it is not missing any headers)
    tip = block_h3
    all_blocks = []
    for height in range(4, 292):
        coinbase = utxo_manager.get_coinbase(height)
        next_block = create_block(tip.sha256, coinbase, tip.nTime + 1)
        next_block.solve()
        all_blocks.append(next_block)
        tip = next_block
        utxo_manager.process(coinbase, height)

    # Now send the block at height 5 and check that it wasn't accepted (missing header)
    test_node.send_message(msg_block(all_blocks[1]))
    test_node.sync_with_ping()
    assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblock,
                            all_blocks[1].hash)
    assert_raises_rpc_error(-5, "Block not found",
                            self.nodes[0].getblockheader, all_blocks[1].hash)

    # The block at height 5 should be accepted if we provide the missing header, though
    headers_message = msg_headers()
    headers_message.headers.append(CBlockHeader(all_blocks[0]))
    test_node.send_message(headers_message)
    test_node.send_message(msg_block(all_blocks[1]))
    test_node.sync_with_ping()
    self.nodes[0].getblock(all_blocks[1].hash)

    # Now send the blocks in all_blocks
    for i in range(288):
        test_node.send_message(msg_block(all_blocks[i]))
    test_node.sync_with_ping()

    # Blocks 1-287 should be accepted, block 288 should be ignored because it's too far ahead
    for x in all_blocks[:-1]:
        self.nodes[0].getblock(x.hash)
    assert_raises_rpc_error(-1, "Block not found on disk",
                            self.nodes[0].getblock, all_blocks[-1].hash)

    self.log.info(
        "5. Test handling of unrequested block on the node that didn't process"
    )
    # Should still not be processed (even though it has a child that has more
    # work).

    # The node should have requested the blocks at some point, so
    # disconnect/reconnect first
    self.nodes[0].disconnect_p2ps()
    self.nodes[1].disconnect_p2ps()
    test_node = self.nodes[0].add_p2p_connection(P2PInterface())
    test_node.send_message(msg_block(block_h1f))
    test_node.sync_with_ping()
    assert_equal(self.nodes[0].getblockcount(), 2)
    self.log.info(
        "Unrequested block that would complete more-work chain was ignored"
    )

    self.log.info("6. Try to get node to request the missing block.")
    # Poke the node with an inv for block at height 3 and see if that
    # triggers a getdata on block 2 (it should if block 2 is missing).
    with mininode_lock:
        # Clear state so we can check the getdata request
        test_node.last_message.pop("getdata", None)
        test_node.send_message(msg_inv([CInv(2, block_h3.sha256)]))

    test_node.sync_with_ping()
    with mininode_lock:
        getdata = test_node.last_message["getdata"]

    # Check that the getdata includes the right block
    assert_equal(getdata.inv[0].hash, block_h1f.sha256)
    self.log.info("Inv at tip triggered getdata for unprocessed block")

    self.log.info(
        "7. Send the missing block for the third time (now it is requested)"
    )
    test_node.send_message(msg_block(block_h1f))
    test_node.sync_with_ping()
    # Delivering the missing link completes the more-work chain: reorg to 290.
    assert_equal(self.nodes[0].getblockcount(), 290)
    self.nodes[0].getblock(all_blocks[286].hash)
    assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash)
    assert_raises_rpc_error(-1, "Block not found on disk",
                            self.nodes[0].getblock, all_blocks[287].hash)
    self.log.info(
        "Successfully reorged to longer chain from non-whitelisted peer")

    self.log.info(
        "8. Create a chain which is invalid at a height longer than the")
    # current chain, but which has more blocks on top of that

    # Reset utxo managers to current state
    # NOTE(review): after these assignments both managers share the SAME
    # available_outputs list object, so consuming an output through one
    # manager also removes it from the other — confirm this aliasing is
    # intentional (a copy may have been meant).
    utxo_fork_manager = UTXOManager(self.nodes[0],
                                    get_tip_snapshot_meta(self.nodes[0]))
    utxo_fork_manager.available_outputs = utxo_manager.available_outputs
    utxo_manager = UTXOManager(self.nodes[0],
                               get_tip_snapshot_meta(self.nodes[0]))
    utxo_manager.available_outputs = utxo_fork_manager.available_outputs

    # Create one block on top of the valid chain
    coinbase = utxo_manager.get_coinbase(291)
    valid_block = create_block(all_blocks[286].sha256, coinbase,
                               all_blocks[286].nTime + 1)
    valid_block.solve()
    test_node.send_and_ping(msg_block(valid_block))
    assert_equal(self.nodes[0].getblockcount(), 291)

    # Create three blocks on a fork, but make the second one invalid
    coinbase = utxo_fork_manager.get_coinbase(291)
    block_291f = create_block(all_blocks[286].sha256, coinbase,
                              all_blocks[286].nTime + 1)
    block_291f.solve()
    utxo_fork_manager.process(coinbase, 291)
    coinbase = utxo_fork_manager.get_coinbase(292)
    block_292f = create_block(block_291f.sha256, coinbase,
                              block_291f.nTime + 1)
    # block_292f spends a coinbase below maturity!
    block_292f.vtx.append(
        create_tx_with_script(block_291f.vtx[0], 0, script_sig=b"42",
                              amount=1))
    block_292f.compute_merkle_trees()
    block_292f.solve()
    utxo_fork_manager.process(coinbase, 292)
    utxo_fork_manager.process(block_292f.vtx[1], 292)
    coinbase = utxo_fork_manager.get_coinbase(293)
    block_293f = create_block(block_292f.sha256, coinbase,
                              block_292f.nTime + 1)
    block_293f.solve()
    utxo_fork_manager.process(coinbase, 293)

    # Now send all the headers on the chain and enough blocks to trigger reorg
    headers_message = msg_headers()
    headers_message.headers.append(CBlockHeader(block_291f))
    headers_message.headers.append(CBlockHeader(block_292f))
    headers_message.headers.append(CBlockHeader(block_293f))
    test_node.send_message(headers_message)

    test_node.sync_with_ping()
    tip_entry_found = False
    for x in self.nodes[0].getchaintips():
        if x['hash'] == block_293f.hash:
            assert_equal(x['status'], "headers-only")
            tip_entry_found = True
    assert tip_entry_found
    assert_raises_rpc_error(-1, "Block not found on disk",
                            self.nodes[0].getblock, block_293f.hash)

    test_node.send_message(msg_block(block_291f))

    test_node.sync_with_ping()
    self.nodes[0].getblock(block_291f.hash)

    test_node.send_message(msg_block(block_292f))

    # At this point we've sent an obviously-bogus block, wait for full processing
    # without assuming whether we will be disconnected or not
    try:
        # Only wait a short while so the test doesn't take forever if we do get
        # disconnected
        test_node.sync_with_ping(timeout=1)
    except AssertionError:
        test_node.wait_for_disconnect()

        self.nodes[0].disconnect_p2ps()
        test_node = self.nodes[0].add_p2p_connection(P2PInterface())

    # We should have failed reorg and switched back to 290 (but have block 291)
    assert_equal(self.nodes[0].getblockcount(), 291)
    assert_equal(self.nodes[0].getbestblockhash(), valid_block.hash)
    assert_equal(self.nodes[0].getblock(block_292f.hash)["confirmations"],
                 -1)

    # Now send a new header on the invalid chain, indicating we're forked off, and expect to get disconnected
    coinbase = utxo_fork_manager.get_coinbase(294)
    block_294f = create_block(block_293f.sha256, coinbase,
                              block_293f.nTime + 1)
    block_294f.solve()
    headers_message = msg_headers()
    headers_message.headers.append(CBlockHeader(block_294f))
    test_node.send_message(headers_message)
    test_node.wait_for_disconnect()

    self.log.info("9. Connect node1 to node0 and ensure it is able to sync")
    connect_nodes(self.nodes[0], 1)
    sync_blocks([self.nodes[0], self.nodes[1]])
    self.log.info("Successfully synced nodes 1 and 0")
def run_test(self):
    """Verify CLTV enforcement over P2P.

    A v4 block carrying a transaction that violates CHECKLOCKTIMEVERIFY
    must be rejected (with the expected reject code/reason), and the same
    block is accepted once the transaction is made CLTV-valid.
    """
    node = self.nodes[0]
    self.setup_stake_coins(node)
    node.add_p2p_connection(P2PInterface())

    self.log.info("Mining one block")
    self.coinbase_txids = [
        node.getblock(b)['tx'][0] for b in node.generate(1)
    ]
    self.nodeaddress = node.getnewaddress()

    self.log.info(
        "Test that invalid-according-to-cltv transactions cannot appear in a block"
    )
    spendtx = create_transaction(node, self.coinbase_txids[0],
                                 self.nodeaddress, amount=1.0)
    cltv_invalidate(spendtx)
    spendtx.rehash()

    # First we show that this tx is valid except for CLTV by getting it
    # rejected from the mempool for exactly that reason.
    expected_rejection = [{
        'txid': spendtx.hash,
        'allowed': False,
        'reject-reason':
            '64: non-mandatory-script-verify-flag (Negative locktime)'
    }]
    raw_spendtx = bytes_to_hex_str(spendtx.serialize())
    assert_equal(expected_rejection,
                 node.testmempoolaccept(rawtxs=[raw_spendtx],
                                        allowhighfees=True))

    # Assemble a v4 block that contains the CLTV-invalid transaction.
    tip = node.getbestblockhash()
    block_time = node.getblockheader(tip)['mediantime'] + 1
    snapshot_hash = get_tip_snapshot_meta(node).hash
    stake = get_unspent_coins(node, 1)[0]
    coinbase = sign_coinbase(node, create_coinbase(1, stake, snapshot_hash))
    block = create_block(int(tip, 16), coinbase, block_time)
    block.nVersion = 4
    block.vtx.append(spendtx)
    block.compute_merkle_trees()
    block.solve()

    # The block must be rejected: the best block hash does not move.
    node.p2p.send_and_ping(msg_block(block))
    assert_equal(node.getbestblockhash(), tip)

    wait_until(lambda: "reject" in node.p2p.last_message.keys(),
               lock=mininode_lock)
    with mininode_lock:
        reject = node.p2p.last_message["reject"]
        assert reject.code in [REJECT_INVALID, REJECT_NONSTANDARD]
        assert_equal(reject.data, block.sha256)
        if reject.code == REJECT_INVALID:
            # Generic rejection when a block is invalid
            assert_equal(reject.reason, b'block-validation-failed')
        else:
            assert b'Negative locktime' in reject.reason

    self.log.info(
        "Test that a version 4 block with a valid-according-to-CLTV transaction is accepted"
    )
    spendtx = cltv_validate(node, spendtx, 0)
    spendtx.rehash()

    # Swap in the now-valid spend and re-solve the same block.
    block.vtx.pop(1)
    block.vtx.append(spendtx)
    block.compute_merkle_trees()
    block.solve()

    node.p2p.send_and_ping(msg_block(block))
    assert_equal(int(node.getbestblockhash(), 16), block.sha256)
def test_BIP(self, bipName, activated_version, invalidate,
             invalidatePostSignature, bitno):
    """Drive one BIP9 deployment through its state machine.

    Generator that yields TestInstance batches (comptool style) walking
    bipName through: defined -> started (Test 1), failed signalling
    (Tests 1-A/1-B/1-C and 2), locked_in (Test 3), waiting (Test 4),
    active with the new rule enforced (Test 5), and a rejected
    rule-violating block once active (Test 6). Finally restarts the node
    from a clean datadir.

    Args:
        bipName: deployment name queried via get_bip9_status.
        activated_version: block nVersion that signals readiness.
        invalidate: mutates a tx so it violates the new rule (pre-sign).
        invalidatePostSignature: mutates the tx after signing.
        bitno: deployment bit number (unused in the visible body).
    """
    def tip_coin():
        # Fetch a single spendable coin at the current tip.
        return get_unspent_coins(self.nodes[0], 1)[0]

    self.setup_stake_coins(self.nodes[0])

    assert_equal(self.get_bip9_status(bipName)['status'], 'defined')
    assert_equal(self.get_bip9_status(bipName)['since'], 0)

    # generate some coins
    self.nodes[0].generate(2)
    self.height = 3  # height of the next block to build
    self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
    self.nodeaddress = self.nodes[0].getnewaddress()
    self.last_block_time = int(time.time())
    self.snapshot_meta = get_tip_snapshot_meta(self.nodes[0])

    assert_equal(self.get_bip9_status(bipName)['status'], 'defined')
    assert_equal(self.get_bip9_status(bipName)['since'], 0)

    # Test 1
    # Advance from DEFINED to STARTED
    coins = get_unspent_coins(self.nodes[0], 141)
    test_blocks = self.generate_blocks(coins, 141, 4)
    yield TestInstance(test_blocks, sync_every_block=False)

    assert_equal(self.get_bip9_status(bipName)['status'], 'started')
    assert_equal(self.get_bip9_status(bipName)['since'], 144)
    assert_equal(self.get_bip9_status(bipName)['statistics']['elapsed'], 0)
    assert_equal(self.get_bip9_status(bipName)['statistics']['count'], 0)

    # Test 1-A
    # check stats after max number of "signalling not" blocks such that LOCKED_IN still possible this period
    # NOTE(review): the first call below chains onto the already-yielded
    # Test 1 list while the second starts a fresh list — confirm against
    # generate_blocks' signature that this asymmetry is intended.
    coins = get_unspent_coins(self.nodes[0], 46)
    test_blocks = self.generate_blocks(
        coins, 36, 4, test_blocks)  # 0x00000004 (signalling not)
    test_blocks = self.generate_blocks(
        coins, 10, activated_version)  # 0x20000001 (signalling ready)
    yield TestInstance(test_blocks, sync_every_block=False)

    assert_equal(
        self.get_bip9_status(bipName)['statistics']['elapsed'], 46)
    assert_equal(self.get_bip9_status(bipName)['statistics']['count'], 10)
    assert_equal(
        self.get_bip9_status(bipName)['statistics']['possible'], True)

    # Test 1-B
    # check stats after one additional "signalling not" block -- LOCKED_IN no longer possible this period
    test_blocks = self.generate_blocks(
        [tip_coin()], 1, 4, test_blocks)  # 0x00000004 (signalling not)
    yield TestInstance(test_blocks, sync_every_block=False)

    assert_equal(
        self.get_bip9_status(bipName)['statistics']['elapsed'], 47)
    assert_equal(self.get_bip9_status(bipName)['statistics']['count'], 10)
    assert_equal(
        self.get_bip9_status(bipName)['statistics']['possible'], False)

    # Test 1-C
    # finish period with "ready" blocks, but soft fork will still fail to advance to LOCKED_IN
    coins = get_unspent_coins(self.nodes[0], 97)
    test_blocks = self.generate_blocks(
        coins, 97, activated_version)  # 0x20000001 (signalling ready)
    yield TestInstance(test_blocks, sync_every_block=False)

    # New period started: counters reset, signalling is still possible.
    assert_equal(self.get_bip9_status(bipName)['statistics']['elapsed'], 0)
    assert_equal(self.get_bip9_status(bipName)['statistics']['count'], 0)
    assert_equal(
        self.get_bip9_status(bipName)['statistics']['possible'], True)
    assert_equal(self.get_bip9_status(bipName)['status'], 'started')

    # Test 2
    # Fail to achieve LOCKED_IN 100 out of 144 signal bit 1
    # using a variety of bits to simulate multiple parallel softforks
    coins = get_unspent_coins(self.nodes[0], 144)
    test_blocks = self.generate_blocks(
        coins, 50, activated_version)  # 0x20000001 (signalling ready)
    test_blocks = self.generate_blocks(
        coins, 20, 4, test_blocks)  # 0x00000004 (signalling not)
    test_blocks = self.generate_blocks(
        coins, 50, activated_version,
        test_blocks)  # 0x20000101 (signalling ready)
    test_blocks = self.generate_blocks(
        coins, 24, 4, test_blocks)  # 0x20010000 (signalling not)
    yield TestInstance(test_blocks, sync_every_block=False)

    assert_equal(self.get_bip9_status(bipName)['status'], 'started')
    assert_equal(self.get_bip9_status(bipName)['since'], 144)
    assert_equal(self.get_bip9_status(bipName)['statistics']['elapsed'], 0)
    assert_equal(self.get_bip9_status(bipName)['statistics']['count'], 0)

    # Test 3
    # 108 out of 144 signal bit 1 to achieve LOCKED_IN
    # using a variety of bits to simulate multiple parallel softforks
    coins = get_unspent_coins(self.nodes[0], 143)
    test_blocks = self.generate_blocks(
        coins, 57, activated_version)  # 0x20000001 (signalling ready)
    test_blocks = self.generate_blocks(
        coins, 26, 4, test_blocks)  # 0x00000004 (signalling not)
    test_blocks = self.generate_blocks(
        coins, 50, activated_version,
        test_blocks)  # 0x20000101 (signalling ready)
    test_blocks = self.generate_blocks(
        coins, 10, 4, test_blocks)  # 0x20010000 (signalling not)
    yield TestInstance(test_blocks, sync_every_block=False)

    # check counting stats and "possible" flag before last block of this period achieves LOCKED_IN...
    assert_equal(
        self.get_bip9_status(bipName)['statistics']['elapsed'], 143)
    assert_equal(self.get_bip9_status(bipName)['statistics']['count'], 107)
    assert_equal(
        self.get_bip9_status(bipName)['statistics']['possible'], True)
    assert_equal(self.get_bip9_status(bipName)['status'], 'started')

    # ...continue with Test 3
    test_blocks = self.generate_blocks(
        [tip_coin()], 1,
        activated_version)  # 0x20000001 (signalling ready)
    yield TestInstance(test_blocks, sync_every_block=False)

    assert_equal(self.get_bip9_status(bipName)['status'], 'locked_in')
    assert_equal(self.get_bip9_status(bipName)['since'], 576)

    # Test 4
    # 143 more version 536870913 blocks (waiting period-1)
    coins = get_unspent_coins(self.nodes[0], 143)
    test_blocks = self.generate_blocks(coins, 143, 4)
    yield TestInstance(test_blocks, sync_every_block=False)

    assert_equal(self.get_bip9_status(bipName)['status'], 'locked_in')
    assert_equal(self.get_bip9_status(bipName)['since'], 576)

    # Test 5
    # Check that the new rule is enforced
    coins = get_unspent_coins(self.nodes[0], 2)
    spendtx = self.create_transaction(self.nodes[0], coins[0],
                                      self.nodeaddress, 1.0)
    invalidate(spendtx)
    spendtx = self.sign_transaction(self.nodes[0], spendtx)
    spendtx.rehash()
    invalidatePostSignature(spendtx)
    spendtx.rehash()

    coinbase = sign_coinbase(
        self.nodes[0],
        create_coinbase(self.height, coins[1], self.snapshot_meta.hash))
    block = create_block(self.tip, coinbase, self.last_block_time + 1)
    block.nVersion = activated_version
    block.vtx.append(spendtx)
    block.compute_merkle_trees()
    block.solve()

    self.last_block_time += 1
    self.tip = block.sha256
    self.height += 1
    # This block is still accepted: the deployment activates with it.
    yield TestInstance([[block, True]])

    assert_equal(self.get_bip9_status(bipName)['status'], 'active')
    assert_equal(self.get_bip9_status(bipName)['since'], 720)
    self.snapshot_meta = get_tip_snapshot_meta(self.nodes[0])

    # Test 6
    # Check that the new sequence lock rules are enforced
    coins = get_unspent_coins(self.nodes[0], 2)
    spendtx = self.create_transaction(self.nodes[0], coins.pop(),
                                      self.nodeaddress, 1.0)
    invalidate(spendtx)
    spendtx = self.sign_transaction(self.nodes[0], spendtx)
    spendtx.rehash()
    invalidatePostSignature(spendtx)
    spendtx.rehash()

    coinbase = sign_coinbase(
        self.nodes[0],
        create_coinbase(self.height, coins.pop(),
                        self.snapshot_meta.hash))
    block = create_block(self.tip, coinbase, self.last_block_time + 1)
    block.nVersion = 5
    block.vtx.append(spendtx)
    block.compute_merkle_trees()
    block.solve()
    self.last_block_time += 1
    # Now that the rule is active, the violating block must be rejected.
    yield TestInstance([[block, False]])

    # Restart all
    self.test.clear_all_connections()
    self.stop_nodes()
    self.nodes = []
    # Wipe node0's datadir so the next deployment test starts from scratch.
    shutil.rmtree(self.options.tmpdir + "/node0")
    self.setup_chain()
    self.setup_network()
    self.test.add_all_connections(self.nodes)
    network_thread_start()
    self.test.p2p_connections[0].wait_for_verack()
def run_test(self):
    """Verify DERSIG (strict DER signatures) enforcement over P2P.

    A v3 block containing a transaction with a non-canonical DER
    signature must be rejected (with the expected reject code/reason);
    the same block with a compliant transaction is accepted.
    """
    node = self.nodes[0]
    self.setup_stake_coins(node)
    node.add_p2p_connection(P2PInterface())
    network_thread_start()

    # wait_for_verack ensures that the P2P connection is fully up.
    node.p2p.wait_for_verack()

    self.log.info("Mining one block")
    self.coinbase_blocks = node.generate(1)
    self.nodeaddress = node.getnewaddress()

    self.log.info("Test that transactions with non-DER signatures cannot appear in a block")
    spendtx = create_transaction(node, self.coinbase_blocks[0],
                                 self.nodeaddress, 1.0)
    unDERify(spendtx)
    spendtx.rehash()

    # First we show that this tx is valid except for DERSIG by getting it
    # accepted to the mempool (which we can achieve with
    # -promiscuousmempoolflags).
    node.p2p.send_and_ping(msg_tx(spendtx))
    assert spendtx.hash in node.getrawmempool()

    # Now we verify that a block with this transaction is invalid.
    tip = node.getbestblockhash()
    block_time = node.getblockheader(tip)['mediantime'] + 1
    snapshot_hash = get_tip_snapshot_meta(node).hash
    stake = get_unspent_coins(node, 1)[0]
    coinbase = sign_coinbase(node, create_coinbase(1, stake, snapshot_hash))
    block = create_block(int(tip, 16), coinbase, block_time)
    block.nVersion = 3
    block.vtx.append(spendtx)
    block.compute_merkle_trees()
    block.solve()

    # The block must be rejected: the tip stays where it was.
    node.p2p.send_and_ping(msg_block(block))
    assert_equal(int(node.getbestblockhash(), 16), int(tip, 16))

    wait_until(lambda: "reject" in node.p2p.last_message.keys(),
               lock=mininode_lock)
    with mininode_lock:
        # We can receive different reject messages depending on whether
        # unit-e is running with multiple script check threads. If script
        # check threads are not in use, then transaction script validation
        # happens sequentially, and unit-e produces more specific reject
        # reasons.
        reject = node.p2p.last_message["reject"]
        assert reject.code in [REJECT_INVALID, REJECT_NONSTANDARD]
        assert_equal(reject.data, block.sha256)
        if reject.code == REJECT_INVALID:
            # Generic rejection when a block is invalid
            assert_equal(reject.reason, b'block-validation-failed')
        else:
            assert b'Non-canonical DER signature' in reject.reason

    self.log.info("Test that a version 3 block with a DERSIG-compliant transaction is accepted")
    # Replace the offending tx with a properly-signed one and re-solve.
    block.vtx[1] = create_transaction(node, self.coinbase_blocks[0],
                                      self.nodeaddress, 1.0)
    block.compute_merkle_trees()
    block.solve()

    node.p2p.send_and_ping(msg_block(block))
    assert_equal(int(node.getbestblockhash(), 16), block.sha256)