def check_mempool_sizes(self, expected_size, check_node3=True):
    """Assert that each node's mempool holds exactly `expected_size` entries.

    Checks all nodes, or all but the last one when `check_node3` is False
    (node 3 may be intentionally isolated/out of sync in some scenarios).
    On a mismatch, dumps every node's mempool before failing the test.

    Fixes over the previous version: the debug loop used to shadow the
    outer loop variable `i` (so the failure message reported the wrong
    node index) and printed the same `mempool` list for every node
    instead of fetching each node's own mempool.
    """
    for i in range(self.num_nodes if check_node3 else self.num_nodes - 1):
        mempool = self.nodes[i].getrawmempool()
        if len(mempool) != expected_size:
            # print all nodes' mempools before failing
            for j in range(self.num_nodes):
                print("Mempool for node {}: {}".format(j, self.nodes[j].getrawmempool()))
            fail("Fail: Mempool for node {}: size={}, expected={}".format(i, len(mempool), expected_size))
def assert_mergetoaddress_exception(expected_error_msg, merge_to_address_lambda):
    """Run `merge_to_address_lambda` and require it to raise a JSONRPCException
    whose error message equals `expected_error_msg` exactly.

    Any other exception type — or no exception at all — fails the test.
    """
    try:
        merge_to_address_lambda()
        # Reaching this point means no exception was raised at all.
        fail("Expected exception: %s" % expected_error_msg)
    except JSONRPCException as rpc_err:
        assert_equal(expected_error_msg, rpc_err.error['message'])
    except Exception as unexpected:
        fail("Expected JSONRPCException. Found %s" % repr(unexpected))
def verify_disconnected(self, testnode, timeout=30):
    """Poll until `testnode`'s connection is closed, failing after `timeout` seconds.

    Checks `testnode.conn_closed` under `mininode_lock` every 50 ms.

    Fix: the failure message used to read "Should have received pong",
    copy-pasted from sync_with_ping; this helper waits for a disconnect,
    so the message now says so.
    """
    sleep_time = 0.05
    while timeout > 0:
        with mininode_lock:
            if testnode.conn_closed:
                return
        time.sleep(sleep_time)
        timeout -= sleep_time
    fail("Should have been disconnected")
def check_mcreferencedata_presence(mcblock_hash, scblock_id, sc_node):
    """Assert that mainchain block `mcblock_hash` appears in the mainchain
    block reference data of sidechain block `scblock_id`; fail otherwise."""
    block_json = sc_node.block_findById(blockId=scblock_id)
    reference_data = block_json["result"]["block"]["mainchainBlockReferencesData"]
    if any(entry["headerHash"] == mcblock_hash for entry in reference_data):
        print("MC hash {0} is present in SC Block {1} mainchain reference data.".format(mcblock_hash, scblock_id))
        return
    fail("MC hash {0} was not found in SC Block {1} mainchain reference data.".format(mcblock_hash, scblock_id))
def check_mcheader_presence(mcblock_hash, scblock_id, sc_node):
    """Assert that mainchain header `mcblock_hash` appears among the mainchain
    headers of sidechain block `scblock_id`; fail the test otherwise."""
    block_json = sc_node.block_findById(blockId=scblock_id)
    mainchain_headers = block_json["result"]["block"]["mainchainHeaders"]
    if any(hdr["hash"] == mcblock_hash for hdr in mainchain_headers):
        print("MC hash {0} is present in SC Block {1} mainchain headers.".format(mcblock_hash, scblock_id))
        return
    fail("MC hash {0} was not found in SC Block {1} mainchain headers.".format(mcblock_hash, scblock_id))
def sync_with_ping(self, timeout=30):
    """Send a ping carrying the current nonce and poll (every 50 ms) until the
    matching pong is observed, then bump the nonce for the next round trip.

    Fails the test if no matching pong arrives within `timeout` seconds.
    """
    self.connection.send_message(msg_ping(nonce=self.ping_counter))
    poll_interval = 0.05
    remaining = timeout
    while remaining > 0:
        with mininode_lock:
            if self.last_pong.nonce == self.ping_counter:
                self.ping_counter += 1
                return
        time.sleep(poll_interval)
        remaining -= poll_interval
    fail("Should have received pong")
def sync_with_ping(self, timeout=30, waiting_for=None):
    """Send a ping and poll (every 50 ms) until the matching pong arrives.

    If `waiting_for` is given, it is a predicate called with this node; the
    method additionally waits until it returns a non-None value before
    accepting the pong. Fails the test after `timeout` seconds.
    """
    self.connection.send_message(msg_ping(nonce=self.ping_counter))
    poll_interval = 0.05
    remaining = timeout
    while remaining > 0:
        with mininode_lock:
            if waiting_for is None:
                ready = True
            else:
                ready = waiting_for(self) is not None
            if ready and self.last_pong.nonce == self.ping_counter:
                self.ping_counter += 1
                return
        time.sleep(poll_interval)
        remaining -= poll_interval
    fail("Should have received pong")
def check_ommer(ommer_scblock_id, ommer_mcheaders_hashes, scblock_id, sc_node):
    """Assert that SC block `scblock_id` contains ommer `ommer_scblock_id`,
    and (when `ommer_mcheaders_hashes` is non-empty) that the ommer's
    mainchain headers match the expected hashes; fail the test otherwise.
    """
    res = sc_node.block_findById(blockId=scblock_id)
    ommers = res["result"]["block"]["ommers"]
    for ommer in ommers:
        if ommer["header"]["id"] == ommer_scblock_id:
            print("Ommer id {0} is present in SC Block {1} ommers.".format(ommer_scblock_id, scblock_id))
            ommer_mcheaders = ommer["mainchainHeaders"]
            # An empty expectation list means "only check the ommer exists".
            if len(ommer_mcheaders_hashes) == 0:
                return
            for header in ommer_mcheaders:
                # NOTE(review): this loop returns or fails on its very first
                # iteration, so only the ommer's first mainchain header is
                # actually inspected — confirm that is the intended behavior.
                if header["hash"] in ommer_mcheaders_hashes:
                    print("MC hash {0} is present in Ommer {1} mainchain headers.".format(header["hash"], ommer_scblock_id))
                    return
                else:
                    fail("MC hash {0} was not found in Ommer {1} mainchain headers.".format(header["hash"], ommer_scblock_id))
    # No ommer with the requested id was found in this SC block.
    fail("Ommer id {0} was not found in SC Block {1} ommers.".format(ommer_scblock_id, scblock_id))
def run_test(self):
    """End-to-end check of tx-expiring-soon relay policy.

    A mininode peers with node 0, sends one transaction expiring soon
    (rejected everywhere) and one that is not (accepted by nodes 0/1 while
    node 2 is isolated). Node 2 then catches up one block ahead, reconnects,
    and the test verifies that the expiring-soon tx is neither relayed to it
    ("notfound" instead of "tx") nor accepted via sendrawtransaction, and
    that no peer accrues banscore along the way.
    """
    testnode0 = TestNode()
    connections = []
    connections.append(
        NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], testnode0,
                 "regtest", OVERWINTER_PROTO_VERSION))
    testnode0.add_connection(connections[0])
    # Start up network handling in another thread
    NetworkThread().start()
    testnode0.wait_for_verack()
    # Verify mininodes are connected to zelcashd nodes
    peerinfo = self.nodes[0].getpeerinfo()
    versions = [x["version"] for x in peerinfo]
    assert_equal(1, versions.count(OVERWINTER_PROTO_VERSION))
    assert_equal(0, peerinfo[0]["banscore"])
    # Mine some blocks so we can spend
    coinbase_blocks = self.nodes[0].generate(200)
    node_address = self.nodes[0].getnewaddress()
    # Sync nodes 0 and 1
    sync_blocks(self.nodes[:2])
    sync_mempools(self.nodes[:2])
    # Verify block count
    assert_equal(self.nodes[0].getblockcount(), 200)
    assert_equal(self.nodes[1].getblockcount(), 200)
    assert_equal(self.nodes[2].getblockcount(), 0)
    # Mininodes send expiring soon transaction in "tx" message to zelcashd node
    self.send_transaction(testnode0, coinbase_blocks[0], node_address, 203)
    # Assert that the tx is not in the mempool (expiring soon)
    assert_equal([], self.nodes[0].getrawmempool())
    assert_equal([], self.nodes[1].getrawmempool())
    assert_equal([], self.nodes[2].getrawmempool())
    # Mininodes send transaction in "tx" message to zelcashd node
    tx2 = self.send_transaction(testnode0, coinbase_blocks[1], node_address, 204)
    # tx2 is not expiring soon
    assert_equal([tx2.hash], self.nodes[0].getrawmempool())
    assert_equal([tx2.hash], self.nodes[1].getrawmempool())
    # node 2 is isolated
    assert_equal([], self.nodes[2].getrawmempool())
    # Verify txid for tx2
    self.verify_inv(testnode0, tx2)
    self.send_data_message(testnode0, tx2)
    self.verify_last_tx(testnode0, tx2)
    # Sync and mine an empty block with node 2, leaving tx in the mempool of node0 and node1
    for blkhash in coinbase_blocks:
        blk = self.nodes[0].getblock(blkhash, 0)
        self.nodes[2].submitblock(blk)
    self.nodes[2].generate(1)
    # Verify block count
    assert_equal(self.nodes[0].getblockcount(), 200)
    assert_equal(self.nodes[1].getblockcount(), 200)
    assert_equal(self.nodes[2].getblockcount(), 201)
    # Reconnect node 2 to the network
    connect_nodes_bi(self.nodes, 0, 2)
    # Set up test node for node 2
    testnode2 = TestNode()
    connections.append(
        NodeConn('127.0.0.1', p2p_port(2), self.nodes[2], testnode2,
                 "regtest", OVERWINTER_PROTO_VERSION))
    testnode2.add_connection(connections[-1])
    # Verify block count
    sync_blocks(self.nodes[:3])
    assert_equal(self.nodes[0].getblockcount(), 201)
    assert_equal(self.nodes[1].getblockcount(), 201)
    assert_equal(self.nodes[2].getblockcount(), 201)
    # Verify contents of mempool
    assert_equal([tx2.hash], self.nodes[0].getrawmempool())
    assert_equal([tx2.hash], self.nodes[1].getrawmempool())
    assert_equal([], self.nodes[2].getrawmempool())
    # Confirm tx2 cannot be submitted to a mempool because it is expiring soon.
    try:
        rawtx2 = hexlify(tx2.serialize())
        self.nodes[2].sendrawtransaction(rawtx2)
        fail("Sending transaction should have failed")
    except JSONRPCException as e:
        assert_equal(
            "tx-expiring-soon: expiryheight is 204 but should be at least 205 to avoid transaction expiring soon",
            e.error['message'])
    self.send_data_message(testnode0, tx2)
    # Sync up with node after p2p messages delivered
    testnode0.sync_with_ping()
    # Verify node 0 does not reply to "getdata" by sending "tx" message, as tx2 is expiring soon
    with mininode_lock:
        assert_equal(testnode0.last_tx, None)
    # Verify mininode received a "notfound" message containing the txid of tx2
    with mininode_lock:
        msg = testnode0.last_notfound
        assert_equal(len(msg.inv), 1)
        assert_equal(tx2.sha256, msg.inv[0].hash)
    # Create a transaction to verify that processing of "getdata" messages is functioning
    tx3 = self.send_transaction(testnode0, coinbase_blocks[2], node_address, 999)
    self.send_data_message(testnode0, tx3)
    self.verify_last_tx(testnode0, tx3)
    # Verify txid for tx3 is returned in "inv", but tx2 which is expiring soon is not returned
    self.verify_inv(testnode0, tx3)
    self.verify_inv(testnode2, tx3)
    # Verify contents of mempool
    assert_equal({tx2.hash, tx3.hash}, set(self.nodes[0].getrawmempool()))
    assert_equal({tx2.hash, tx3.hash}, set(self.nodes[1].getrawmempool()))
    assert_equal({tx3.hash}, set(self.nodes[2].getrawmempool()))
    # Verify banscore for nodes are still zero
    assert_equal(
        0, sum(peer["banscore"] for peer in self.nodes[0].getpeerinfo()))
    assert_equal(
        0, sum(peer["banscore"] for peer in self.nodes[2].getpeerinfo()))
    [c.disconnect_node() for c in connections]
def run_test(self):
    """Verify turnstile enforcement for the shielded pool named by POOL_NAME.

    Shields 10 coins into the pool, then restarts node 0 with TURNSTILE_ARGS
    so its in-memory pool size is zero. An unshielding transaction must then
    be excluded from node 0's blocks (miner-side check via logs), and a block
    mined by node 1 that does contain it must be rejected by node 0 as a
    turnstile violation (again confirmed via logs) while nodes 1/2 accept it.
    """
    # Sanity-check the test harness
    self.nodes[0].generate(101)
    assert_equal(self.nodes[0].getblockcount(), 101)
    self.sync_all()
    # Node 0 shields some funds
    dest_addr = self.nodes[0].z_getnewaddress(POOL_NAME.lower())
    taddr0 = get_coinbase_address(self.nodes[0])
    if (POOL_NAME == "SPROUT"):
        myopid = self.nodes[0].z_shieldcoinbase(taddr0, dest_addr, 0, 1)['opid']
    elif (POOL_NAME == "SAPLING"):
        recipients = []
        recipients.append({"address": dest_addr, "amount": Decimal('10')})
        myopid = self.nodes[0].z_sendmany(taddr0, recipients, 1, 0)
    else:
        fail("Unrecognized pool name: " + POOL_NAME)
    wait_and_assert_operationid_status(self.nodes[0], myopid)
    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()
    assert_equal(self.nodes[0].z_getbalance(dest_addr), Decimal('10'))
    # Verify size of shielded pool
    self.assert_pool_balance(self.nodes[0], POOL_NAME.lower(), Decimal('10'))
    self.assert_pool_balance(self.nodes[1], POOL_NAME.lower(), Decimal('10'))
    self.assert_pool_balance(self.nodes[2], POOL_NAME.lower(), Decimal('10'))
    # Relaunch node 0 with in-memory size of value pools set to zero.
    self.restart_and_sync_node(0, TURNSTILE_ARGS)
    # Verify size of shielded pool
    self.assert_pool_balance(self.nodes[0], POOL_NAME.lower(), Decimal('0'))
    self.assert_pool_balance(self.nodes[1], POOL_NAME.lower(), Decimal('10'))
    self.assert_pool_balance(self.nodes[2], POOL_NAME.lower(), Decimal('10'))
    # Node 0 creates an unshielding transaction
    recipients = []
    recipients.append({"address": taddr0, "amount": Decimal('1')})
    myopid = self.nodes[0].z_sendmany(dest_addr, recipients, 1, 0)
    mytxid = wait_and_assert_operationid_status(self.nodes[0], myopid)
    # Verify transaction appears in mempool of nodes
    self.sync_all()
    assert (mytxid in self.nodes[0].getrawmempool())
    assert (mytxid in self.nodes[1].getrawmempool())
    assert (mytxid in self.nodes[2].getrawmempool())
    # Node 0 mines a block
    count = self.nodes[0].getblockcount()
    self.nodes[0].generate(1)
    self.sync_all()
    # Verify the mined block does not contain the unshielding transaction
    block = self.nodes[0].getblock(self.nodes[0].getbestblockhash())
    assert_equal(len(block["tx"]), 1)
    assert_equal(block["height"], count + 1)
    # Stop node 0 and check logs to verify the miner excluded the transaction from the block
    string_to_find = "CreateNewBlock: tx " + mytxid + " appears to violate " + POOL_NAME.capitalize() + " turnstile"
    check_node_log(self, 0, string_to_find)
    # Launch node 0 with in-memory size of value pools set to zero.
    self.start_and_sync_node(0, TURNSTILE_ARGS)
    # Node 1 mines a block
    oldhash = self.nodes[0].getbestblockhash()
    self.nodes[1].generate(1)
    newhash = self.nodes[1].getbestblockhash()
    # Verify block contains the unshielding transaction
    assert (mytxid in self.nodes[1].getblock(newhash)["tx"])
    # Verify nodes 1 and 2 have accepted the block as valid
    sync_blocks(self.nodes[1:3])
    sync_mempools(self.nodes[1:3])
    assert_equal(len(self.nodes[1].getrawmempool()), 0)
    assert_equal(len(self.nodes[2].getrawmempool()), 0)
    # Verify node 0 has not accepted the block
    assert_equal(oldhash, self.nodes[0].getbestblockhash())
    assert (mytxid in self.nodes[0].getrawmempool())
    self.assert_pool_balance(self.nodes[0], POOL_NAME.lower(), Decimal('0'))
    # Verify size of shielded pool
    self.assert_pool_balance(self.nodes[0], POOL_NAME.lower(), Decimal('0'))
    self.assert_pool_balance(self.nodes[1], POOL_NAME.lower(), Decimal('9'))
    self.assert_pool_balance(self.nodes[2], POOL_NAME.lower(), Decimal('9'))
    # Stop node 0 and check logs to verify the block was rejected as a turnstile violation
    string_to_find1 = "ConnectBlock(): turnstile violation in " + POOL_NAME.capitalize() + " shielded value pool"
    string_to_find2 = "InvalidChainFound: invalid block="
    string_to_find3 = "ConnectTip(): ConnectBlock " + newhash + " failed"
    check_node_log(self, 0, string_to_find1, True)
    check_node_log(self, 0, string_to_find2, False)
    check_node_log(self, 0, string_to_find3, False)
    self.start_and_sync_node(0)
    assert_equal(newhash, self.nodes[0].getbestblockhash())
def run_test(self):
    """Exercise the full sidechain withdrawal-certificate flow.

    Verifies the genesis SC block references the MC sc-creation block, sends
    a forward transfer (FT) into the sidechain, completes withdrawal epoch 0
    (empty certificate), then performs two backward transfers (BTs), completes
    withdrawal epoch 1, and checks the resulting certificate — contents,
    inclusion in an MC block, MC wallet balances, and its sync back to the SC.
    """
    time.sleep(0.1)
    self.sync_all()
    mc_node = self.nodes[0]
    sc_node = self.sc_nodes[0]
    # Check that MC block with sc creation tx is referenced in the genesis sc block
    mcblock_hash0 = mc_node.getbestblockhash()
    scblock_id0 = sc_node.block_best()["result"]["block"]["id"]
    check_mcreference_presence(mcblock_hash0, scblock_id0, sc_node)
    # Check that MC block with sc creation tx height is the same as in genesis info.
    sc_creation_mc_block_height = mc_node.getblock(mcblock_hash0)["height"]
    assert_equal(
        sc_creation_mc_block_height,
        self.sc_nodes_bootstrap_info.mainchain_block_height,
        "Genesis info expected to have the same genesis mc block height as in MC node."
    )
    # check all keys/boxes/balances are coherent with the default initialization
    check_wallet_balance(
        sc_node, self.sc_nodes_bootstrap_info.genesis_account_balance)
    check_box_balance(sc_node, self.sc_nodes_bootstrap_info.genesis_account,
                      3, 1,
                      self.sc_nodes_bootstrap_info.genesis_account_balance)
    # create FT to SC to withdraw later
    sc_address = sc_node.wallet_createPrivateKey25519()["result"]["proposition"]["publicKey"]
    sc_account = Account("", sc_address)
    ft_amount = 10
    mc_node.sc_send(sc_address, ft_amount,
                    self.sc_nodes_bootstrap_info.sidechain_id)
    assert_equal(1, mc_node.getmempoolinfo()["size"],
                 "Forward Transfer expected to be added to mempool.")
    # Generate MC block and SC block and check that FT appears in SC node wallet
    mcblock_hash1 = mc_node.generate(1)[0]
    scblock_id1 = generate_next_blocks(sc_node, "first node", 1)[0]
    check_mcreference_presence(mcblock_hash1, scblock_id1, sc_node)
    # check all keys/boxes/balances are coherent with the default initialization
    check_wallet_balance(
        sc_node,
        self.sc_nodes_bootstrap_info.genesis_account_balance + ft_amount)
    check_box_balance(sc_node, sc_account, 1, 1, ft_amount)
    # Generate 8 more MC block to finish the first withdrawal epoch, then generate 3 more SC block to sync with MC.
    we0_end_mcblock_hash = mc_node.generate(8)[7]
    scblock_id2 = generate_next_blocks(sc_node, "first node", 3)[2]
    check_mcreferencedata_presence(we0_end_mcblock_hash, scblock_id2, sc_node)
    # Generate first mc block of the next epoch
    we1_1_mcblock_hash = mc_node.generate(1)[0]
    print("End mc block hash in withdrawal epoch 0 = " + we0_end_mcblock_hash)
    scblock_id3 = generate_next_blocks(sc_node, "first node", 1)[0]
    check_mcreference_presence(we1_1_mcblock_hash, scblock_id3, sc_node)
    # Wait until Certificate will appear in MC node mempool
    attempts = 20
    while mc_node.getmempoolinfo()["size"] == 0 and attempts > 0:
        print("Wait for certificate in mc mempool...")
        time.sleep(10)
        attempts -= 1
        sc_node.block_best()  # just a ping to SC node. For some reason, STF can't request SC node API after a while idle.
    assert_equal(1, mc_node.getmempoolinfo()["size"],
                 "Certificate was not added to Mc node mmepool.")
    # Get Certificate for Withdrawal epoch 0 and verify it
    we0_certHash = mc_node.getrawmempool()[0]
    print("Withdrawal epoch 0 certificate hash = " + we0_certHash)
    we0_cert = mc_node.getrawcertificate(we0_certHash, 1)
    assert_equal(self.sc_nodes_bootstrap_info.sidechain_id,
                 we0_cert["cert"]["scid"],
                 "Sidechain Id in certificate is wrong.")
    assert_equal(0, we0_cert["cert"]["epochNumber"],
                 "Sidechain epoch number in certificate is wrong.")
    assert_equal(we0_end_mcblock_hash, we0_cert["cert"]["endEpochBlockHash"],
                 "Sidechain endEpochBlockHash in certificate is wrong.")
    assert_equal(0, we0_cert["cert"]["totalAmount"],
                 "Sidechain total amount in certificate is wrong.")
    # Generate MC block and verify that certificate is present
    we1_2_mcblock_hash = mc_node.generate(1)[0]
    assert_equal(
        0, mc_node.getmempoolinfo()["size"],
        "Certificate expected to be removed from MC node mempool.")
    assert_equal(1, len(mc_node.getblock(we1_2_mcblock_hash)["tx"]),
                 "MC block expected to contain 1 transaction.")
    assert_equal(1, len(mc_node.getblock(we1_2_mcblock_hash)["cert"]),
                 "MC block expected to contain 1 Certificate.")
    assert_equal(we0_certHash, mc_node.getblock(we1_2_mcblock_hash)["cert"][0],
                 "MC block expected to contain certificate.")
    print(
        "MC block with withdrawal certificate for epoch 0 = {0}\n".format(
            str(mc_node.getblock(we1_2_mcblock_hash, False))))
    # Generate SC block and verify that certificate is synced back
    scblock_id4 = generate_next_blocks(sc_node, "first node", 1)[0]
    check_mcreference_presence(we1_2_mcblock_hash, scblock_id4, sc_node)
    # Verify Certificate for epoch 0 on SC side
    we0_sc_cert = sc_node.block_best()["result"]["block"][
        "mainchainBlockReferencesData"][0]["withdrawalEpochCertificate"]
    assert_equal(self.sc_nodes_bootstrap_info.sidechain_id,
                 we0_sc_cert["sidechainId"],
                 "Sidechain Id in certificate is wrong.")
    assert_equal(0, we0_sc_cert["epochNumber"],
                 "Sidechain epoch number in certificate is wrong.")
    assert_equal(we0_end_mcblock_hash, we0_sc_cert["endEpochBlockHash"],
                 "Sidechain endEpochBlockHash in certificate is wrong.")
    assert_equal(0, len(we0_sc_cert["backwardTransferOutputs"]),
                 "Backward transfer amount in certificate is wrong.")
    assert_equal(we0_certHash, we0_sc_cert["hash"],
                 "Certificate hash is different to the one in MC.")
    # Try to withdraw coins from SC to MC: 2 withdrawals with the same amount
    addresses = mc_node.listaddresses()
    mc_address1_hash = mc_node.getnewaddress("", True)
    # The freshly created address is the one not present in the earlier snapshot.
    mc_address1_standard = (set(mc_node.listaddresses()) - set(addresses)).pop()
    print("First BT MC public key hash is {}".format(mc_address1_hash))
    print("First BT MC public key address is {}".format(
        mc_address1_standard))
    bt_amount1 = ft_amount - 3
    sc_bt_amount1 = bt_amount1 * 100000000  # in Satoshi
    withdrawal_request = {"outputs": [
        {
            "publicKey": mc_address1_standard,
            "value": sc_bt_amount1
        }
    ]
    }
    withdrawCoinsJson = sc_node.transaction_withdrawCoins(
        json.dumps(withdrawal_request))
    if "result" not in withdrawCoinsJson:
        fail("Withdraw coins failed: " + json.dumps(withdrawCoinsJson))
    else:
        print("Coins withdrawn: " + json.dumps(withdrawCoinsJson))
    # Generate SC block
    generate_next_blocks(sc_node, "first node", 1)
    addresses = mc_node.listaddresses()
    mc_address2_hash = self.nodes[0].getnewaddress("", True)
    mc_address2_standard = (set(mc_node.listaddresses()) - set(addresses)).pop()
    print("Second BT MC public key hash is {}".format(mc_address2_hash))
    print("Second BT MC public key address is {}".format(
        mc_address2_standard))
    bt_amount2 = ft_amount - bt_amount1
    sc_bt_amount2 = bt_amount2 * 100000000  # in Satoshi
    withdrawal_request = {"outputs": [
        {
            "publicKey": mc_address2_standard,
            "value": sc_bt_amount2
        }
    ]
    }
    withdrawCoinsJson = sc_node.transaction_withdrawCoins(
        json.dumps(withdrawal_request))
    if "result" not in withdrawCoinsJson:
        fail("Withdraw coins failed: " + json.dumps(withdrawCoinsJson))
    else:
        print("Coins withdrawn: " + json.dumps(withdrawCoinsJson))
    # NOTE(review): this re-submits the same second withdrawal request with
    # its result unchecked — looks accidental; confirm whether it is intended.
    sc_node.transaction_withdrawCoins(json.dumps(withdrawal_request))
    # Generate SC block
    generate_next_blocks(sc_node, "first node", 1)
    # Generate 8 more MC block to finish the first withdrawal epoch, then generate 3 more SC block to sync with MC.
    we1_end_mcblock_hash = mc_node.generate(8)[7]
    we1_end_scblock_id = generate_next_blocks(sc_node, "first node", 3)[2]
    check_mcreferencedata_presence(we1_end_mcblock_hash, we1_end_scblock_id,
                                   sc_node)
    # Generate first mc block of the next epoch
    we2_1_mcblock_hash = mc_node.generate(1)[0]
    print("End mc block hash in withdrawal epoch 1 = " + we2_1_mcblock_hash)
    we2_1_scblock_id = generate_next_blocks(sc_node, "first node", 1)[0]
    check_mcreference_presence(we2_1_mcblock_hash, we2_1_scblock_id, sc_node)
    # Wait until Certificate will appear in MC node mempool
    attempts = 20
    while mc_node.getmempoolinfo()["size"] == 0 and attempts > 0:
        print("Wait for certificate in mc mempool...")
        time.sleep(10)
        attempts -= 1
        sc_node.block_best()  # just a ping to SC node. For some reason, STF can't request SC node API after a while idle.
    assert_equal(1, mc_node.getmempoolinfo()["size"],
                 "Certificate was not added to Mc node mmepool.")
    # Get Certificate for Withdrawal epoch 1 and verify it
    we1_certHash = mc_node.getrawmempool()[0]
    print("Withdrawal epoch 1 certificate hash = " + we1_certHash)
    we1_cert = mc_node.getrawcertificate(we1_certHash, 1)
    assert_equal(self.sc_nodes_bootstrap_info.sidechain_id,
                 we1_cert["cert"]["scid"],
                 "Sidechain Id in certificate is wrong.")
    assert_equal(1, we1_cert["cert"]["epochNumber"],
                 "Sidechain epoch number in certificate is wrong.")
    assert_equal(we1_end_mcblock_hash, we1_cert["cert"]["endEpochBlockHash"],
                 "Sidechain endEpochBlockHash in certificate is wrong.")
    assert_equal(bt_amount1 + bt_amount2, we1_cert["cert"]["totalAmount"],
                 "Sidechain total amount in certificate is wrong.")
    # Generate MC block and verify that certificate is present
    we2_2_mcblock_hash = mc_node.generate(1)[0]
    assert_equal(
        0, mc_node.getmempoolinfo()["size"],
        "Certificate expected to be removed from MC node mempool.")
    assert_equal(1, len(mc_node.getblock(we2_2_mcblock_hash)["tx"]),
                 "MC block expected to contain 1 transaction.")
    assert_equal(1, len(mc_node.getblock(we2_2_mcblock_hash)["cert"]),
                 "MC block expected to contain 1 Certificate.")
    assert_equal(we1_certHash, mc_node.getblock(we2_2_mcblock_hash)["cert"][0],
                 "MC block expected to contain certificate.")
    # Check certificate BT entries
    assert_equal(bt_amount1, we1_cert["vout"][1]["value"],
                 "First BT amount is wrong.")
    assert_equal(bt_amount2, we1_cert["vout"][2]["value"],
                 "Second BT amount is wrong.")
    cert_address_1 = we1_cert["vout"][1]["scriptPubKey"]["addresses"][0]
    assert_equal(mc_address1_standard, cert_address_1,
                 "First BT standard address is wrong.")
    cert_address_2 = we1_cert["vout"][2]["scriptPubKey"]["addresses"][0]
    assert_equal(mc_address2_standard, cert_address_2,
                 "Second BT standard address is wrong.")
    cert_address_hash_1 = we1_cert["vout"][1]["pubkeyhash"]
    assert_equal(mc_address1_hash, cert_address_hash_1,
                 "First BT pub key hash address is wrong.")
    cert_address_hash_2 = we1_cert["vout"][2]["pubkeyhash"]
    assert_equal(mc_address2_hash, cert_address_hash_2,
                 "Second BT pub key hash address is wrong.")
    # Check changes in balances in MC
    # Note destination addresses also can contain some fees assigned to them during mining
    assert_equal(
        bt_amount1,
        math.floor(mc_node.getreceivedbyaddress(mc_address1_standard)),
        "First BT amount expected to be found in MC wallet")
    assert_equal(
        bt_amount2,
        math.floor(mc_node.getreceivedbyaddress(mc_address2_standard)),
        "Second BT amount expected to be found in MC wallet")
    # Generate SC block and verify that certificate is synced back
    scblock_id5 = generate_next_blocks(sc_node, "first node", 1)[0]
    check_mcreference_presence(we2_2_mcblock_hash, scblock_id5, sc_node)
    # Verify Certificate for epoch 1 on SC side
    we1_sc_cert = sc_node.block_best()["result"]["block"][
        "mainchainBlockReferencesData"][0]["withdrawalEpochCertificate"]
    assert_equal(self.sc_nodes_bootstrap_info.sidechain_id,
                 we1_sc_cert["sidechainId"],
                 "Sidechain Id in certificate is wrong.")
    assert_equal(1, we1_sc_cert["epochNumber"],
                 "Sidechain epoch number in certificate is wrong.")
    assert_equal(we1_end_mcblock_hash, we1_sc_cert["endEpochBlockHash"],
                 "Sidechain endEpochBlockHash in certificate is wrong.")
    assert_equal(2, len(we1_sc_cert["backwardTransferOutputs"]),
                 "Backward transfer amount in certificate is wrong.")
    sc_pub_key_hash_1 = we1_sc_cert["backwardTransferOutputs"][0][
        "pubKeyHash"]
    assert_equal(mc_address1_standard, sc_pub_key_hash_1,
                 "First BT address is wrong.")
    assert_equal(sc_bt_amount1,
                 we1_sc_cert["backwardTransferOutputs"][0]["amount"],
                 "First BT amount is wrong.")
    sc_pub_key_hash_2 = we1_sc_cert["backwardTransferOutputs"][1][
        "pubKeyHash"]
    assert_equal(mc_address2_standard, sc_pub_key_hash_2,
                 "Second BT address is wrong.")
    assert_equal(sc_bt_amount2,
                 we1_sc_cert["backwardTransferOutputs"][1]["amount"],
                 "Second BT amount is wrong.")
    assert_equal(we1_certHash, we1_sc_cert["hash"],
                 "Certificate hash is different to the one in MC.")
def run_test(self):
    """Test spentindex-related RPC additions: extra getrawtransaction fields
    (input value/valueSat/address, output spentTxId/spentIndex/spentHeight,
    tx height) and the getspentinfo RPC, including persistence of the index
    across a full node restart.

    Fixes over the previous version (Python 3 compatibility, matching the
    sibling variant of this test elsewhere in the file):
    - `except JSONRPCException, e:` (py2-only syntax, a SyntaxError on py3)
      is now `except JSONRPCException as e:`.
    - `filter(...)` returns a lazy iterator on py3 and cannot be indexed;
      it is now wrapped in `list(...)` before `vout[0]` is accessed.
    """
    self.nodes[0].generate(105)
    self.sync_all()
    chain_height = self.nodes[1].getblockcount()
    assert_equal(chain_height, 105)
    # Test getrawtransaction changes and the getspentinfo RPC
    # send coinbase to address addr1
    addr1 = self.nodes[1].getnewaddress()
    txid1 = self.nodes[0].sendtoaddress(addr1, 2)
    self.sync_all()
    block_hash1 = self.nodes[0].generate(1)
    self.sync_all()
    # send from addr1 to addr2
    # (the only utxo on node 1 is from address addr1)
    addr2 = self.nodes[2].getnewaddress()
    txid2 = self.nodes[1].sendtoaddress(addr2, 1)
    self.sync_all()
    # addr1 to addr2 transaction is not confirmed, so it has no height
    tx2 = self.nodes[2].getrawtransaction(txid2, 1)
    assert ('height' not in tx2)
    # confirm addr1 to addr2 transaction
    block_hash2 = self.nodes[0].generate(1)
    self.sync_all()
    # Restart all nodes to ensure index files are saved to disk and recovered
    stop_nodes(self.nodes)
    wait_bitcoinds()
    self.setup_network()
    # Check new fields added to getrawtransaction
    tx1 = self.nodes[2].getrawtransaction(txid1, 1)
    assert_equal(tx1['vin'][0]['value'], 10)  # coinbase
    assert_equal(tx1['vin'][0]['valueSat'], 10 * COIN)
    # we want the non-change (payment) output
    vout = list(filter(lambda o: o['value'] == 2, tx1['vout']))
    n = vout[0]['n']
    assert_equal(vout[0]['spentTxId'], txid2)
    assert_equal(vout[0]['spentIndex'], 0)
    assert_equal(vout[0]['spentHeight'], 107)
    assert_equal(tx1['height'], 106)
    tx2 = self.nodes[2].getrawtransaction(txid2, 1)
    assert_equal(tx2['vin'][0]['address'], addr1)
    assert_equal(tx2['vin'][0]['value'], 2)
    assert_equal(tx2['vin'][0]['valueSat'], 2 * COIN)
    # since this transaction's outputs haven't yet been
    # spent, these fields should not be present
    assert ('spentTxId' not in tx2['vout'][0])
    assert ('spentIndex' not in tx2['vout'][0])
    assert ('spentHeight' not in tx2['vout'][0])
    assert_equal(tx2['height'], 107)
    # Given a transaction output, getspentinfo() returns a reference
    # to the (later, confirmed) transaction that spent that output,
    # that is, the transaction that used this output as an input.
    spentinfo = self.nodes[2].getspentinfo({'txid': txid1, 'index': n})
    assert_equal(spentinfo['height'], 107)
    assert_equal(spentinfo['index'], 0)
    assert_equal(spentinfo['txid'], txid2)
    # specifying an output that hasn't been spent should fail
    try:
        self.nodes[1].getspentinfo({'txid': txid2, 'index': 0})
        fail('getspentinfo should have thrown an exception')
    except JSONRPCException as e:
        assert_equal(e.error['message'], "Unable to get spent info")
def run_test(self):
    """End-to-end check of tx-expiring-soon relay policy (zcashd variant).

    A mininode peers with node 0, sends one transaction expiring soon
    (rejected everywhere) and one that is not (accepted by nodes 0/1 while
    node 2 is isolated). Node 2 then catches up one block ahead, reconnects,
    and the test verifies that the expiring-soon tx is neither relayed to it
    ("notfound" instead of "tx") nor accepted via sendrawtransaction, and
    that no peer accrues banscore along the way.
    """
    testnode0 = TestNode()
    connections = []
    connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0],
                                testnode0, "regtest", OVERWINTER_PROTO_VERSION))
    testnode0.add_connection(connections[0])
    # Start up network handling in another thread
    NetworkThread().start()
    testnode0.wait_for_verack()
    # Verify mininodes are connected to zcashd nodes
    peerinfo = self.nodes[0].getpeerinfo()
    versions = [x["version"] for x in peerinfo]
    assert_equal(1, versions.count(OVERWINTER_PROTO_VERSION))
    assert_equal(0, peerinfo[0]["banscore"])
    # Mine some blocks so we can spend
    coinbase_blocks = self.nodes[0].generate(200)
    node_address = self.nodes[0].getnewaddress()
    # Sync nodes 0 and 1
    sync_blocks(self.nodes[:2])
    sync_mempools(self.nodes[:2])
    # Verify block count
    assert_equal(self.nodes[0].getblockcount(), 200)
    assert_equal(self.nodes[1].getblockcount(), 200)
    assert_equal(self.nodes[2].getblockcount(), 0)
    # Mininodes send expiring soon transaction in "tx" message to zcashd node
    self.send_transaction(testnode0, coinbase_blocks[0], node_address, 203)
    # Assert that the tx is not in the mempool (expiring soon)
    assert_equal([], self.nodes[0].getrawmempool())
    assert_equal([], self.nodes[1].getrawmempool())
    assert_equal([], self.nodes[2].getrawmempool())
    # Mininodes send transaction in "tx" message to zcashd node
    tx2 = self.send_transaction(testnode0, coinbase_blocks[1], node_address, 204)
    # tx2 is not expiring soon
    assert_equal([tx2.hash], self.nodes[0].getrawmempool())
    assert_equal([tx2.hash], self.nodes[1].getrawmempool())
    # node 2 is isolated
    assert_equal([], self.nodes[2].getrawmempool())
    # Verify txid for tx2
    self.verify_inv(testnode0, tx2)
    self.send_data_message(testnode0, tx2)
    self.verify_last_tx(testnode0, tx2)
    # Sync and mine an empty block with node 2, leaving tx in the mempool of node0 and node1
    for blkhash in coinbase_blocks:
        blk = self.nodes[0].getblock(blkhash, 0)
        self.nodes[2].submitblock(blk)
    self.nodes[2].generate(1)
    # Verify block count
    assert_equal(self.nodes[0].getblockcount(), 200)
    assert_equal(self.nodes[1].getblockcount(), 200)
    assert_equal(self.nodes[2].getblockcount(), 201)
    # Reconnect node 2 to the network
    connect_nodes_bi(self.nodes, 0, 2)
    # Set up test node for node 2
    testnode2 = TestNode()
    connections.append(NodeConn('127.0.0.1', p2p_port(2), self.nodes[2],
                                testnode2, "regtest", OVERWINTER_PROTO_VERSION))
    testnode2.add_connection(connections[-1])
    # Verify block count
    sync_blocks(self.nodes[:3])
    assert_equal(self.nodes[0].getblockcount(), 201)
    assert_equal(self.nodes[1].getblockcount(), 201)
    assert_equal(self.nodes[2].getblockcount(), 201)
    # Verify contents of mempool
    assert_equal([tx2.hash], self.nodes[0].getrawmempool())
    assert_equal([tx2.hash], self.nodes[1].getrawmempool())
    assert_equal([], self.nodes[2].getrawmempool())
    # Confirm tx2 cannot be submitted to a mempool because it is expiring soon.
    try:
        rawtx2 = hexlify(tx2.serialize())
        self.nodes[2].sendrawtransaction(rawtx2)
        fail("Sending transaction should have failed")
    except JSONRPCException as e:
        assert_equal(
            "tx-expiring-soon: expiryheight is 204 but should be at least 205 to avoid transaction expiring soon",
            e.error['message']
        )
    self.send_data_message(testnode0, tx2)
    # Sync up with node after p2p messages delivered
    testnode0.sync_with_ping()
    # Verify node 0 does not reply to "getdata" by sending "tx" message, as tx2 is expiring soon
    with mininode_lock:
        assert_equal(testnode0.last_tx, None)
    # Verify mininode received a "notfound" message containing the txid of tx2
    with mininode_lock:
        msg = testnode0.last_notfound
        assert_equal(len(msg.inv), 1)
        assert_equal(tx2.sha256, msg.inv[0].hash)
    # Create a transaction to verify that processing of "getdata" messages is functioning
    tx3 = self.send_transaction(testnode0, coinbase_blocks[2], node_address, 999)
    self.send_data_message(testnode0, tx3)
    self.verify_last_tx(testnode0, tx3)
    # Verify txid for tx3 is returned in "inv", but tx2 which is expiring soon is not returned
    self.verify_inv(testnode0, tx3)
    self.verify_inv(testnode2, tx3)
    # Verify contents of mempool
    assert_equal({tx2.hash, tx3.hash}, set(self.nodes[0].getrawmempool()))
    assert_equal({tx2.hash, tx3.hash}, set(self.nodes[1].getrawmempool()))
    assert_equal({tx3.hash}, set(self.nodes[2].getrawmempool()))
    # Verify banscore for nodes are still zero
    assert_equal(0, sum(peer["banscore"] for peer in self.nodes[0].getpeerinfo()))
    assert_equal(0, sum(peer["banscore"] for peer in self.nodes[2].getpeerinfo()))
    [c.disconnect_node() for c in connections]
def run_test(self):
    """Exercise the address/spent-index RPCs.

    Flow: mine a base chain, create two chained payments
    (coinbase -> addr1 -> addr2), restart all nodes to force the index
    files to be flushed and reloaded from disk, then verify:
      * getrawtransaction's index-backed extra fields
        (vin value/valueSat/address, vout spentTxId/spentIndex/spentHeight,
        tx height),
      * getspentinfo for a spent and an unspent output,
      * getblockdeltas for both payment blocks.

    Relies on the framework helpers `assert_equal`, `fail`, `stop_nodes`,
    `wait_bitcoinds`, and the `COIN` constant imported at module level.
    """
    # Mine past coinbase maturity so node 0 has spendable funds.
    self.nodes[0].generate(105)
    self.sync_all()

    chain_height = self.nodes[1].getblockcount()
    assert_equal(chain_height, 105)

    # Test getrawtransaction changes and the getspentinfo RPC

    # send coinbase to address addr1
    addr1 = self.nodes[1].getnewaddress()
    txid1 = self.nodes[0].sendtoaddress(addr1, 2)
    self.sync_all()
    # Block 106 confirms txid1.
    block_hash1 = self.nodes[0].generate(1)
    self.sync_all()

    # send from addr1 to addr2
    # (the only utxo on node 1 is from address addr1)
    addr2 = self.nodes[2].getnewaddress()
    txid2 = self.nodes[1].sendtoaddress(addr2, 1)
    self.sync_all()

    # addr1 to addr2 transaction is not confirmed, so it has no height
    tx2 = self.nodes[2].getrawtransaction(txid2, 1)
    assert('height' not in tx2)

    # confirm addr1 to addr2 transaction (block 107 confirms txid2)
    block_hash2 = self.nodes[0].generate(1)
    self.sync_all()

    # Restart all nodes to ensure index files are saved to disk and recovered
    stop_nodes(self.nodes)
    wait_bitcoinds()
    self.setup_network()

    # Check new fields added to getrawtransaction
    tx1 = self.nodes[2].getrawtransaction(txid1, 1)
    assert_equal(tx1['vin'][0]['value'], 97)  # coinbase
    assert_equal(tx1['vin'][0]['valueSat'], 97*COIN)
    # we want the non-change (payment) output
    vout = list(filter(lambda o: o['value'] == 2, tx1['vout']))
    n = vout[0]['n']
    # The payment output of txid1 was spent by txid2 at input index 0,
    # in block 107.
    assert_equal(vout[0]['spentTxId'], txid2)
    assert_equal(vout[0]['spentIndex'], 0)
    assert_equal(vout[0]['spentHeight'], 107)
    assert_equal(tx1['height'], 106)

    tx2 = self.nodes[2].getrawtransaction(txid2, 1)
    # Index-backed input metadata: address and value of the prevout.
    assert_equal(tx2['vin'][0]['address'], addr1)
    assert_equal(tx2['vin'][0]['value'], 2)
    assert_equal(tx2['vin'][0]['valueSat'], 2*COIN)
    # since this transaction's outputs haven't yet been
    # spent, these fields should not be present
    assert('spentTxId' not in tx2['vout'][0])
    assert('spentIndex' not in tx2['vout'][0])
    assert('spentHeight' not in tx2['vout'][0])
    assert_equal(tx2['height'], 107)

    # Given a transaction output, getspentinfo() returns a reference
    # to the (later, confirmed) transaction that spent that output,
    # that is, the transaction that used this output as an input.
    spentinfo = self.nodes[2].getspentinfo({'txid': txid1, 'index': n})
    assert_equal(spentinfo['height'], 107)
    assert_equal(spentinfo['index'], 0)
    assert_equal(spentinfo['txid'], txid2)

    # specifying an output that hasn't been spent should fail
    try:
        self.nodes[1].getspentinfo({'txid': txid2, 'index': 0})
        fail('getspentinfo should have thrown an exception')
    except JSONRPCException as e:
        assert_equal(e.error['message'], "Unable to get spent info")

    # Mine one more block (108) so block 107 has a nextblockhash.
    block_hash_next = self.nodes[0].generate(1)
    self.sync_all()

    # Test the getblockdeltas RPC
    blockdeltas = self.nodes[2].getblockdeltas(block_hash1[0])
    assert_equal(blockdeltas['confirmations'], 3)
    assert_equal(blockdeltas['height'], 106)
    assert_equal(blockdeltas['version'], 5)
    assert_equal(blockdeltas['hash'], block_hash1[0])
    assert_equal(blockdeltas['nextblockhash'], block_hash2[0])
    deltas = blockdeltas['deltas']
    # block contains two transactions, coinbase, and earlier coinbase to addr1
    assert_equal(len(deltas), 2)
    coinbase_tx = deltas[0]
    assert_equal(coinbase_tx['index'], 0)
    assert_equal(len(coinbase_tx['inputs']), 0)
    assert_equal(len(coinbase_tx['outputs']), 2)
    assert_equal(coinbase_tx['outputs'][0]['index'], 0)
    assert_equal(coinbase_tx['outputs'][1]['index'], 1)
    # NOTE(review): 300000024 looks like the regtest miner-subsidy share in
    # satoshis for this chain's parameters — confirm against chainparams.
    assert_equal(coinbase_tx['outputs'][1]['satoshis'], 300000024)

    # Second delta: the coinbase-funded payment to addr1 (txid1).
    to_a_tx = deltas[1]
    assert_equal(to_a_tx['index'], 1)
    assert_equal(to_a_tx['txid'], txid1)
    assert_equal(len(to_a_tx['inputs']), 1)
    assert_equal(to_a_tx['inputs'][0]['index'], 0)
    assert_equal(to_a_tx['inputs'][0]['prevout'], 0)
    # Inputs are reported as negative deltas.
    assert_equal(to_a_tx['inputs'][0]['satoshis'], -97*COIN)
    assert_equal(len(to_a_tx['outputs']), 2)

    # find the nonchange output, which is the payment to addr1
    out = list(filter(lambda o: o['satoshis'] == 2*COIN, to_a_tx['outputs']))
    assert_equal(len(out), 1)
    assert_equal(out[0]['address'], addr1)

    # Same checks for the second payment block (addr1 -> addr2, txid2).
    blockdeltas = self.nodes[2].getblockdeltas(block_hash2[0])
    assert_equal(blockdeltas['confirmations'], 2)
    assert_equal(blockdeltas['height'], 107)
    assert_equal(blockdeltas['version'], 5)
    assert_equal(blockdeltas['hash'], block_hash2[0])
    assert_equal(blockdeltas['previousblockhash'], block_hash1[0])
    assert_equal(blockdeltas['nextblockhash'], block_hash_next[0])
    deltas = blockdeltas['deltas']
    assert_equal(len(deltas), 2)
    coinbase_tx = deltas[0]
    assert_equal(coinbase_tx['index'], 0)
    assert_equal(len(coinbase_tx['inputs']), 0)
    assert_equal(len(coinbase_tx['outputs']), 2)
    assert_equal(coinbase_tx['outputs'][0]['index'], 0)
    assert_equal(coinbase_tx['outputs'][1]['index'], 1)
    assert_equal(coinbase_tx['outputs'][1]['satoshis'], 300000024)

    to_b_tx = deltas[1]
    assert_equal(to_b_tx['index'], 1)
    assert_equal(to_b_tx['txid'], txid2)
    assert_equal(len(to_b_tx['inputs']), 1)
    assert_equal(to_b_tx['inputs'][0]['index'], 0)
    assert_equal(to_b_tx['inputs'][0]['prevtxid'], txid1)
    assert_equal(to_b_tx['inputs'][0]['satoshis'], -2*COIN)
    assert_equal(len(to_b_tx['outputs']), 2)

    # find the nonchange output, which is the payment to addr2
    out = list(filter(lambda o: o['satoshis'] == 1*COIN, to_b_tx['outputs']))
    assert_equal(len(out), 1)
    assert_equal(out[0]['address'], addr2)