def run_test(self): self.log.info("estimatefee: Shows deprecated message") assert_raises_jsonrpc(-32, 'estimatefee is deprecated', self.nodes[0].estimatefee, 1) self.log.info("Using -deprecatedrpc=estimatefee bypasses the error") self.nodes[1].estimatefee(1)
def test_categories(self):
    node = self.nodes[0]

    # wrong argument count
    assert_raises_jsonrpc(-1, 'help', node.help, 'foo', 'bar')

    # invalid argument
    assert_raises_jsonrpc(-1, 'JSON value is not a string as expected', node.help, 0)

    # help of unknown command
    assert_equal(node.help('foo'), 'help: unknown command: foo')

    # command titles
    titles = [line[3:-3] for line in node.help().splitlines() if line.startswith('==')]

    components = ['Blockchain', 'Control', 'Generating', 'Mining',
                  'Network', 'Rawtransactions', 'Util']

    if self.is_wallet_compiled():
        components.append('Wallet')

    # no RPC for Zmq as of yet in current version
    # if self.is_zmq_compiled():
    #     components.append('Zmq')

    assert_equal(titles, components)
def _test_getchaintxstats(self):
    chaintxstats = self.nodes[0].getchaintxstats(1)
    # 200 txs plus genesis tx
    assert_equal(chaintxstats['txcount'], 201)
    # tx rate should be 1 per 10 minutes, or 1/600
    # we have to round because of binary math
    assert_equal(round(chaintxstats['txrate'] * 600, 10), Decimal(1))

    b1 = self.nodes[0].getblock(self.nodes[0].getblockhash(1))
    b200 = self.nodes[0].getblock(self.nodes[0].getblockhash(200))
    time_diff = b200['mediantime'] - b1['mediantime']

    chaintxstats = self.nodes[0].getchaintxstats()
    assert_equal(chaintxstats['time'], b200['time'])
    assert_equal(chaintxstats['txcount'], 201)
    assert_equal(chaintxstats['window_block_count'], 199)
    assert_equal(chaintxstats['window_tx_count'], 199)
    assert_equal(chaintxstats['window_interval'], time_diff)
    assert_equal(round(chaintxstats['txrate'] * time_diff, 10), Decimal(199))

    chaintxstats = self.nodes[0].getchaintxstats(blockhash=b1['hash'])
    assert_equal(chaintxstats['time'], b1['time'])
    assert_equal(chaintxstats['txcount'], 2)
    assert_equal(chaintxstats['window_block_count'], 0)
    assert 'window_tx_count' not in chaintxstats
    assert 'window_interval' not in chaintxstats
    assert 'txrate' not in chaintxstats

    assert_raises_jsonrpc(
        -8, "Invalid block count: should be between 0 and the block's height - 1",
        self.nodes[0].getchaintxstats, 201)
def run_test(self):
    assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], True)
    assert_equal(self.nodes[0].getnetworkinfo()['connections'], 2)

    self.nodes[0].setnetworkactive(False)
    assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], False)
    timeout = 3
    while self.nodes[0].getnetworkinfo()['connections'] != 0:
        # Wait a bit for all sockets to close
        assert timeout > 0, 'not all connections closed in time'
        timeout -= 0.1
        time.sleep(0.1)

    self.nodes[0].setnetworkactive(True)
    connect_nodes_bi(self.nodes, 0, 1)
    assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], True)
    assert_equal(self.nodes[0].getnetworkinfo()['connections'], 2)

    # test getaddednodeinfo
    assert_equal(self.nodes[0].getaddednodeinfo(), [])
    # add a node (node2) to node0
    ip_port = "127.0.0.1:{}".format(p2p_port(2))
    self.nodes[0].addnode(ip_port, 'add')
    # check that the node has indeed been added
    added_nodes = self.nodes[0].getaddednodeinfo(ip_port)
    assert_equal(len(added_nodes), 1)
    assert_equal(added_nodes[0]['addednode'], ip_port)
    # check that a non-existent node returns an error
    assert_raises_jsonrpc(-24, "Node has not been added",
                          self.nodes[0].getaddednodeinfo, '1.1.1.1')
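# connect_nodes_bi and p2p_port come from the test framework's util module.
# A minimal sketch of the behaviour these tests assume; the real p2p_port
# derives the port from a per-process seed, so the constant below is only
# illustrative:
def p2p_port(n):
    return 11000 + n  # assumption: one listening port per node index

def connect_nodes(from_connection, node_num):
    ip_port = "127.0.0.1:" + str(p2p_port(node_num))
    from_connection.addnode(ip_port, "onetry")
    # poll until the version handshake completes on the new peer
    while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()):
        time.sleep(0.1)

def connect_nodes_bi(nodes, a, b):
    # connect in both directions so each side sees the other as a peer
    connect_nodes(nodes[a], b)
    connect_nodes(nodes[b], a)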
def _test_getblockheader(self):
    node = self.nodes[0]

    assert_raises_jsonrpc(-5, "Block not found",
                          node.getblockheader, "nonsense")

    besthash = node.getbestblockhash()
    secondbesthash = node.getblockhash(199)
    header = node.getblockheader(besthash)

    assert_equal(header['hash'], besthash)
    assert_equal(header['height'], 200)
    assert_equal(header['confirmations'], 1)
    assert_equal(header['previousblockhash'], secondbesthash)
    assert_is_hex_string(header['chainwork'])
    assert_is_hash_string(header['hash'])
    assert_is_hash_string(header['previousblockhash'])
    assert_is_hash_string(header['merkleroot'])
    assert_is_hash_string(header['bits'], length=None)
    assert isinstance(header['time'], int)
    assert isinstance(header['mediantime'], int)
    assert isinstance(header['nonceUint32'], int)
    assert_is_hash_string(header['nonce'], length=None)
    assert isinstance(header['version'], int)
    assert isinstance(int(header['versionHex'], 16), int)
    assert isinstance(header['difficulty'], Decimal)
def run_test(self):
    assert_equal(set(self.nodes[0].listwallets()), {"w1", "w2", "w3"})

    self.stop_node(0)

    # should not initialize if there are duplicate wallets
    self.assert_start_raises_init_error(
        0, ['-wallet=w1', '-wallet=w1'],
        'Error loading wallet w1. Duplicate -wallet filename specified.')

    # should not initialize if wallet file is a directory
    os.mkdir(os.path.join(self.options.tmpdir, 'node0', 'regtest', 'w11'))
    self.assert_start_raises_init_error(
        0, ['-wallet=w11'],
        'Error loading wallet w11. -wallet filename must be a regular file.')

    # should not initialize if wallet file is a symlink
    os.symlink(os.path.join(self.options.tmpdir, 'node0', 'regtest', 'w1'),
               os.path.join(self.options.tmpdir, 'node0', 'regtest', 'w12'))
    self.assert_start_raises_init_error(
        0, ['-wallet=w12'],
        'Error loading wallet w12. -wallet filename must be a regular file.')

    self.start_node(0, self.extra_args[0])

    w1 = self.nodes[0].get_wallet_rpc("w1")
    w2 = self.nodes[0].get_wallet_rpc("w2")
    w3 = self.nodes[0].get_wallet_rpc("w3")
    wallet_bad = self.nodes[0].get_wallet_rpc("bad")

    w1.generate(1)

    # accessing invalid wallet fails
    assert_raises_jsonrpc(-18, "Requested wallet does not exist or is not loaded",
                          wallet_bad.getwalletinfo)

    # accessing wallet RPC without using wallet endpoint fails
    assert_raises_jsonrpc(-19, "Wallet file not specified",
                          self.nodes[0].getwalletinfo)

    # check w1 wallet balance
    w1_info = w1.getwalletinfo()
    assert_equal(w1_info['immature_balance'], 50)
    w1_name = w1_info['walletname']
    assert_equal(w1_name, "w1")

    # check w2 wallet balance
    w2_info = w2.getwalletinfo()
    assert_equal(w2_info['immature_balance'], 0)
    w2_name = w2_info['walletname']
    assert_equal(w2_name, "w2")

    w3_name = w3.getwalletinfo()['walletname']
    assert_equal(w3_name, "w3")

    assert_equal({"w1", "w2", "w3"}, {w1_name, w2_name, w3_name})

    w1.generate(101)
    assert_equal(w1.getbalance(), 100)
    assert_equal(w2.getbalance(), 0)
    assert_equal(w3.getbalance(), 0)

    w1.sendtoaddress(w2.getnewaddress(), 1)
    w1.sendtoaddress(w3.getnewaddress(), 2)
    w1.generate(1)
    assert_equal(w2.getbalance(), 1)
    assert_equal(w3.getbalance(), 2)
def run_test(self):
    self.stop_node(0)

    # should not initialize if there are duplicate wallets
    self.assert_start_raises_init_error(
        0, ['-wallet=w1', '-wallet=w1'],
        'Error loading wallet w1. Duplicate -wallet filename specified.')

    # should not initialize if wallet file is a directory
    os.mkdir(os.path.join(self.options.tmpdir, 'node0', 'regtest', 'w11'))
    self.assert_start_raises_init_error(
        0, ['-wallet=w11'],
        'Error loading wallet w11. -wallet filename must be a regular file.')

    # should not initialize if wallet file is a symlink
    os.symlink(os.path.join(self.options.tmpdir, 'node0', 'regtest', 'w1'),
               os.path.join(self.options.tmpdir, 'node0', 'regtest', 'w12'))
    self.assert_start_raises_init_error(
        0, ['-wallet=w12'],
        'Error loading wallet w12. -wallet filename must be a regular file.')

    self.start_node(0, self.extra_args[0])

    w1 = self.nodes[0].get_wallet_rpc("w1")
    w2 = self.nodes[0].get_wallet_rpc("w2")
    w3 = self.nodes[0].get_wallet_rpc("w3")
    wallet_bad = self.nodes[0].get_wallet_rpc("bad")

    w1.generate(1)

    # accessing invalid wallet fails
    assert_raises_jsonrpc(-18, "Requested wallet does not exist or is not loaded",
                          wallet_bad.getwalletinfo)

    # accessing wallet RPC without using wallet endpoint fails
    assert_raises_jsonrpc(-19, "Wallet file not specified",
                          self.nodes[0].getwalletinfo)

    # check w1 wallet balance
    w1_info = w1.getwalletinfo()
    assert_equal(w1_info['immature_balance'], 50)
    w1_name = w1_info['walletname']
    assert_equal(w1_name, "w1")

    # check w2 wallet balance
    w2_info = w2.getwalletinfo()
    assert_equal(w2_info['immature_balance'], 0)
    w2_name = w2_info['walletname']
    assert_equal(w2_name, "w2")

    w3_name = w3.getwalletinfo()['walletname']
    assert_equal(w3_name, "w3")

    assert_equal({"w1", "w2", "w3"}, {w1_name, w2_name, w3_name})

    w1.generate(101)
    assert_equal(w1.getbalance(), 100)
    assert_equal(w2.getbalance(), 0)
    assert_equal(w3.getbalance(), 0)

    w1.sendtoaddress(w2.getnewaddress(), 1)
    w1.sendtoaddress(w3.getnewaddress(), 2)
    w1.generate(1)
    assert_equal(w2.getbalance(), 1)
    assert_equal(w3.getbalance(), 2)
def run_test(self):
    # Should raise RPC_WALLET_ERROR (-4) if walletbroadcast is disabled.
    assert_raises_jsonrpc(-4, "Error: Wallet transaction broadcasting is disabled with -walletbroadcast",
                          self.nodes[0].resendwallettransactions)

    # Should return an empty array if there aren't unconfirmed wallet transactions.
    self.stop_node(0)
    self.nodes[0] = self.start_node(0, self.options.tmpdir)
    assert_equal(self.nodes[0].resendwallettransactions(), [])

    # Should return an array with the unconfirmed wallet transaction.
    txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
    assert_equal(self.nodes[0].resendwallettransactions(), [txid])
def _test_getaddednodeinfo(self):
    assert_equal(self.nodes[0].getaddednodeinfo(), [])
    # add a node (node2) to node0
    ip_port = "127.0.0.1:{}".format(p2p_port(2))
    self.nodes[0].addnode(ip_port, 'add')
    # check that the node has indeed been added
    added_nodes = self.nodes[0].getaddednodeinfo(ip_port)
    assert_equal(len(added_nodes), 1)
    assert_equal(added_nodes[0]['addednode'], ip_port)
    # check that a non-existent node returns an error
    assert_raises_jsonrpc(-24, "Node has not been added",
                          self.nodes[0].getaddednodeinfo, '1.1.1.1')
def run_test(self):
    # Should raise RPC_WALLET_ERROR (-4) if walletbroadcast is disabled.
    assert_raises_jsonrpc(-4, "Error: Wallet transaction broadcasting is disabled with -walletbroadcast",
                          self.nodes[0].resendwallettransactions)

    # Should return an empty array if there aren't unconfirmed wallet transactions.
    self.stop_node(0)
    self.start_node(0, extra_args=[])
    assert_equal(self.nodes[0].resendwallettransactions(), [])

    # Should return an array with the unconfirmed wallet transaction.
    txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
    assert_equal(self.nodes[0].resendwallettransactions(), [txid])
def run_test(self): passphrase = "WalletPassphrase" passphrase2 = "SecondWalletPassphrase" # Make sure the wallet isn't encrypted first address = self.nodes[0].getnewaddress() privkey = self.nodes[0].dumpprivkey(address) assert_equal(privkey[:1], "c") assert_equal(len(privkey), 52) # Encrypt the wallet self.nodes[0].encryptwallet(passphrase) self.bitcoind_processes[0].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) self.nodes[0] = self.start_node(0, self.options.tmpdir) # Test that the wallet is encrypted assert_raises_jsonrpc( -13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address) # Check that walletpassphrase works self.nodes[0].walletpassphrase(passphrase, 2) assert_equal(privkey, self.nodes[0].dumpprivkey(address)) # Check that the timeout is right time.sleep(2) assert_raises_jsonrpc( -13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address) # Test wrong passphrase assert_raises_jsonrpc(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase + "wrong", 10) # Test walletlock self.nodes[0].walletpassphrase(passphrase, 84600) assert_equal(privkey, self.nodes[0].dumpprivkey(address)) self.nodes[0].walletlock() assert_raises_jsonrpc( -13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address) # Test passphrase changes self.nodes[0].walletpassphrasechange(passphrase, passphrase2) assert_raises_jsonrpc(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase, 10) self.nodes[0].walletpassphrase(passphrase2, 10) assert_equal(privkey, self.nodes[0].dumpprivkey(address))
def reorg_back(self):
    # Verify that a block on the old main chain fork has been pruned away
    assert_raises_jsonrpc(-1, "Block not available (pruned data)",
                          self.nodes[2].getblock, self.forkhash)
    print("Will need to redownload block", self.forkheight)

    # Verify that we have enough history to reorg back to the fork point.
    # Although this is more than 1440 blocks, because this chain was written
    # more recently and only its other 1441 small and 992 large blocks are in
    # the block files after it, it is expected to still be retained.
    self.nodes[2].getblock(self.nodes[2].getblockhash(self.forkheight))

    first_reorg_height = self.nodes[2].getblockcount()
    curchainhash = self.nodes[2].getblockhash(self.mainchainheight)
    self.nodes[2].invalidateblock(curchainhash)
    goalbestheight = self.mainchainheight
    goalbesthash = self.mainchainhash2

    # As of 0.10 the current block download logic is not able to reorg to the
    # original chain created in create_chain_with_stale_blocks because it
    # doesn't know of any peer that's on that chain from which to redownload
    # its missing blocks.
    # Invalidate the reorg_test chain in node 0 as well; it can successfully
    # switch to the original chain because it has all the block data.
    # However, it must mine enough blocks to have a more-work chain than the
    # reorg_test chain in order to trigger node 2's block download logic.
    # At this point node 2 is within 1440 blocks of the fork point, so it will
    # preserve its ability to reorg.
    if self.nodes[2].getblockcount() < self.mainchainheight:
        blocks_to_mine = first_reorg_height + 1 - self.mainchainheight
        print("Rewind node 0 to prev main chain to mine longer chain to trigger redownload. Blocks needed:",
              blocks_to_mine)
        self.nodes[0].invalidateblock(curchainhash)
        assert self.nodes[0].getblockcount() == self.mainchainheight
        assert self.nodes[0].getbestblockhash() == self.mainchainhash2
        goalbesthash = self.nodes[0].generate(blocks_to_mine)[-1]
        goalbestheight = first_reorg_height + 1

    print("Verify node 2 reorged back to the main chain, some blocks of which it had to redownload")
    waitstart = time.time()
    while self.nodes[2].getblockcount() < goalbestheight:
        time.sleep(0.1)
        if time.time() - waitstart > 900:
            raise AssertionError("Node 2 didn't reorg to proper height")
    assert self.nodes[2].getbestblockhash() == goalbesthash

    # Verify we can now have the data for a block previously pruned
    assert self.nodes[2].getblock(self.forkhash)["height"] == self.forkheight
def run_test(self): self.log.info("Mining blocks...") self.nodes[0].generate(1) self.sync_all() self.nodes[1].generate(100) self.sync_all() # This transaction will be confirmed txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 10) self.nodes[0].generate(1) self.sync_all() # This transaction will not be confirmed txid2 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 20) # Confirmed and unconfirmed transactions are now in the wallet. assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1) assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2) # Stop-start node0. Both confirmed and unconfirmed transactions remain in the wallet. self.stop_node(0) self.start_node(0) assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1) assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2) # Stop node0 and restart with zapwallettxes and persistmempool. The unconfirmed # transaction is zapped from the wallet, but is re-added when the mempool is reloaded. self.stop_node(0) self.start_node(0, ["-persistmempool=1", "-zapwallettxes=2"]) wait_until(lambda: self.nodes[0].getmempoolinfo()['size'] == 1, timeout=3) assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1) assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2) # Stop node0 and restart with zapwallettxes, but not persistmempool. # The unconfirmed transaction is zapped and is no longer in the wallet. self.stop_node(0) self.start_node(0, ["-zapwallettxes=2"]) # tx1 is still be available because it was confirmed assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1) # This will raise an exception because the unconfirmed transaction has been zapped assert_raises_jsonrpc(-5, 'Invalid or non-wallet transaction id', self.nodes[0].gettransaction, txid2)
def run_test(self):
    node = self.nodes[0]
    h = node.help(command='getinfo')
    assert h.startswith('getinfo\n')

    assert_raises_jsonrpc(-8, 'Unknown named parameter',
                          node.help, random='getinfo')

    h = node.getblockhash(height=0)
    node.getblock(blockhash=h)

    assert_equal(node.echo(), [])
    assert_equal(node.echo(arg0=0, arg9=9), [0] + [None] * 8 + [9])
    assert_equal(node.echo(arg1=1), [None, 1])
    assert_equal(node.echo(arg9=None), [None] * 10)
    assert_equal(node.echo(arg0=0, arg3=3, arg9=9), [0] + [None] * 2 + [3] + [None] * 5 + [9])
def run_test(self):
    node = self.nodes[0]
    h = node.help(command='getinfo')
    assert h.startswith('getinfo\n')

    # assert_raises_jsonrpc expects the error message before the callable
    assert_raises_jsonrpc(-8, 'Unknown named parameter',
                          node.help, random='getinfo')

    h = node.getblockhash(height=0)
    node.getblock(blockhash=h)

    assert_equal(node.echo(), [])
    assert_equal(node.echo(arg0=0, arg9=9), [0] + [None] * 8 + [9])
    assert_equal(node.echo(arg1=1), [None, 1])
    assert_equal(node.echo(arg9=None), [None] * 10)
    assert_equal(node.echo(arg0=0, arg3=3, arg9=9), [0] + [None] * 2 + [3] + [None] * 5 + [9])
def run_test(self):
    tmpdir = self.options.tmpdir

    # generate 20 addresses to compare against the dump
    test_addr_count = 20
    addrs = []
    for i in range(0, test_addr_count):
        addr = self.nodes[0].getnewaddress()
        vaddr = self.nodes[0].validateaddress(addr)  # required to get hd keypath
        addrs.append(vaddr)
    # Should be a no-op:
    self.nodes[0].keypoolrefill()

    # dump unencrypted wallet
    result = self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.unencrypted.dump")
    assert_equal(result['filename'],
                 os.path.abspath(tmpdir + "/node0/wallet.unencrypted.dump"))

    found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_unenc = \
        read_dump(tmpdir + "/node0/wallet.unencrypted.dump", addrs, None)
    assert_equal(found_addr, test_addr_count)  # all keys must be in the dump
    assert_equal(found_addr_chg, 50)  # 50 blocks were mined
    assert_equal(found_addr_rsv, 90 * 2)  # 90 keys plus 100% internal keys

    # encrypt wallet, restart, unlock and dump
    self.nodes[0].node_encrypt_wallet('test')
    self.start_node(0)
    self.nodes[0].walletpassphrase('test', 10)
    # Should be a no-op:
    self.nodes[0].keypoolrefill()
    self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.encrypted.dump")

    found_addr, found_addr_chg, found_addr_rsv, _ = \
        read_dump(tmpdir + "/node0/wallet.encrypted.dump", addrs, hd_master_addr_unenc)
    assert_equal(found_addr, test_addr_count)
    assert_equal(found_addr_chg, 90 * 2 + 50)  # old reserve keys are marked as change now
    assert_equal(found_addr_rsv, 90 * 2)

    # Overwriting should fail
    assert_raises_jsonrpc(-8, "already exists", self.nodes[0].dumpwallet,
                          tmpdir + "/node0/wallet.unencrypted.dump")
def _test_getblockheader(self):
    node = self.nodes[0]

    assert_raises_jsonrpc(-5, "Block not found",
                          node.getblockheader, "nonsense")

    besthash = node.getbestblockhash()
    secondbesthash = node.getblockhash(199)
    header = node.getblockheader(besthash)

    assert_equal(header['hash'], besthash)
    assert_equal(header['height'], 200)
    assert_equal(header['confirmations'], 1)
    assert_equal(header['previousblockhash'], secondbesthash)
    assert_is_hex_string(header['chainwork'])
    assert_is_hash_string(header['hash'])
    assert_is_hash_string(header['previousblockhash'])
    assert_is_hash_string(header['merkleroot'])
    assert_is_hash_string(header['bits'], length=None)
    assert isinstance(header['time'], int)
    assert isinstance(header['mediantime'], int)
    assert isinstance(header['nonce'], int)
    assert isinstance(header['version'], int)
    assert isinstance(int(header['versionHex'], 16), int)
    assert isinstance(header['difficulty'], Decimal)

    assert_equal(
        header["blockstatus"]["validation_checks"],
        "Valid header; Parent headers are valid; Valid transactions; Valid coin spends; Valid scripts and signatures")
    assert_equal(header["blockstatus"]["validation_failures"], "")
    assert_equal(header["blockstatus"]["data_availablity"], "Block data available")

    node.invalidateblock(besthash)
    header = node.getblockheader(besthash)
    assert_equal(
        header["blockstatus"]["validation_failures"],
        "Block failed a validity check; Block descends from a failed block")
    node.reconsiderblock(besthash)
def run_test(self): passphrase = "WalletPassphrase" passphrase2 = "SecondWalletPassphrase" # Make sure the wallet isn't encrypted first address = self.nodes[0].getnewaddress() privkey = self.nodes[0].dumpprivkey(address) assert_equal(privkey[:1], "c") assert_equal(len(privkey), 52) # Encrypt the wallet self.nodes[0].encryptwallet(passphrase) self.bitcoind_processes[0].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) self.nodes[0] = self.start_node(0, self.options.tmpdir) # Test that the wallet is encrypted assert_raises_jsonrpc(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address) # Check that walletpassphrase works self.nodes[0].walletpassphrase(passphrase, 2) assert_equal(privkey, self.nodes[0].dumpprivkey(address)) # Check that the timeout is right time.sleep(2) assert_raises_jsonrpc(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address) # Test wrong passphrase assert_raises_jsonrpc(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase + "wrong", 10) # Test walletlock self.nodes[0].walletpassphrase(passphrase, 84600) assert_equal(privkey, self.nodes[0].dumpprivkey(address)) self.nodes[0].walletlock() assert_raises_jsonrpc(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address) # Test passphrase changes self.nodes[0].walletpassphrasechange(passphrase, passphrase2) assert_raises_jsonrpc(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase, 10) self.nodes[0].walletpassphrase(passphrase2, 10) assert_equal(privkey, self.nodes[0].dumpprivkey(address))
def run_test(self): w1 = self.nodes[0] / "wallet/w1" w1.generate(1) # accessing wallet RPC without using wallet endpoint fails assert_raises_jsonrpc(-32601, "Method not found", self.nodes[0].getwalletinfo) # check w1 wallet balance w1_info = w1.getwalletinfo() assert_equal(w1_info['immature_balance'], 50) w1_name = w1_info['walletname'] assert_equal(w1_name, "w1") # check w1 wallet balance w2 = self.nodes[0] / "wallet/w2" w2_info = w2.getwalletinfo() assert_equal(w2_info['immature_balance'], 0) w2_name = w2_info['walletname'] assert_equal(w2_name, "w2") w3 = self.nodes[0] / "wallet/w3" w3_name = w3.getwalletinfo()['walletname'] assert_equal(w3_name, "w3") assert_equal({"w1", "w2", "w3"}, {w1_name, w2_name, w3_name}) w1.generate(101) assert_equal(w1.getbalance(), 100) assert_equal(w2.getbalance(), 0) assert_equal(w3.getbalance(), 0) w1.sendtoaddress(w2.getnewaddress(), 1) w1.sendtoaddress(w3.getnewaddress(), 2) w1.generate(1) assert_equal(w2.getbalance(), 1) assert_equal(w3.getbalance(), 2)
def run_test(self):
    ###########################
    # setban/listbanned tests #
    ###########################
    # node1 should have 2 connections to node0 at this point
    assert_equal(len(self.nodes[1].getpeerinfo()), 2)
    self.nodes[1].setban("127.0.0.1", "add")
    assert wait_until(lambda: len(self.nodes[1].getpeerinfo()) == 0, timeout=10)
    # all nodes must be disconnected at this point
    assert_equal(len(self.nodes[1].getpeerinfo()), 0)
    assert_equal(len(self.nodes[1].listbanned()), 1)
    self.nodes[1].clearbanned()
    assert_equal(len(self.nodes[1].listbanned()), 0)
    self.nodes[1].setban("127.0.0.0/24", "add")
    assert_equal(len(self.nodes[1].listbanned()), 1)
    # This will throw an exception because 127.0.0.1 is within range 127.0.0.0/24
    assert_raises_jsonrpc(-23, "IP/Subnet already banned",
                          self.nodes[1].setban, "127.0.0.1", "add")
    # This will throw an exception because 127.0.0.1/42 is not a real subnet
    assert_raises_jsonrpc(-30, "Error: Invalid IP/Subnet",
                          self.nodes[1].setban, "127.0.0.1/42", "add")
    # still only one banned ip because 127.0.0.1 is within the range of 127.0.0.0/24
    assert_equal(len(self.nodes[1].listbanned()), 1)
    # This will throw an exception because 127.0.0.1 was not added above
    assert_raises_jsonrpc(-30, "Error: Unban failed",
                          self.nodes[1].setban, "127.0.0.1", "remove")
    assert_equal(len(self.nodes[1].listbanned()), 1)
    self.nodes[1].setban("127.0.0.0/24", "remove")
    assert_equal(len(self.nodes[1].listbanned()), 0)
    self.nodes[1].clearbanned()
    assert_equal(len(self.nodes[1].listbanned()), 0)

    # test persisted banlist
    self.nodes[1].setban("127.0.0.0/32", "add")
    self.nodes[1].setban("127.0.0.0/24", "add")
    self.nodes[1].setban("192.168.0.1", "add", 1)  # ban for 1 second
    self.nodes[1].setban("2001:4d48:ac57:400:cacf:e9ff:fe1d:9c63/19", "add", 1000)  # ban for 1000 seconds
    listBeforeShutdown = self.nodes[1].listbanned()
    assert_equal("192.168.0.1/32", listBeforeShutdown[2]['address'])
    assert wait_until(lambda: len(self.nodes[1].listbanned()) == 3, timeout=10)

    stop_node(self.nodes[1], 1)

    self.nodes[1] = start_node(1, self.options.tmpdir)
    listAfterShutdown = self.nodes[1].listbanned()
    assert_equal("127.0.0.0/24", listAfterShutdown[0]['address'])
    assert_equal("127.0.0.0/32", listAfterShutdown[1]['address'])
    assert_equal("/19" in listAfterShutdown[2]['address'], True)

    # Clear ban lists
    self.nodes[1].clearbanned()
    connect_nodes_bi(self.nodes, 0, 1)

    ###########################
    # RPC disconnectnode test #
    ###########################
    address1 = self.nodes[0].getpeerinfo()[0]['addr']
    self.nodes[0].disconnectnode(address=address1)
    assert wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10)
    assert not [node for node in self.nodes[0].getpeerinfo() if node['addr'] == address1]

    connect_nodes_bi(self.nodes, 0, 1)  # reconnect the node
    assert [node for node in self.nodes[0].getpeerinfo() if node['addr'] == address1]
def run_test(self):
    node = self.nodes[0]

    self.log.info('getmininginfo')
    mining_info = node.getmininginfo()
    assert_equal(mining_info['blocks'], 200)
    assert_equal(mining_info['chain'], 'regtest')
    assert_equal(mining_info['currentblocksize'], 0)
    assert_equal(mining_info['currentblocktx'], 0)
    assert_equal(mining_info['currentblockweight'], 0)
    assert_equal(mining_info['difficulty'], Decimal('4.656542373906925E-10'))
    assert_equal(mining_info['networkhashps'], Decimal('0.003333333333333334'))
    assert_equal(mining_info['pooledtx'], 0)

    # Mine a block to leave initial block download
    node.generate(1)
    tmpl = node.getblocktemplate()
    self.log.info("getblocktemplate: Test capability advertised")
    assert 'proposal' in tmpl['capabilities']
    assert 'coinbasetxn' not in tmpl

    coinbase_tx = create_coinbase(height=int(tmpl["height"]) + 1)
    # sequence numbers must not be max for nLockTime to have effect
    coinbase_tx.vin[0].nSequence = 2**32 - 2
    coinbase_tx.rehash()

    block = CBlock()
    block.nVersion = tmpl["version"]
    block.hashPrevBlock = int(tmpl["previousblockhash"], 16)
    block.nTime = tmpl["curtime"]
    block.nBits = int(tmpl["bits"], 16)
    block.nNonce = 0
    block.vtx = [coinbase_tx]

    self.log.info("getblocktemplate: Test valid block")
    assert_template(node, block, None)

    self.log.info("submitblock: Test block decode failure")
    assert_raises_jsonrpc(-22, "Block decode failed",
                          node.submitblock, b2x(block.serialize()[:-15]))

    self.log.info("getblocktemplate: Test bad input hash for coinbase transaction")
    bad_block = copy.deepcopy(block)
    bad_block.vtx[0].vin[0].prevout.hash += 1
    bad_block.vtx[0].rehash()
    assert_template(node, bad_block, 'bad-cb-missing')

    self.log.info("submitblock: Test invalid coinbase transaction")
    assert_raises_jsonrpc(-22, "Block does not start with a coinbase",
                          node.submitblock, b2x(bad_block.serialize()))

    self.log.info("getblocktemplate: Test truncated final transaction")
    assert_raises_jsonrpc(-22, "Block decode failed", node.getblocktemplate,
                          {'data': b2x(block.serialize()[:-1]), 'mode': 'proposal'})

    self.log.info("getblocktemplate: Test duplicate transaction")
    bad_block = copy.deepcopy(block)
    bad_block.vtx.append(bad_block.vtx[0])
    assert_template(node, bad_block, 'bad-txns-duplicate')

    self.log.info("getblocktemplate: Test invalid transaction")
    bad_block = copy.deepcopy(block)
    bad_tx = copy.deepcopy(bad_block.vtx[0])
    bad_tx.vin[0].prevout.hash = 255
    bad_tx.rehash()
    bad_block.vtx.append(bad_tx)
    assert_template(node, bad_block, 'bad-txns-inputs-missingorspent')

    self.log.info("getblocktemplate: Test nonfinal transaction")
    bad_block = copy.deepcopy(block)
    bad_block.vtx[0].nLockTime = 2**32 - 1
    bad_block.vtx[0].rehash()
    assert_template(node, bad_block, 'bad-txns-nonfinal')

    self.log.info("getblocktemplate: Test bad tx count")
    # The tx count is immediately after the block header
    TX_COUNT_OFFSET = 80
    bad_block_sn = bytearray(block.serialize())
    assert_equal(bad_block_sn[TX_COUNT_OFFSET], 1)
    bad_block_sn[TX_COUNT_OFFSET] += 1
    assert_raises_jsonrpc(-22, "Block decode failed", node.getblocktemplate,
                          {'data': b2x(bad_block_sn), 'mode': 'proposal'})

    self.log.info("getblocktemplate: Test bad bits")
    bad_block = copy.deepcopy(block)
    bad_block.nBits = 469762303  # impossible in the real world
    assert_template(node, bad_block, 'bad-diffbits')

    self.log.info("getblocktemplate: Test bad merkle root")
    bad_block = copy.deepcopy(block)
    bad_block.hashMerkleRoot += 1
    assert_template(node, bad_block, 'bad-txnmrklroot', False)

    self.log.info("getblocktemplate: Test bad timestamps")
    bad_block = copy.deepcopy(block)
    bad_block.nTime = 2**31 - 1
    assert_template(node, bad_block, 'time-too-new')
    bad_block.nTime = 0
    assert_template(node, bad_block, 'time-too-old')

    self.log.info("getblocktemplate: Test not best block")
    bad_block = copy.deepcopy(block)
    bad_block.hashPrevBlock = 123
    assert_template(node, bad_block, 'inconclusive-not-best-prevblk')
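# assert_template is a local helper of this mining test, not a framework
# function. A minimal sketch of the behaviour assumed above (the rehash flag
# and exact implementation may differ in the real test file):
def assert_template(node, block, expect, rehash=True):
    if rehash:
        block.hashMerkleRoot = block.calc_merkle_root()
    rsp = node.getblocktemplate(
        {'data': b2x(block.serialize()), 'mode': 'proposal'})
    assert_equal(rsp, expect)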
def run_test(self): self.log.info("Test setban and listbanned RPCs") self.log.info("setban: successfully ban single IP address") assert_equal(len(self.nodes[1].getpeerinfo()), 2) # node1 should have 2 connections to node0 at this point self.nodes[1].setban("127.0.0.1", "add") wait_until(lambda: len(self.nodes[1].getpeerinfo()) == 0, timeout=10) assert_equal(len(self.nodes[1].getpeerinfo()), 0) # all nodes must be disconnected at this point assert_equal(len(self.nodes[1].listbanned()), 1) self.log.info("clearbanned: successfully clear ban list") self.nodes[1].clearbanned() assert_equal(len(self.nodes[1].listbanned()), 0) self.nodes[1].setban("127.0.0.0/24", "add") self.log.info("setban: fail to ban an already banned subnet") assert_equal(len(self.nodes[1].listbanned()), 1) assert_raises_jsonrpc(-23, "IP/Subnet already banned", self.nodes[1].setban, "127.0.0.1", "add") self.log.info("setban: fail to ban an invalid subnet") assert_raises_jsonrpc(-30, "Error: Invalid IP/Subnet", self.nodes[1].setban, "127.0.0.1/42", "add") assert_equal(len(self.nodes[1].listbanned()), 1) # still only one banned ip because 127.0.0.1 is within the range of 127.0.0.0/24 self.log.info("setban remove: fail to unban a non-banned subnet") assert_raises_jsonrpc(-30, "Error: Unban failed", self.nodes[1].setban, "127.0.0.1", "remove") assert_equal(len(self.nodes[1].listbanned()), 1) self.log.info("setban remove: successfully unban subnet") self.nodes[1].setban("127.0.0.0/24", "remove") assert_equal(len(self.nodes[1].listbanned()), 0) self.nodes[1].clearbanned() assert_equal(len(self.nodes[1].listbanned()), 0) self.log.info("setban: test persistence across node restart") self.nodes[1].setban("127.0.0.0/32", "add") self.nodes[1].setban("127.0.0.0/24", "add") # Set the mocktime so we can control when bans expire old_time = int(time.time()) self.nodes[1].setmocktime(old_time) self.nodes[1].setban("192.168.0.1", "add", 1) # ban for 1 seconds self.nodes[1].setban("2001:4d48:ac57:400:cacf:e9ff:fe1d:9c63/19", "add", 1000) # ban for 1000 seconds listBeforeShutdown = self.nodes[1].listbanned() assert_equal("192.168.0.1/32", listBeforeShutdown[2]['address']) # Move time forward by 3 seconds so the third ban has expired self.nodes[1].setmocktime(old_time + 3) assert_equal(len(self.nodes[1].listbanned()), 3) self.stop_node(1) self.start_node(1) listAfterShutdown = self.nodes[1].listbanned() assert_equal("127.0.0.0/24", listAfterShutdown[0]['address']) assert_equal("127.0.0.0/32", listAfterShutdown[1]['address']) assert_equal("/19" in listAfterShutdown[2]['address'], True) # Clear ban lists self.nodes[1].clearbanned() connect_nodes_bi(self.nodes, 0, 1) self.log.info("Test disconnectnode RPCs") self.log.info("disconnectnode: fail to disconnect when calling with address and nodeid") address1 = self.nodes[0].getpeerinfo()[0]['addr'] node1 = self.nodes[0].getpeerinfo()[0]['addr'] assert_raises_jsonrpc(-32602, "Only one of address and nodeid should be provided.", self.nodes[0].disconnectnode, address=address1, nodeid=node1) self.log.info("disconnectnode: fail to disconnect when calling with junk address") assert_raises_jsonrpc(-29, "Node not found in connected nodes", self.nodes[0].disconnectnode, address="221B Baker Street") self.log.info("disconnectnode: successfully disconnect node by address") address1 = self.nodes[0].getpeerinfo()[0]['addr'] self.nodes[0].disconnectnode(address=address1) wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10) assert not [node for node in self.nodes[0].getpeerinfo() if node['addr'] == 
address1] self.log.info("disconnectnode: successfully reconnect node") connect_nodes_bi(self.nodes, 0, 1) # reconnect the node assert_equal(len(self.nodes[0].getpeerinfo()), 2) assert [node for node in self.nodes[0].getpeerinfo() if node['addr'] == address1] self.log.info("disconnectnode: successfully disconnect node by node id") id1 = self.nodes[0].getpeerinfo()[0]['id'] self.nodes[0].disconnectnode(nodeid=id1) wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10) assert not [node for node in self.nodes[0].getpeerinfo() if node['id'] == id1]
def get_tests(self):
    node = self.nodes[0]
    self.relayfee = self.nodes[0].getnetworkinfo()["relayfee"]

    # First, we generate some coins to spend.
    node.generate(125)

    # Create various outputs using the OP_AND to check for activation.
    tx_hex = self.create_and_tx(25)
    txid = node.sendrawtransaction(tx_hex)
    assert txid in set(node.getrawmempool())

    node.generate(1)
    assert txid not in set(node.getrawmempool())

    # register the spendable outputs.
    tx = FromHex(CTransaction(), tx_hex)
    tx.rehash()
    spendable_ands = [
        PreviousSpendableOutput(tx, i) for i in range(len(tx.vout))
    ]

    def spend_and():
        outpoint = spendable_ands.pop()
        out = outpoint.tx.vout[outpoint.n]
        value = int(out.nValue - (self.relayfee * COIN))
        tx = CTransaction()
        tx.vin = [CTxIn(COutPoint(outpoint.tx.sha256, outpoint.n))]
        tx.vout = [CTxOut(value, CScript([]))]
        tx.rehash()
        return tx

    # Check that transactions spending the OP_AND outputs are not accepted yet.
    print("Try to use the monolith opcodes before activation")
    tx0 = spend_and()
    tx0_hex = ToHex(tx0)
    assert_raises_jsonrpc(-26, RPC_DISABLED_OPCODE_ERROR,
                          node.sendrawtransaction, tx0_hex)

    # Push MTP forward just before activation.
    print("Pushing MTP just before the activation and check again")
    node.setmocktime(MONOLITH_START_TIME)

    # returns a test case that asserts that the current tip was accepted
    def accepted(tip):
        return TestInstance([[tip, True]])

    # returns a test case that asserts that the current tip was rejected
    def rejected(tip, reject=None):
        if reject is None:
            return TestInstance([[tip, False]])
        else:
            return TestInstance([[tip, reject]])

    def next_block(block_time):
        # get block height
        blockchaininfo = node.getblockchaininfo()
        height = int(blockchaininfo['blocks']) + 1

        # create the block
        coinbase = create_coinbase(absoluteHeight=height)
        coinbase.rehash()
        block = create_block(int(node.getbestblockhash(), 16), coinbase, block_time)
        block.nVersion = 4

        # Do PoW, which is cheap on regtest
        block.solve()
        return block

    for i in range(6):
        b = next_block(MONOLITH_START_TIME + i - 1)
        yield accepted(b)

    # Check again just before the activation time
    assert_equal(node.getblockheader(node.getbestblockhash())['mediantime'],
                 MONOLITH_START_TIME - 1)
    assert_raises_jsonrpc(-26, RPC_DISABLED_OPCODE_ERROR,
                          node.sendrawtransaction, tx0_hex)

    def add_tx(block, tx):
        block.vtx.append(tx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

    b = next_block(MONOLITH_START_TIME + 6)
    add_tx(b, tx0)
    yield rejected(b, RejectResult(16, b'blk-bad-inputs'))

    print("Activates the new opcodes")
    fork_block = next_block(MONOLITH_START_TIME + 6)
    yield accepted(fork_block)

    assert_equal(node.getblockheader(node.getbestblockhash())['mediantime'],
                 MONOLITH_START_TIME)

    tx0id = node.sendrawtransaction(tx0_hex)
    assert tx0id in set(node.getrawmempool())

    # Transactions can also be included in blocks.
    monolithblock = next_block(MONOLITH_START_TIME + 7)
    add_tx(monolithblock, tx0)
    yield accepted(monolithblock)

    print("Cause a reorg that deactivates the monolith opcodes")
    # Invalidate the monolith block, ensure tx0 gets back to the mempool.
    assert tx0id not in set(node.getrawmempool())
    node.invalidateblock(format(monolithblock.sha256, 'x'))
    assert tx0id in set(node.getrawmempool())
    node.invalidateblock(format(fork_block.sha256, 'x'))
    assert tx0id not in set(node.getrawmempool())
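# PreviousSpendableOutput is assumed to be the small holder class from the
# comptool-style block test framework used by the activation tests above and
# below; a minimal sketch of what those tests rely on:
class PreviousSpendableOutput():
    def __init__(self, tx=CTransaction(), n=-1):
        self.tx = tx
        # index of the spendable output within tx.vout
        self.n = n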
def run_test(self):
    test_data = os.path.join(TESTSDIR, self.options.test_data)
    if self.options.gen_test_data:
        self.generate_test_data(test_data)
    else:
        self.load_test_data(test_data)

    self.sync_all()
    stats = self.get_stats()
    expected_stats_noindex = []
    for stat_row in stats:
        expected_stats_noindex.append(
            {k: v for k, v in stat_row.items() if k not in self.STATS_NEED_TXINDEX})

    # Make sure all valid statistics are included but nothing else is
    expected_keys = self.expected_stats[0].keys()
    assert_equal(set(stats[0].keys()), set(expected_keys))

    assert_equal(stats[0]['height'], self.start_height)
    assert_equal(stats[self.max_stat_pos]['height'],
                 self.start_height + self.max_stat_pos)

    for i in range(self.max_stat_pos + 1):
        logging.info('Checking block %d\n' % (i))
        assert_equal(stats[i], self.expected_stats[i])

        # Check selecting block by hash too
        blockhash = self.expected_stats[i]['blockhash']
        stats_by_hash = self.nodes[0].getblockstats(hash_or_height=blockhash)
        assert_equal(stats_by_hash, self.expected_stats[i])

        # Check with the node that has no txindex
        stats_no_txindex = self.nodes[1].getblockstats(
            hash_or_height=blockhash, stats=list(expected_stats_noindex[i].keys()))
        assert_equal(stats_no_txindex, expected_stats_noindex[i])

    # Make sure each stat can be queried on its own
    for stat in expected_keys:
        for i in range(self.max_stat_pos + 1):
            result = self.nodes[0].getblockstats(
                hash_or_height=self.start_height + i, stats=[stat])
            assert_equal(list(result.keys()), [stat])
            if result[stat] != self.expected_stats[i][stat]:
                logging.info('result[%s] (%d) failed, %r != %r' % (
                    stat, i, result[stat], self.expected_stats[i][stat]))
            assert_equal(result[stat], self.expected_stats[i][stat])

    # Make sure only the selected statistics are included (more than one)
    some_stats = {'minfee', 'maxfee'}
    stats = self.nodes[0].getblockstats(hash_or_height=1, stats=list(some_stats))
    assert_equal(set(stats.keys()), some_stats)

    # Test invalid parameters raise the proper json exceptions
    tip = self.start_height + self.max_stat_pos
    assert_raises_jsonrpc(-8, 'Target block height %d after current tip %d' % (tip + 1, tip),
                          self.nodes[0].getblockstats, hash_or_height=tip + 1)
    assert_raises_jsonrpc(-8, 'Target block height %d is negative' % (-1),
                          self.nodes[0].getblockstats, hash_or_height=-1)

    # Make sure invalid stats aren't allowed
    inv_sel_stat = 'asdfghjkl'
    inv_stats = [
        [inv_sel_stat],
        ['minfee', inv_sel_stat],
        [inv_sel_stat, 'minfee'],
        ['minfee', inv_sel_stat, 'maxfee'],
    ]
    for inv_stat in inv_stats:
        assert_raises_jsonrpc(-8, 'Invalid selected statistic %s' % inv_sel_stat,
                              self.nodes[0].getblockstats,
                              hash_or_height=1, stats=inv_stat)

    # Make sure we aren't always returning inv_sel_stat as the culprit stat
    assert_raises_jsonrpc(-8, 'Invalid selected statistic aaa%s' % inv_sel_stat,
                          self.nodes[0].getblockstats,
                          hash_or_height=1, stats=['minfee', 'aaa%s' % inv_sel_stat])

    assert_raises_jsonrpc(-8, 'One or more of the selected stats requires -txindex enabled',
                          self.nodes[1].getblockstats,
                          hash_or_height=self.start_height + self.max_stat_pos)

    # Mainchain's genesis block shouldn't be found on regtest
    assert_raises_jsonrpc(-5, 'Block not found', self.nodes[0].getblockstats,
                          hash_or_height='000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f')
def run_test(self): self.log.info("Test setban and listbanned RPCs") self.log.info("setban: successfully ban single IP address") assert_equal( len(self.nodes[1].getpeerinfo()), 2) # node1 should have 2 connections to node0 at this point self.nodes[1].setban("127.0.0.1", "add") assert wait_until(lambda: len(self.nodes[1].getpeerinfo()) == 0, timeout=10) assert_equal(len(self.nodes[1].getpeerinfo()), 0) # all nodes must be disconnected at this point assert_equal(len(self.nodes[1].listbanned()), 1) self.log.info("clearbanned: successfully clear ban list") self.nodes[1].clearbanned() assert_equal(len(self.nodes[1].listbanned()), 0) self.nodes[1].setban("127.0.0.0/24", "add") self.log.info("setban: fail to ban an already banned subnet") assert_equal(len(self.nodes[1].listbanned()), 1) assert_raises_jsonrpc(-23, "IP/Subnet already banned", self.nodes[1].setban, "127.0.0.1", "add") self.log.info("setban: fail to ban an invalid subnet") assert_raises_jsonrpc(-30, "Error: Invalid IP/Subnet", self.nodes[1].setban, "127.0.0.1/42", "add") assert_equal( len(self.nodes[1].listbanned()), 1 ) # still only one banned ip because 127.0.0.1 is within the range of 127.0.0.0/24 self.log.info("setban remove: fail to unban a non-banned subnet") assert_raises_jsonrpc(-30, "Error: Unban failed", self.nodes[1].setban, "127.0.0.1", "remove") assert_equal(len(self.nodes[1].listbanned()), 1) self.log.info("setban remove: successfully unban subnet") self.nodes[1].setban("127.0.0.0/24", "remove") assert_equal(len(self.nodes[1].listbanned()), 0) self.nodes[1].clearbanned() assert_equal(len(self.nodes[1].listbanned()), 0) self.log.info("setban: test persistence across node restart") self.nodes[1].setban("127.0.0.0/32", "add") self.nodes[1].setban("127.0.0.0/24", "add") # Set the mocktime so we can control when bans expire old_time = int(time.time()) self.nodes[1].setmocktime(old_time) self.nodes[1].setban("192.168.0.1", "add", 1) # ban for 1 seconds self.nodes[1].setban("2001:4d48:ac57:400:cacf:e9ff:fe1d:9c63/19", "add", 1000) # ban for 1000 seconds listBeforeShutdown = self.nodes[1].listbanned() assert_equal("192.168.0.1/32", listBeforeShutdown[2]['address']) # Move time forward by 3 seconds so the third ban has expired self.nodes[1].setmocktime(old_time + 3) assert_equal(len(self.nodes[1].listbanned()), 3) self.stop_node(1) self.nodes[1] = self.start_node(1, self.options.tmpdir) listAfterShutdown = self.nodes[1].listbanned() assert_equal("127.0.0.0/24", listAfterShutdown[0]['address']) assert_equal("127.0.0.0/32", listAfterShutdown[1]['address']) assert_equal("/19" in listAfterShutdown[2]['address'], True) # Clear ban lists self.nodes[1].clearbanned() connect_nodes_bi(self.nodes, 0, 1) self.log.info("Test disconnectnode RPCs") self.log.info( "disconnectnode: fail to disconnect when calling with address and nodeid" ) address1 = self.nodes[0].getpeerinfo()[0]['addr'] node1 = self.nodes[0].getpeerinfo()[0]['addr'] assert_raises_jsonrpc( -32602, "Only one of address and nodeid should be provided.", self.nodes[0].disconnectnode, address=address1, nodeid=node1) self.log.info( "disconnectnode: fail to disconnect when calling with junk address" ) assert_raises_jsonrpc(-29, "Node not found in connected nodes", self.nodes[0].disconnectnode, address="221B Baker Street") self.log.info( "disconnectnode: successfully disconnect node by address") address1 = self.nodes[0].getpeerinfo()[0]['addr'] self.nodes[0].disconnectnode(address=address1) assert wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10) assert not [ node 
for node in self.nodes[0].getpeerinfo() if node['addr'] == address1 ] self.log.info("disconnectnode: successfully reconnect node") connect_nodes_bi(self.nodes, 0, 1) # reconnect the node assert_equal(len(self.nodes[0].getpeerinfo()), 2) assert [ node for node in self.nodes[0].getpeerinfo() if node['addr'] == address1 ] self.log.info( "disconnectnode: successfully disconnect node by node id") id1 = self.nodes[0].getpeerinfo()[0]['id'] self.nodes[0].disconnectnode(nodeid=id1) assert wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10) assert not [ node for node in self.nodes[0].getpeerinfo() if node['id'] == id1 ]
def run_test(self):
    node = self.nodes[0]

    self.log.info('getmininginfo')
    mining_info = node.getmininginfo()
    assert_equal(mining_info['blocks'], 200)
    assert_equal(mining_info['chain'], 'regtest')
    assert_equal(mining_info['currentblocksize'], 0)
    assert_equal(mining_info['currentblocktx'], 0)
    assert_equal(mining_info['currentblockweight'], 0)
    assert_equal(mining_info['difficulty'], Decimal('4.656542373906925E-10'))
    assert_equal(mining_info['networkhashps'], Decimal('0.003333333333333334'))
    assert_equal(mining_info['pooledtx'], 0)

    # Mine a block to leave initial block download
    node.generate(1)
    tmpl = node.getblocktemplate()
    self.log.info("getblocktemplate: Test capability advertised")
    assert 'proposal' in tmpl['capabilities']
    assert 'coinbasetxn' not in tmpl

    coinbase_tx = create_coinbase(height=int(tmpl["height"]) + 1)
    # sequence numbers must not be max for nLockTime to have effect
    coinbase_tx.vin[0].nSequence = 2 ** 32 - 2
    coinbase_tx.rehash()

    block = CBlock()
    block.nVersion = tmpl["version"]
    block.hashPrevBlock = int(tmpl["previousblockhash"], 16)
    block.nTime = tmpl["curtime"]
    block.nBits = int(tmpl["bits"], 16)
    block.nNonce = 0
    block.vtx = [coinbase_tx]

    self.log.info("getblocktemplate: Test valid block")
    assert_template(node, block, None)

    self.log.info("submitblock: Test block decode failure")
    assert_raises_jsonrpc(-22, "Block decode failed",
                          node.submitblock, b2x(block.serialize()[:-15]))

    self.log.info("getblocktemplate: Test bad input hash for coinbase transaction")
    bad_block = copy.deepcopy(block)
    bad_block.vtx[0].vin[0].prevout.hash += 1
    bad_block.vtx[0].rehash()
    assert_template(node, bad_block, 'bad-cb-missing')

    self.log.info("submitblock: Test invalid coinbase transaction")
    assert_raises_jsonrpc(-22, "Block does not start with a coinbase",
                          node.submitblock, b2x(bad_block.serialize()))

    self.log.info("getblocktemplate: Test truncated final transaction")
    assert_raises_jsonrpc(-22, "Block decode failed", node.getblocktemplate,
                          {'data': b2x(block.serialize()[:-1]), 'mode': 'proposal'})

    self.log.info("getblocktemplate: Test duplicate transaction")
    bad_block = copy.deepcopy(block)
    bad_block.vtx.append(bad_block.vtx[0])
    assert_template(node, bad_block, 'bad-txns-duplicate')

    self.log.info("getblocktemplate: Test invalid transaction")
    bad_block = copy.deepcopy(block)
    bad_tx = copy.deepcopy(bad_block.vtx[0])
    bad_tx.vin[0].prevout.hash = 255
    bad_tx.rehash()
    bad_block.vtx.append(bad_tx)
    assert_template(node, bad_block, 'bad-txns-inputs-missingorspent')

    self.log.info("getblocktemplate: Test nonfinal transaction")
    bad_block = copy.deepcopy(block)
    bad_block.vtx[0].nLockTime = 2 ** 32 - 1
    bad_block.vtx[0].rehash()
    assert_template(node, bad_block, 'bad-txns-nonfinal')

    self.log.info("getblocktemplate: Test bad tx count")
    # The tx count is immediately after the block header
    TX_COUNT_OFFSET = 80
    bad_block_sn = bytearray(block.serialize())
    assert_equal(bad_block_sn[TX_COUNT_OFFSET], 1)
    bad_block_sn[TX_COUNT_OFFSET] += 1
    assert_raises_jsonrpc(-22, "Block decode failed", node.getblocktemplate,
                          {'data': b2x(bad_block_sn), 'mode': 'proposal'})

    self.log.info("getblocktemplate: Test bad bits")
    bad_block = copy.deepcopy(block)
    bad_block.nBits = 469762303  # impossible in the real world
    assert_template(node, bad_block, 'bad-diffbits')

    self.log.info("getblocktemplate: Test bad merkle root")
    bad_block = copy.deepcopy(block)
    bad_block.hashMerkleRoot += 1
    assert_template(node, bad_block, 'bad-txnmrklroot', False)

    self.log.info("getblocktemplate: Test bad timestamps")
    bad_block = copy.deepcopy(block)
    bad_block.nTime = 2 ** 31 - 1
    assert_template(node, bad_block, 'time-too-new')
    bad_block.nTime = 0
    assert_template(node, bad_block, 'time-too-old')

    self.log.info("getblocktemplate: Test not best block")
    bad_block = copy.deepcopy(block)
    bad_block.hashPrevBlock = 123
    assert_template(node, bad_block, 'inconclusive-not-best-prevblk')
def get_tests(self):
    node = self.nodes[0]
    self.genesis_hash = int(node.getbestblockhash(), 16)
    self.block_heights[self.genesis_hash] = 0
    spendable_outputs = []

    # save the current tip so it can be spent by a later block
    def save_spendable_output():
        spendable_outputs.append(self.tip)

    # get an output that we previously marked as spendable
    def get_spendable_output():
        return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)

    # returns a test case that asserts that the current tip was accepted
    def accepted():
        return TestInstance([[self.tip, True]])

    # returns a test case that asserts that the current tip was rejected
    def rejected(reject=None):
        if reject is None:
            return TestInstance([[self.tip, False]])
        else:
            return TestInstance([[self.tip, reject]])

    # move the tip back to a previous block
    def tip(number):
        self.tip = self.blocks[number]

    # adds transactions to the block and updates state
    def update_block(block_number, new_transactions=[]):
        block = self.blocks[block_number]
        self.add_transactions_to_block(block, new_transactions)
        old_sha256 = block.sha256
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()
        # Update the internal state just like in next_block
        self.tip = block
        if block.sha256 != old_sha256:
            self.block_heights[block.sha256] = self.block_heights[old_sha256]
            del self.block_heights[old_sha256]
        self.blocks[block_number] = block
        return block

    # shorthand for functions
    block = self.next_block

    # Create a new block
    block(0)
    save_spendable_output()
    yield accepted()

    # Now we need that block to mature so we can spend the coinbase.
    test = TestInstance(sync_every_block=False)
    for i in range(99):
        block(5000 + i)
        test.blocks_and_transactions.append([self.tip, True])
        save_spendable_output()
    yield test

    # collect spendable outputs now to avoid cluttering the code later on
    out = []
    for i in range(100):
        out.append(get_spendable_output())

    # Let's build some blocks and test them.
    for i in range(15):
        n = i + 1
        block(n)
        yield accepted()

    # Start moving MTP forward
    bfork = block(5555)
    bfork.nTime = MAGNETIC_ANOMALY_START_TIME - 1
    update_block(5555)
    yield accepted()

    # Get to one block of the Nov 15, 2018 HF activation
    for i in range(5):
        block(5100 + i)
        test.blocks_and_transactions.append([self.tip, True])
    yield test

    # Check that the MTP is just before the configured fork point.
    assert_equal(node.getblockheader(node.getbestblockhash())['mediantime'],
                 MAGNETIC_ANOMALY_START_TIME - 1)

    # Check that blocks with small transactions and non-push-only signatures
    # are still accepted.
    small_tx_block = block(4444, out[0], MIN_TX_SIZE - 1,
                           pushonly=False, cleanstack=False)

    # Are small txs accepted to the mempool?
    # (it's copied to make it pass pushonly and cleanstack)
    small_tx = copy.deepcopy(small_tx_block.vtx[1])
    small_tx.vin[0].scriptSig = CScript()
    small_tx.rehash()
    node.sendrawtransaction(ToHex(small_tx), True)
    assert_equal(len(small_tx_block.vtx[1].serialize()), MIN_TX_SIZE - 1)
    yield accepted()

    # Now MTP is exactly the fork time. Small transactions are now rejected.
    assert_equal(node.getblockheader(node.getbestblockhash())['mediantime'],
                 MAGNETIC_ANOMALY_START_TIME)

    # Now that the fork activated, it is not possible to have
    # small transactions anymore.
    small_tx_block = block(4445, out[1], MIN_TX_SIZE - 1)
    assert_equal(len(small_tx_block.vtx[1].serialize()), MIN_TX_SIZE - 1)
    assert_raises_jsonrpc(RPC_VERIFY_REJECTED, "bad-txns-undersize",
                          node.sendrawtransaction, ToHex(small_tx_block.vtx[1]), True)
    yield rejected(RejectResult(16, b'bad-txns-undersize'))

    # Rewind bad block.
    tip(4444)

    # Now that the fork activated, it is not possible to have
    # non-push-only transactions.
    non_pushonly_tx_block = block(4446, out[1], MIN_TX_SIZE, pushonly=False)
    yield rejected(RejectResult(16, b'blk-bad-inputs'))

    # Rewind bad block.
    tip(4444)

    # Now that the fork activated, it is not possible to have
    # non-clean-stack transactions.
    non_cleanstack_tx_block = block(4447, out[1], MIN_TX_SIZE, cleanstack=False)
    yield rejected(RejectResult(16, b'blk-bad-inputs'))

    # Rewind bad block.
    tip(4444)

    # But large transactions are still ok.
    large_tx_block = block(3333, out[1], MIN_TX_SIZE)
    assert_equal(len(large_tx_block.vtx[1].serialize()), MIN_TX_SIZE)
    yield accepted()
def run_test(self): passphrase = "WalletPassphrase" passphrase2 = "SecondWalletPassphrase" # Make sure the wallet isn't encrypted first address = self.nodes[0].getnewaddress() privkey = self.nodes[0].dumpprivkey(address, self.get_otp(address)) assert_equal(privkey[:1], "c") assert_equal(len(privkey), 52) assert_raises_jsonrpc( -15, "Error: running with an unencrypted wallet, but walletpassphrase was called", self.nodes[0].walletpassphrase, 'ff', 1) assert_raises_jsonrpc( -15, "Error: running with an unencrypted wallet, but walletpassphrasechange was called.", self.nodes[0].walletpassphrasechange, 'ff', 'ff') # Encrypt the wallet assert_raises_jsonrpc( -1, "encryptwallet <passphrase>\nEncrypts the wallet with <passphrase>.", self.nodes[0].encryptwallet, '') self.nodes[0].encryptwallet(passphrase) bitcoind_processes[0].wait() self.nodes = start_nodes(self.num_nodes, self.options.tmpdir) # Test that the wallet is encrypted otp = self.get_otp(address) assert_raises_jsonrpc( -13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address, otp) assert_raises_jsonrpc( -15, "Error: running with an encrypted wallet, but encryptwallet was called.", self.nodes[0].encryptwallet, 'ff') assert_raises_jsonrpc( -1, "walletpassphrase <passphrase> <timeout>\nStores the wallet decryption key in memory for <timeout> seconds.", self.nodes[0].walletpassphrase, '', 1) assert_raises_jsonrpc( -1, "walletpassphrasechange <oldpassphrase> <newpassphrase>\nChanges the wallet passphrase from <oldpassphrase> to <newpassphrase>.", self.nodes[0].walletpassphrasechange, '', 'ff') # Check that walletpassphrase works self.nodes[0].walletpassphrase(passphrase, 2) otp = self.get_otp(address) assert_equal(privkey, self.nodes[0].dumpprivkey(address, otp)) # Check that the timeout is right time.sleep(3) otp = self.get_otp(address) assert_raises_jsonrpc( -13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address, otp) # Test wrong passphrase assert_raises_jsonrpc(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase + "wrong", 10) # Test walletlock self.nodes[0].walletpassphrase(passphrase, 84600) otp = self.get_otp(address) assert_equal(privkey, self.nodes[0].dumpprivkey(address, otp)) self.nodes[0].walletlock() otp = self.get_otp(address) assert_raises_jsonrpc( -13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address, otp) # Test passphrase changes self.nodes[0].walletpassphrasechange(passphrase, passphrase2) assert_raises_jsonrpc(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase, 10) self.nodes[0].walletpassphrase(passphrase2, 10) otp = self.get_otp(address) assert_equal(privkey, self.nodes[0].dumpprivkey(address, otp))
def run_test(self): self.sporkAddress = self.nodes[0].getaccountaddress("") self.mine_quorum() self.wait_for_chainlocked_block_all_nodes( self.nodes[0].getbestblockhash()) self.nodes[0].generate(1000 - self.nodes[0].getblockcount()) for i in range(0, 3): mintTxids = self.nodes[0].mintlelantus(1) for mintTxid in mintTxids: mintTx = self.nodes[0].getrawtransaction(mintTxid, 1) val = 0 for vi in mintTx['vin']: val += vi['valueSat'] if val > 10000: break val = Decimal((val - 10000) / 1e+8).quantize(Decimal('1e-7')) assert (self.wait_for_instantlock(mintTxid, self.nodes[0])) mintDspend = self.nodes[0].createrawtransaction( mintTx['vin'], {self.nodes[0].getnewaddress(): str(val)}) assert_raises_jsonrpc(-26, 'tx-txlock-conflict', self.nodes[0].sendrawtransaction, mintDspend) self.nodes[0].generate(3) assert (self.nodes[0].getrawtransaction(mintTxid, True)['confirmations'] > 0) jsplitTxid = self.nodes[0].joinsplit({self.sporkAddress: 0.1}) assert (self.wait_for_instantlock(jsplitTxid, self.nodes[0])) self.nodes[0].stop() bitcoind_processes[0].wait() self.nodes[0] = start_node(0, self.options.tmpdir, ["-zapwallettxes=1"]) for i in range(1, self.num_nodes): if i < len(self.nodes) and self.nodes[i] is not None: connect_nodes_bi(self.nodes, 0, i) jsplitTx1id = self.nodes[0].joinsplit({ self.sporkAddress: 0.11 }) # This uses the already islocked coin serial. No islock expected. self.wait_for_instantlock(jsplitTx1id, self.nodes[1], False, 5, True) jsplitTx2id = self.nodes[0].joinsplit( {self.sporkAddress: 0.11}) # This uses a new coin serial. An islock is expected. self.wait_for_instantlock(jsplitTx2id, self.nodes[1], True, 5, True) # Disabling IS self.nodes[0].importprivkey(self.sporkprivkey) self.nodes[0].spork( self.sporkprivkey, self.sporkAddress, {"disable": { "instantsend": self.nodes[0].getblockcount() + 3 }}) self.nodes[0].generate(3) sync_blocks(self.nodes) mintTxid = self.nodes[0].mintlelantus(1) self.wait_for_instantlock(mintTxid, self.nodes[0], False, 15, do_assert=True) #There should be no islock rawTxDspend = self.nodes[0].createrawtransaction( mintTx['vin'], {self.nodes[0].getnewaddress(): 0.999}) assert_raises_jsonrpc(-25, 'Missing inputs', self.nodes[0].sendrawtransaction, rawTxDspend)
def manual_test(self, node_number, use_timestamp):
    # at this point, node has 995 blocks and has not yet run in prune mode
    node = self.nodes[node_number] = start_node(
        node_number, self.options.tmpdir, ["-debug=0"], timewait=900)
    assert_equal(node.getblockcount(), 995)
    assert_raises_jsonrpc(-1, "not in prune mode",
                          node.pruneblockchain, 500)
    self.stop_node(node_number)

    # now re-start in manual pruning mode
    node = self.nodes[node_number] = start_node(
        node_number, self.options.tmpdir, ["-debug=0", "-prune=1"], timewait=900)
    assert_equal(node.getblockcount(), 995)

    def height(index):
        if use_timestamp:
            return node.getblockheader(node.getblockhash(index))["time"] + RESCAN_WINDOW
        else:
            return index

    def prune(index, expected_ret=None):
        ret = node.pruneblockchain(height(index))
        # Check the return value. When use_timestamp is True, just check
        # that the return value is less than or equal to the expected
        # value, because when more than one block is generated per second,
        # a timestamp will not be granular enough to uniquely identify an
        # individual block.
        if expected_ret is None:
            expected_ret = index
        if use_timestamp:
            assert_greater_than(ret, 0)
            assert_greater_than(expected_ret + 1, ret)
        else:
            assert_equal(ret, expected_ret)

    def has_block(index):
        return os.path.isfile(
            self.options.tmpdir + "/node{}/regtest/blocks/blk{:05}.dat".format(node_number, index))

    # should not prune because chain tip of node 3 (995) < PruneAfterHeight (1000)
    assert_raises_jsonrpc(-1, "Blockchain is too short for pruning",
                          node.pruneblockchain, height(500))

    # mine 6 blocks so we are at height 1001 (i.e., above PruneAfterHeight)
    node.generate(6)
    assert_equal(node.getblockchaininfo()["blocks"], 1001)

    # negative heights should raise an exception
    assert_raises_jsonrpc(-8, "Negative", node.pruneblockchain, -10)

    # height=100 too low to prune first block file so this is a no-op
    prune(100)
    if not has_block(0):
        raise AssertionError("blk00000.dat is missing when should still be there")

    # Does nothing
    node.pruneblockchain(height(0))
    if not has_block(0):
        raise AssertionError("blk00000.dat is missing when should still be there")

    # height=141 should prune first file
    prune(141)
    if has_block(0):
        raise AssertionError("blk00000.dat is still there, should be pruned by now")
    if not has_block(1):
        raise AssertionError("blk00001.dat is missing when should still be there")

    # height=282 should prune second file
    prune(282)
    if has_block(1):
        raise AssertionError("blk00001.dat is still there, should be pruned by now")

    # height=1000 should not prune anything more, because tip-288 is in blk00002.dat.
    prune(1000)
    if has_block(2):
        raise AssertionError("blk00002.dat is still there, should be pruned by now")

    # advance the tip so blk00002.dat and blk00003.dat can be pruned
    # (the last 288 blocks should now be in blk00004.dat)
    node.generate(288)
    prune(1000)
    for fnum in range(7):
        if has_block(fnum):
            raise AssertionError(
                f"blk0000{fnum}.dat is still there, should be pruned by now")
    if not has_block(7):
        raise AssertionError("blk00007.dat is missing when should still be there")

    # stop node, start back up with auto-prune at 2200MB, make sure still runs
    self.stop_node(node_number)
    self.nodes[node_number] = start_node(
        node_number, self.options.tmpdir, ["-debug=0", "-prune=2200"], timewait=900)

    print("Success")