def erase_three(self):
    """Delete wallet.dat from the first three nodes' data directories.

    Simulates wallet loss on nodes 0-2 so the test can exercise restoring
    each wallet from backup.
    """
    # The wallet lives at the same relative path under every node's
    # network directory, so a single loop replaces three copy-pasted calls.
    for node in self.nodes[:3]:
        os.remove(
            os.path.join(node.datadir, NetworkDirName(), 'wallets', 'wallet.dat'))
def setup_network(self):
    """Start the nodes and wire them into the topology this test expects."""
    self.setup_nodes()
    # Node 2's raw block files live here; the pruning checks inspect this path.
    self.prunedir = os.path.join(
        self.nodes[2].datadir, NetworkDirName(), 'blocks', '')
    # Ring between nodes 0-1-2, plus direct links from node 0 to nodes 3 and 4.
    for src, dst in ((0, 1), (1, 2), (2, 0), (0, 3), (0, 4)):
        connect_nodes(self.nodes[src], dst)
    sync_blocks(self.nodes[0:5])
def run_test(self):
    """Verify -blocksdir handling: startup must fail when the directory is
    missing, and block data must land under it once it exists (while the
    block index stays inside the datadir)."""
    # Recreate node 0's datadir from scratch so no stale blocks interfere.
    self.stop_node(0)
    shutil.rmtree(self.nodes[0].datadir)
    initialize_datadir(self.options.tmpdir, 0)
    self.writeGenesisBlockToFile(self.nodes[0].datadir)

    # Fixed typo: "non exiting" -> "non-existing" (log message only).
    self.log.info("Starting with non-existing blocksdir ...")
    blocksdir_path = os.path.join(self.options.tmpdir, 'blocksdir')
    # The node must refuse to start while the blocks directory is absent.
    # NOTE: the expected message below must match the node's output exactly.
    self.nodes[0].assert_start_raises_init_error(
        ["-blocksdir=" + blocksdir_path],
        'Error: Specified blocks directory "{}" does not exist.'.format(
            blocksdir_path))

    os.mkdir(blocksdir_path)
    # Fixed typo: "exiting" -> "existing" (log message only).
    self.log.info("Starting with existing blocksdir ...")
    self.start_node(0, ["-blocksdir=" + blocksdir_path])
    self.log.info("mining blocks..")
    self.nodes[0].generate(10, self.signblockprivkey)
    # Block data goes under the custom blocksdir...
    assert os.path.isfile(
        os.path.join(blocksdir_path, NetworkDirName(), "blocks", "blk00000.dat"))
    # ...while the block index remains inside the node's datadir.
    assert os.path.isdir(
        os.path.join(self.nodes[0].datadir, NetworkDirName(), "blocks", "index"))
def read_logs(tmp_dir):
    """Reads log files.

    Delegates to generator function get_log_events() to provide individual
    log events for each of the input log files.
    """
    # Start with the framework's own log, then probe node0, node1, ...
    # until the first node directory without a debug.log.
    log_sources = [("test", "%s/test_framework.log" % tmp_dir)]
    node_num = 0
    while True:
        debug_log = "{}/node{}/{}/debug.log".format(
            tmp_dir, node_num, NetworkDirName())
        if not os.path.isfile(debug_log):
            break
        log_sources.append(("node%d" % node_num, debug_log))
        node_num += 1
    # Merge the per-file event streams into one chronologically-sorted stream.
    return heapq.merge(*[get_log_events(name, path) for name, path in log_sources])
def run_test(self):
    """Restore a stale wallet backup and verify keypool recovery.

    Node 1's wallet is backed up, then more keys than the backup's keypool
    holds are handed out and funded.  After restoring the old backup the
    wallet must rediscover both payments and mark every key up to the
    highest used one as used.
    """
    wallet_path = os.path.join(self.nodes[1].datadir, NetworkDirName(),
                               "wallets", "wallet.dat")
    wallet_backup_path = os.path.join(self.nodes[1].datadir, "wallet.bak")
    self.nodes[0].generate(1, self.signblockprivkey_wif)

    self.log.info("Make backup of wallet")
    # Stop the node first so wallet.dat is flushed and safe to copy.
    self.stop_node(1)
    shutil.copyfile(wallet_path, wallet_backup_path)
    self.start_node(1, self.extra_args[1])
    connect_nodes_bi(self.nodes, 0, 1)

    self.log.info("Generate keys for wallet")
    # Consume 90 keys (within the backup's keypool)...
    for _ in range(90):
        addr_oldpool = self.nodes[1].getnewaddress()
    # ...then 20 more, which lie beyond what the stale backup knows about.
    for _ in range(20):
        addr_extpool = self.nodes[1].getnewaddress()

    self.log.info("Send funds to wallet")
    # Fund one address from each group so both must be recovered later.
    self.nodes[0].sendtoaddress(addr_oldpool, 10)
    self.nodes[0].generate(1, self.signblockprivkey_wif)
    self.nodes[0].sendtoaddress(addr_extpool, 5)
    self.nodes[0].generate(1, self.signblockprivkey_wif)
    sync_blocks(self.nodes)

    self.log.info("Restart node with wallet backup")
    self.stop_node(1)
    # Overwrite the live wallet with the stale backup.
    shutil.copyfile(wallet_backup_path, wallet_path)
    self.start_node(1, self.extra_args[1])
    connect_nodes_bi(self.nodes, 0, 1)
    self.sync_all()

    self.log.info("Verify keypool is restored and balance is correct")
    assert_equal(self.nodes[1].getbalance(), 15)
    assert_equal(self.nodes[1].listtransactions()[0]['category'], "receive")
    # Check that we have marked all keys up to the used keypool key as used
    assert_equal(
        self.nodes[1].getaddressinfo(
            self.nodes[1].getnewaddress())['hdkeypath'],
        "m/0'/0'/110'")
def run_test(self):
    """Exercise HD wallet behavior: deterministic derivation, backup/restore,
    rescans, internal (change) chain usage, and sethdseed semantics.

    NOTE(review): loop extents below were reconstructed from collapsed
    source; they follow the upstream wallet_hd.py structure — confirm
    against the original file.
    """
    # Make sure can't switch off usehd after wallet creation
    self.stop_node(1)
    self.nodes[1].assert_start_raises_init_error(
        ['-usehd=0'],
        "Error: Error loading : You can't disable HD on an already existing HD wallet")
    self.start_node(1)
    connect_nodes_bi(self.nodes, 0, 1)

    # Make sure we use hd, keep masterkeyid
    masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
    # hdmasterkeyid is the deprecated alias of hdseedid; both must agree.
    assert_equal(masterkeyid, self.nodes[1].getwalletinfo()['hdmasterkeyid'])
    assert_equal(len(masterkeyid), 40)

    # create an internal key
    change_addr = self.nodes[1].getrawchangeaddress()
    change_addrV = self.nodes[1].getaddressinfo(change_addr)
    assert_equal(change_addrV["hdkeypath"], "m/0'/1'/0'")  # first internal child key

    # Import a non-HD private key in the HD wallet
    non_hd_add = self.nodes[0].getnewaddress()
    self.nodes[1].importprivkey(self.nodes[0].dumpprivkey(non_hd_add))

    # This should be enough to keep the master key and the non-HD key
    self.nodes[1].backupwallet(os.path.join(self.nodes[1].datadir, "hd.bak"))
    #self.nodes[1].dumpwallet(os.path.join(self.nodes[1].datadir, "hd.dump"))

    # Derive some HD addresses and remember the last
    # Also send funds to each add
    self.nodes[0].generate(1, self.signblockprivkey)
    hd_add = None
    NUM_HD_ADDS = 10
    for i in range(NUM_HD_ADDS):
        hd_add = self.nodes[1].getnewaddress()
        hd_info = self.nodes[1].getaddressinfo(hd_add)
        # External-chain keypath must advance sequentially with i.
        assert_equal(hd_info["hdkeypath"], "m/0'/0'/"+str(i)+"'")
        assert_equal(hd_info["hdseedid"], masterkeyid)
        assert_equal(hd_info["hdmasterkeyid"], masterkeyid)
        self.nodes[0].sendtoaddress(hd_add, 1)
        self.nodes[0].generate(1, self.signblockprivkey)
    self.nodes[0].sendtoaddress(non_hd_add, 1)
    self.nodes[0].generate(1, self.signblockprivkey)

    # create an internal key (again)
    change_addr = self.nodes[1].getrawchangeaddress()
    change_addrV = self.nodes[1].getaddressinfo(change_addr)
    assert_equal(change_addrV["hdkeypath"], "m/0'/1'/1'")  # second internal child key

    self.sync_all()
    # 10 HD payments of 1 each plus 1 to the imported non-HD key.
    assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)

    self.log.info("Restore backup ...")
    self.stop_node(1)
    # We need to delete the complete chain state (blocks + chainstate);
    # otherwise node1 would auto-recover all funds and flag the keypool
    # keys as used.
    shutil.rmtree(os.path.join(self.nodes[1].datadir, NetworkDirName(), "blocks"))
    shutil.rmtree(os.path.join(self.nodes[1].datadir, NetworkDirName(), "chainstate"))
    shutil.copyfile(
        os.path.join(self.nodes[1].datadir, "hd.bak"),
        os.path.join(self.nodes[1].datadir, NetworkDirName(), "wallets", "wallet.dat"))
    self.start_node(1)

    # Assert that derivation is deterministic
    hd_add_2 = None
    for i in range(NUM_HD_ADDS):
        hd_add_2 = self.nodes[1].getnewaddress()
        hd_info_2 = self.nodes[1].getaddressinfo(hd_add_2)
        assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/"+str(i)+"'")
        assert_equal(hd_info_2["hdseedid"], masterkeyid)
        assert_equal(hd_info_2["hdmasterkeyid"], masterkeyid)
    # The last re-derived address must equal the last original address.
    assert_equal(hd_add, hd_add_2)
    connect_nodes_bi(self.nodes, 0, 1)
    self.sync_all()

    # Needs rescan
    self.stop_node(1)
    self.start_node(1, extra_args=self.extra_args[1] + ['-rescan'])
    assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)

    # Try a RPC based rescan
    self.stop_node(1)
    shutil.rmtree(os.path.join(self.nodes[1].datadir, NetworkDirName(), "blocks"))
    shutil.rmtree(os.path.join(self.nodes[1].datadir, NetworkDirName(), "chainstate"))
    shutil.copyfile(
        os.path.join(self.nodes[1].datadir, "hd.bak"),
        os.path.join(self.nodes[1].datadir, NetworkDirName(), "wallets", "wallet.dat"))
    self.start_node(1, extra_args=self.extra_args[1])
    connect_nodes_bi(self.nodes, 0, 1)
    self.sync_all()
    # Wallet automatically scans blocks older than key on startup
    assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
    out = self.nodes[1].rescanblockchain(0, 1)
    assert_equal(out['start_height'], 0)
    assert_equal(out['stop_height'], 1)
    out = self.nodes[1].rescanblockchain()
    assert_equal(out['start_height'], 0)
    assert_equal(out['stop_height'], self.nodes[1].getblockcount())
    assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)

    # send a tx and make sure its using the internal chain for the changeoutput
    txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
    outs = self.nodes[1].decoderawtransaction(
        self.nodes[1].gettransaction(txid)['hex'])['vout']
    keypath = ""
    for out in outs:
        # The non-payment output (value != 1) is the change output.
        if out['value'] != 1:
            keypath = self.nodes[1].getaddressinfo(
                out['scriptPubKey']['addresses'][0])['hdkeypath']
    # Change must come from the internal chain m/0'/1'.
    assert_equal(keypath[0:7], "m/0'/1'")

    # Generate a new HD seed on node 1 and make sure it is set
    orig_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
    self.nodes[1].sethdseed()
    new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
    assert orig_masterkeyid != new_masterkeyid
    addr = self.nodes[1].getnewaddress()
    assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/0\'/0\'/0\'')  # Make sure the new address is the first from the keypool
    self.nodes[1].keypoolrefill(1)  # Fill keypool with 1 key

    # Set a new HD seed on node 1 without flushing the keypool
    new_seed = self.nodes[0].dumpprivkey(self.nodes[0].getnewaddress())
    orig_masterkeyid = new_masterkeyid
    self.nodes[1].sethdseed(False, new_seed)
    new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
    assert orig_masterkeyid != new_masterkeyid
    addr = self.nodes[1].getnewaddress()
    # With flush=False the next address still comes from the OLD seed's pool.
    assert_equal(orig_masterkeyid, self.nodes[1].getaddressinfo(addr)['hdseedid'])
    assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/0\'/0\'/1\'')  # Make sure the new address continues previous keypool

    # Check that the next address is from the new seed
    self.nodes[1].keypoolrefill(1)
    next_addr = self.nodes[1].getnewaddress()
    assert_equal(new_masterkeyid, self.nodes[1].getaddressinfo(next_addr)['hdseedid'])
    assert_equal(self.nodes[1].getaddressinfo(next_addr)['hdkeypath'], 'm/0\'/0\'/0\'')  # Make sure the new address is not from previous keypool
    assert next_addr != addr

    # Sethdseed parameter validity
    assert_raises_rpc_error(-1, 'sethdseed', self.nodes[0].sethdseed, False, new_seed, 0)
    assert_raises_rpc_error(-5, "Invalid private key", self.nodes[1].sethdseed, False,
                            "not_wif")
    assert_raises_rpc_error(-1, "JSON value is not a boolean as expected",
                            self.nodes[1].sethdseed, "Not_bool")
    assert_raises_rpc_error(-1, "JSON value is not a string as expected",
                            self.nodes[1].sethdseed, False, True)
    assert_raises_rpc_error(-5, "Already have this key",
                            self.nodes[1].sethdseed, False, new_seed)
    assert_raises_rpc_error(-5, "Already have this key",
                            self.nodes[1].sethdseed, False,
                            self.nodes[1].dumpprivkey(self.nodes[1].getnewaddress()))
def run_test(self):
    """Exercise the scantxoutset RPC: fund plain keys plus derived child keys
    of a known tprv, delete the wallet, then verify the UTXO-set scanner
    finds the exact amounts via pkh/combo/addr descriptors, hardened
    markers (' and h interchangeably), and ranged descriptors.
    """
    self.log.info("Mining blocks...")
    self.nodes[0].generate(1, self.signblockprivkey_wif)

    # Three plain (non-HD-derived for scanning purposes) keys to fund.
    addr_LEGACY1 = self.nodes[0].getnewaddress("")
    pubk1 = self.nodes[0].getaddressinfo(addr_LEGACY1)['pubkey']
    addr_LEGACY2 = self.nodes[0].getnewaddress("")
    pubk2 = self.nodes[0].getaddressinfo(addr_LEGACY2)['pubkey']
    addr_LEGACY3 = self.nodes[0].getnewaddress("")
    pubk3 = self.nodes[0].getaddressinfo(addr_LEGACY3)['pubkey']
    # Power-of-two amounts make each expected scan total a unique sum.
    self.nodes[0].sendtoaddress(addr_LEGACY1, 0.001)
    self.nodes[0].sendtoaddress(addr_LEGACY2, 0.002)
    self.nodes[0].sendtoaddress(addr_LEGACY3, 0.004)

    #send to child keys of tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK
    self.nodes[0].sendtoaddress("mkHV1C6JLheLoUSSZYk7x3FH5tnx9bu7yc", 0.008)  # (m/0'/0'/0')
    self.nodes[0].sendtoaddress("mipUSRmJAj2KrjSvsPQtnP8ynUon7FhpCR", 0.016)  # (m/0'/0'/1')
    self.nodes[0].sendtoaddress("n37dAGe6Mq1HGM9t4b6rFEEsDGq7Fcgfqg", 0.032)  # (m/0'/0'/1500')
    self.nodes[0].sendtoaddress("mqS9Rpg8nNLAzxFExsgFLCnzHBsoQ3PRM6", 0.064)  # (m/0'/0'/0)
    self.nodes[0].sendtoaddress("mnTg5gVWr3rbhHaKjJv7EEEc76ZqHgSj4S", 0.128)  # (m/0'/0'/1)
    self.nodes[0].sendtoaddress("mketCd6B9U9Uee1iCsppDJJBHfvi6U6ukC", 0.256)  # (m/0'/0'/1500)
    self.nodes[0].sendtoaddress("mj8zFzrbBcdaWXowCQ1oPZ4qioBVzLzAp7", 0.512)  # (m/1/1/0')
    self.nodes[0].sendtoaddress("mfnKpKQEftniaoE1iXuMMePQU3PUpcNisA", 1.024)  # (m/1/1/1')
    self.nodes[0].sendtoaddress("mou6cB1kaP1nNJM1sryW6YRwnd4shTbXYQ", 2.048)  # (m/1/1/1500')
    self.nodes[0].sendtoaddress("mtfUoUax9L4tzXARpw1oTGxWyoogp52KhJ", 4.096)  # (m/1/1/0)
    self.nodes[0].sendtoaddress("mxp7w7j8S1Aq6L8StS2PqVvtt4HGxXEvdy", 8.192)  # (m/1/1/1)
    self.nodes[0].sendtoaddress("mpQ8rokAhp1TAtJQR6F6TaUmjAWkAWYYBq", 16.384)  # (m/1/1/1500)
    self.nodes[0].generate(1, self.signblockprivkey_wif)

    self.log.info("Stop node, remove wallet, mine again some blocks...")
    # Remove the wallet so the scan results cannot come from wallet metadata.
    self.stop_node(0)
    if(os.path.exists(os.path.join(self.nodes[0].datadir, NetworkDirName(), 'wallets'))):
        shutil.rmtree(os.path.join(self.nodes[0].datadir, NetworkDirName(), 'wallets'))
    self.start_node(0)
    self.nodes[0].generate(1, self.signblockprivkey_wif)
    self.restart_node(0, ['-nowallet'])

    self.log.info("Test if we have found the non HD unspent outputs.")
    # 0.001 + 0.002 + 0.004 = 0.007, regardless of descriptor flavor.
    assert_equal(self.nodes[0].scantxoutset("start", [
        "pkh(" + pubk1 + ")", "pkh(" + pubk2 + ")", "pkh(" + pubk3 + ")"])['total_amount'],
        {'TPC': Decimal('0.00700000')})
    assert_equal(self.nodes[0].scantxoutset("start", [
        "combo(" + pubk1 + ")", "combo(" + pubk2 + ")", "combo(" + pubk3 + ")"])['total_amount'],
        {'TPC': Decimal('0.007')})
    assert_equal(self.nodes[0].scantxoutset("start", [
        "addr(" + addr_LEGACY1 + ")", "addr(" + addr_LEGACY2 + ")", "addr(" + addr_LEGACY3 + ")"])['total_amount'],
        {'TPC': Decimal('0.007')})
    assert_equal(self.nodes[0].scantxoutset("start", [
        "addr(" + addr_LEGACY1 + ")", "addr(" + addr_LEGACY2 + ")", "combo(" + pubk3 + ")"])['total_amount'],
        {'TPC': Decimal('0.007')})

    self.log.info("Test extended key derivation.")
    # Single-path hardened derivation; ' and h markers are interchangeable.
    assert_equal(self.nodes[0].scantxoutset("start", [
        "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/0h)"])['total_amount'],
        {'TPC': Decimal('0.008')})
    assert_equal(self.nodes[0].scantxoutset("start", [
        "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0'/1h)"])['total_amount'],
        {'TPC': Decimal('0.016')})
    assert_equal(self.nodes[0].scantxoutset("start", [
        "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/1500')"])['total_amount'],
        {'TPC': Decimal("0.032")})
    assert_equal(self.nodes[0].scantxoutset("start", [
        "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0h/0)"])['total_amount'],
        {'TPC': Decimal("0.064")})
    assert_equal(self.nodes[0].scantxoutset("start", [
        "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/1)"])['total_amount'],
        {'TPC': Decimal("0.128")})
    assert_equal(self.nodes[0].scantxoutset("start", [
        "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/1500)"])['total_amount'],
        {'TPC': Decimal("0.256")})
    # Ranged descriptors: range 1499 misses the .../1500 output, 1500 includes it.
    assert_equal(self.nodes[0].scantxoutset("start", [
        {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/*h)", "range": 1499}])['total_amount'],
        {'TPC': Decimal("0.024")})
    assert_equal(self.nodes[0].scantxoutset("start", [
        {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0'/*h)", "range": 1500}])['total_amount'],
        {'TPC': Decimal("0.056")})
    assert_equal(self.nodes[0].scantxoutset("start", [
        {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/*)", "range": 1499}])['total_amount'],
        {'TPC': Decimal("0.192")})
    assert_equal(self.nodes[0].scantxoutset("start", [
        {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/*)", "range": 1500}])['total_amount'],
        {'TPC': Decimal("0.448")})
    # Unhardened m/1/1 branch — works with both tprv and the matching tpub.
    assert_equal(self.nodes[0].scantxoutset("start", [
        "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0')"])['total_amount'],
        {'TPC': Decimal("0.512")})
    assert_equal(self.nodes[0].scantxoutset("start", [
        "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1')"])['total_amount'],
        {'TPC': Decimal("1.024")})
    assert_equal(self.nodes[0].scantxoutset("start", [
        "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1500h)"])['total_amount'],
        {'TPC': Decimal("2.048")})
    assert_equal(self.nodes[0].scantxoutset("start", [
        "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)"])['total_amount'],
        {'TPC': Decimal("4.096")})
    assert_equal(self.nodes[0].scantxoutset("start", [
        "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1)"])['total_amount'],
        {'TPC': Decimal("8.192")})
    assert_equal(self.nodes[0].scantxoutset("start", [
        "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1500)"])['total_amount'],
        {'TPC': Decimal("16.384")})
    assert_equal(self.nodes[0].scantxoutset("start", [
        "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/0)"])['total_amount'],
        {'TPC': Decimal("4.096")})
    assert_equal(self.nodes[0].scantxoutset("start", [
        "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/1)"])['total_amount'],
        {'TPC': Decimal("8.192")})
    assert_equal(self.nodes[0].scantxoutset("start", [
        "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/1500)"])['total_amount'],
        {'TPC': Decimal("16.384")})
    assert_equal(self.nodes[0].scantxoutset("start", [
        {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*')", "range": 1499}])['total_amount'],
        {'TPC': Decimal("1.536")})
    assert_equal(self.nodes[0].scantxoutset("start", [
        {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*')", "range": 1500}])['total_amount'],
        {'TPC': Decimal("3.584")})
    assert_equal(self.nodes[0].scantxoutset("start", [
        {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)", "range": 1499}])['total_amount'],
        {'TPC': Decimal("12.288")})
    assert_equal(self.nodes[0].scantxoutset("start", [
        {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)", "range": 1500}])['total_amount'],
        {'TPC': Decimal("28.672")})
    assert_equal(self.nodes[0].scantxoutset("start", [
        {"desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1499}])['total_amount'],
        {'TPC': Decimal("12.288")})
    assert_equal(self.nodes[0].scantxoutset("start", [
        {"desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1500}])['total_amount'],
        {'TPC': Decimal("28.672")})
def run_test(self):
    """Exercise startup argument handling: -addseeder/-dnsseed interaction,
    -networkid (config file vs. command line), and -datadir resolution."""
    self.stop_node(0)
    self.test_config_file_parser()

    self.log.info("-dnsseeder and -dnsseed tests")
    # -dnsseeder and -dnsseed tests
    # Seeders alone are fine...
    self.start_node(0, ['-addseeder=seed.tapyrus.dev.chaintope.com','-addseeder=static-seed.tapyrus.dev.chaintope.com'])
    self.stop_node(0)
    # ...but configuring seeders while DNS seeding is disabled must fail.
    self.nodes[0].assert_start_raises_init_error(
        ['-addseeder=seed.tapyrus.dev.chaintope.com','-addseeder=static-seed.tapyrus.dev.chaintope.com', '-dnsseed=0'],
        'Error: DNS seeding is disabled. But DNS seeders are configured in -addseeder.')
    self.start_node(0, ['-addseeder=seed.tapyrus.dev.chaintope.com','-addseeder=static-seed.tapyrus.dev.chaintope.com', '-dnsseed=1'])
    self.stop_node(0)

    self.log.info("-networkid tests")
    conf_file = os.path.join(self.nodes[0].datadir, "tapyrus.conf")
    #backup config file
    conf_file_contents = open(conf_file, encoding='utf8').read()
    # conf file with -networkid = 4
    with open(conf_file, 'w', encoding='utf8') as f:
        f.write("networkid=4\n")
        f.write("rpcuser=rpcuser\n")
        f.write("rpcpassword=pass\n")
        f.write(conf_file_contents)
    self.writeGenesisBlockToFile(self.nodes[0].datadir, networkid=4)
    self.start_node(0, ['-datadir=%s' % self.nodes[0].datadir])
    # networkid=4 from the config file selects the "dev-4" network directory.
    assert os.path.exists(os.path.join(self.nodes[0].datadir, "dev-4", "blocks"))
    self.stop_node(0)
    # -networkid = 10 cmd line parameter
    #os.mkdir(os.path.join(self.nodes[0].datadir,"dev-10"))
    self.writeGenesisBlockToFile(self.nodes[0].datadir, networkid=10)
    self.start_node(0, ['-networkid=10', '-datadir=%s' % self.nodes[0].datadir])
    assert os.path.exists(os.path.join(self.nodes[0].datadir, "dev-10", "blocks"))
    self.stop_node(0)
    #restore config file
    with open(conf_file, 'w', encoding='utf8') as f:
        f.write(conf_file_contents)

    self.log.info("-datadir tests")
    # Remove the -datadir argument so it doesn't override the config file
    self.nodes[0].args = [arg for arg in self.nodes[0].args
                          if not arg.startswith("-datadir")]
    self.writeGenesisBlockToFile(self.nodes[0].datadir)
    default_data_dir = self.nodes[0].datadir
    new_data_dir = os.path.join(default_data_dir, 'newdatadir')
    new_data_dir_2 = os.path.join(default_data_dir, 'newdatadir2')

    # Check that using -datadir argument on non-existent directory fails
    self.nodes[0].datadir = new_data_dir
    self.nodes[0].assert_start_raises_init_error(
        ['-datadir=' + new_data_dir],
        'Error: Specified data directory "' + new_data_dir + '" does not exist.')

    # datadir needs to be set before [regtest] section
    with open(conf_file, 'w', encoding='utf8') as f:
        f.write("datadir=" + new_data_dir + "\n")
        f.write(conf_file_contents)

    # Temporarily disabled, because this test would access the user's home dir (~/.bitcoin)
    #self.nodes[0].assert_start_raises_init_error(['-conf=' + conf_file], 'Error reading configuration file: specified data directory "' + new_data_dir + '" does not exist.')

    # Create the directory and ensure the config file now works
    os.mkdir(new_data_dir)
    # Temporarily disabled, because this test would access the user's home dir (~/.bitcoin)
    #self.start_node(0, ['-conf='+conf_file, '-wallet=w1'])
    #self.stop_node(0)
    #assert os.path.exists(os.path.join(new_data_dir, 'regtest', 'wallets', 'w1'))

    # Ensure command line argument overrides datadir in conf
    os.mkdir(new_data_dir_2)
    self.writeGenesisBlockToFile(new_data_dir_2)
    self.nodes[0].datadir = new_data_dir_2
    self.start_node(0, ['-datadir='+new_data_dir_2, '-conf='+conf_file, '-wallet=w2'])
    assert os.path.exists(os.path.join(new_data_dir_2, NetworkDirName(), 'wallets', 'w2'))
    self.stop_node(0)
def has_block(index):
    # True when the numbered block file exists in the node's blocks directory.
    # `self` and `node_number` are captured from the enclosing scope.
    blk_name = "blk{:05}.dat".format(index)
    blocks_dir = os.path.join(
        self.nodes[node_number].datadir, NetworkDirName(), "blocks")
    return os.path.isfile(os.path.join(blocks_dir, blk_name))
def run_test(self):
    """End-to-end test of federation (aggregate-pubkey rotation) blocks:
    signed block acceptance/rejection around each federation change,
    invalidateblock restrictions, reorgs before/after federation blocks,
    getblockchaininfo reporting, and -reindex-chainstate / -loadblock
    restarts.
    """
    node = self.nodes[0]  # convenience reference to the node
    self.address = node.getnewaddress()
    node.add_p2p_connection(P2PDataStore())
    node.p2p.wait_for_getheaders(timeout=5)
    self.log.info("Test starting...")

    #genesis block (B0)
    self.blocks = [self.genesisBlock.hash]
    block_time = self.genesisBlock.nTime

    # Create a new blocks B1 - B10
    self.blocks += node.generate(1, self.aggprivkey_wif[0])
    create_colored_transaction(2, 100, node)
    self.blocks += node.generate(9, self.aggprivkey_wif[0])
    best_block = node.getblock(node.getbestblockhash())
    self.tip = node.getbestblockhash()

    self.log.info("First federation block")
    # B11 - Create block - aggpubkey2 - sign with aggpubkey1
    block_time = best_block["time"] + 1
    blocknew = create_block(int(self.tip, 16), create_coinbase(11), block_time, self.aggpubkeys[1])
    blocknew.solve(self.aggprivkey[0])
    self.blocks += [blocknew.hash]
    node.submitblock(bytes_to_hex_str(blocknew.serialize()))
    self.tip = self.blocks[-1]
    assert_equal(self.tip, node.getbestblockhash())
    assert (node.getblock(self.tip))

    #B -- Create block with invalid aggpubkey2 - sign with aggpubkey1 -- failure - invalid aggpubkey
    # Truncating the last byte of a pubkey makes it malformed.
    aggpubkeyInv = self.aggpubkeys[-1][:-2]
    block_time += 1
    blocknew = create_block(int(self.tip, 16), create_coinbase(12), block_time, aggpubkeyInv)
    blocknew.solve(self.aggprivkey[0])
    assert_equal(node.submitblock(bytes_to_hex_str(blocknew.serialize())), "invalid")
    assert_equal(self.tip, node.getbestblockhash())

    # B - Create block - sign with aggpubkey1 - failure - Proof verification failed
    # aggpubkey2 took effect at B11, so aggprivkey1 signatures are now rejected.
    block_time += 1
    blocknew = create_block(int(self.tip, 16), create_coinbase(12), block_time)
    blocknew.solve(self.aggprivkey[0])
    assert_equal(node.submitblock(bytes_to_hex_str(blocknew.serialize())), "invalid")
    assert_equal(self.tip, node.getbestblockhash())

    # B12 - Create block - sign with aggpubkey2
    blocknew.solve(self.aggprivkey[1])
    self.blocks += [blocknew.hash]
    node.submitblock(bytes_to_hex_str(blocknew.serialize()))
    self.tip = self.blocks[-1]
    assert_equal(self.tip, node.getbestblockhash())
    assert (node.getblock(self.tip))

    # Create a new blocks B13 - B22
    self.blocks += node.generate(10, self.aggprivkey_wif[1])
    best_block = node.getblock(node.getbestblockhash())
    self.tip = node.getbestblockhash()

    #B23 -- Create block with 1 valid transaction - sign with aggpubkey2 -- success
    block_time = best_block["time"] + 1
    blocknew = create_block(int(self.tip, 16), create_coinbase(23), block_time)
    spendHash = node.getblock(self.blocks[2])['tx'][1]
    # Pick the non-colored output (value != 100) of the colored-coin tx.
    vout = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(
        spendHash, 1)["vout"]) if vout["value"] != 100)
    rawtx = node.createrawtransaction(
        inputs=[{ "txid": spendHash, "vout": vout }],
        outputs={self.address: 1.0})
    signresult = node.signrawtransactionwithwallet(rawtx, [], "ALL", self.options.scheme)
    assert_equal(signresult["complete"], True)
    tx = CTransaction()
    tx.deserialize(BytesIO(hex_str_to_bytes(signresult['hex'])))
    blocknew.vtx += [tx]
    blocknew.hashMerkleRoot = blocknew.calc_merkle_root()
    blocknew.hashImMerkleRoot = blocknew.calc_immutable_merkle_root()
    blocknew.solve(self.aggprivkey[1])
    self.blocks.append(blocknew.hash)
    self.tip = blocknew.hash
    node.submitblock(bytes_to_hex_str(blocknew.serialize()))
    assert_equal(self.tip, node.getbestblockhash())
    assert (node.getblock(self.tip))

    #call invalidate block rpc on B23 -- success - B23 is removed from the blockchain. tip is B22
    node.invalidateblock(self.tip)
    self.tip = self.blocks[22]
    assert_equal(self.tip, node.getbestblockhash())

    #B23 -- Re Create a new block B23 -- success
    block_time += 1
    blocknew = create_block(int(self.tip, 16), create_coinbase(23), block_time)
    spendHash = node.getblock(self.blocks[2])['tx'][0]
    blocknew.vtx += [ create_transaction(node, spendHash, self.address, amount=49.0) ]
    blocknew.hashMerkleRoot = blocknew.calc_merkle_root()
    blocknew.hashImMerkleRoot = blocknew.calc_immutable_merkle_root()
    blocknew.solve(self.aggprivkey[1])
    # NOTE(review): the recreated B23 hash is stored into self.blocks[22],
    # overwriting B22's slot while self.blocks[23] keeps the invalidated
    # B23 — later reorg steps build on that stale blocks[23] deliberately;
    # confirm this indexing is intended.
    self.blocks[22] = blocknew.hash
    self.tip = blocknew.hash
    node.submitblock(bytes_to_hex_str(blocknew.serialize()))
    assert_equal(self.tip, node.getbestblockhash())
    assert (node.getblock(self.tip))

    #B -- - Create block with 1 invalid transaction - sign with aggpubkey2 -- failure
    block_time += 1
    blocknew = create_block(int(self.tip, 16), create_coinbase(23), block_time)
    spendHash = node.getblock(self.blocks[3])['tx'][0]
    blocknew.vtx += [ create_transaction(node, spendHash, self.address, amount=100.0) ] #invalid
    blocknew.hashMerkleRoot = blocknew.calc_merkle_root()
    blocknew.hashImMerkleRoot = blocknew.calc_immutable_merkle_root()
    blocknew.solve(self.aggprivkey[1])
    assert_equal(node.submitblock(bytes_to_hex_str(blocknew.serialize())), "invalid")
    assert_equal(self.tip, node.getbestblockhash())

    #B -- - Create block with 1 invalid transaction and aggpubkey3 - sign with aggpubkey2 -- failure and aggpubkey3 is not added to the list
    blocknew = create_block(int(self.tip, 16), create_coinbase(23), block_time, self.aggpubkeys[2])
    spendHash = node.getblock(self.blocks[4])['tx'][0]
    blocknew.vtx += [ create_transaction(node, spendHash, self.address, amount=100.0) ] #invalid
    blocknew.hashMerkleRoot = blocknew.calc_merkle_root()
    blocknew.hashImMerkleRoot = blocknew.calc_immutable_merkle_root()
    blocknew.solve(self.aggprivkey[1])
    assert_equal(node.submitblock(bytes_to_hex_str(blocknew.serialize())), "invalid")
    assert_equal(self.tip, node.getbestblockhash())

    # verify aggpubkey3 is not added to the list : verify that block signed using aggprivkey3 is rejected
    blocknew = create_block(int(self.tip, 16), create_coinbase(24), block_time, self.aggpubkeys[2])
    blocknew.solve(self.aggprivkey[2])
    assert_equal(node.submitblock(bytes_to_hex_str(blocknew.serialize())), "invalid")
    assert_equal(self.tip, node.getbestblockhash())

    self.log.info("Second federation block")
    #B24 -- Create block with 1 valid transaction and aggpubkey3- sign with aggpubkey2 -- success and aggpubkey3 is added to the list
    block_time += 1
    blocknew = create_block(int(self.tip, 16), create_coinbase(24), block_time, self.aggpubkeys[2])
    spendHash = node.getblock(self.blocks[4])['tx'][0]
    blocknew.vtx += [ create_transaction(node, spendHash, self.address, amount=10.0) ]
    blocknew.hashMerkleRoot = blocknew.calc_merkle_root()
    blocknew.hashImMerkleRoot = blocknew.calc_immutable_merkle_root()
    blocknew.solve(self.aggprivkey[1])
    self.blocks.append(blocknew.hash)
    self.tip = blocknew.hash
    node.submitblock(bytes_to_hex_str(blocknew.serialize()))
    assert_equal(self.tip, node.getbestblockhash())
    assert (node.getblock(self.tip))

    #B25 -- Create block with 1 valid transaction - sign with aggpubkey3 -- success
    block_time += 1
    blocknew = create_block(int(self.tip, 16), create_coinbase(25), block_time)
    spendHash = node.getblock(self.blocks[5])['tx'][0]
    blocknew.vtx += [ create_transaction(node, spendHash, self.address, amount=10.0) ]
    blocknew.hashMerkleRoot = blocknew.calc_merkle_root()
    blocknew.hashImMerkleRoot = blocknew.calc_immutable_merkle_root()
    blocknew.solve(self.aggprivkey[2])
    self.blocks.append(blocknew.hash)
    self.tip = blocknew.hash
    b25 = blocknew.hash
    node.submitblock(bytes_to_hex_str(blocknew.serialize()))
    assert_equal(self.tip, node.getbestblockhash())
    assert (node.getblock(self.tip))

    # Create a new blocks B26 - B30
    self.blocks += node.generate(5, self.aggprivkey_wif[2])
    best_block = node.getblock(node.getbestblockhash())
    self.tip = node.getbestblockhash()

    self.log.info("Verifying getblockchaininfo")
    #getblockchaininfo
    # Each entry maps an aggpubkey to the height at which it became active.
    expectedAggPubKeys = [
        { self.aggpubkeys[0]: 0 },
        { self.aggpubkeys[1]: 12 },
        { self.aggpubkeys[2]: 25 }
    ]
    blockchaininfo = node.getblockchaininfo()
    assert_equal(blockchaininfo["aggregatePubkeys"], expectedAggPubKeys)

    self.log.info("Simulate Blockchain Reorg - After the last federation block")
    #B27 -- Create block with previous block hash = B26 - sign with aggpubkey3 -- success - block is accepted but there is no re-org
    block_time += 1
    # NOTE(review): this aliases forkblocks to blocks (no copy), so the
    # index assignments below mutate self.blocks too — presumably intended.
    self.forkblocks = self.blocks
    blocknew = create_block(int(self.blocks[26], 16), create_coinbase(27), block_time)
    blocknew.solve(self.aggprivkey[2])
    node.submitblock(bytes_to_hex_str(blocknew.serialize()))
    self.forkblocks[27] = blocknew.hash
    assert_equal(self.tip, node.getbestblockhash())
    assert (node.getblock(self.tip))

    #B28 -- Create block with previous block hash = B27 - sign with aggpubkey3 -- success - block is accepted but there is no re-org
    block_time += 1
    blocknew = create_block(int(self.forkblocks[27], 16), create_coinbase(28), block_time)
    blocknew.solve(self.aggprivkey[2])
    node.submitblock(bytes_to_hex_str(blocknew.serialize()))
    self.forkblocks[28] = blocknew.hash
    assert_equal(self.tip, node.getbestblockhash())
    assert (node.getblock(self.tip))

    #B29 -- Create block with previous block hash = B28 - sign with aggpubkey3 -- success - block is accepted but there is no re-org
    block_time += 1
    blocknew = create_block(int(self.forkblocks[28], 16), create_coinbase(29), block_time)
    blocknew.solve(self.aggprivkey[2])
    node.submitblock(bytes_to_hex_str(blocknew.serialize()))
    self.forkblocks[29] = blocknew.hash
    assert_equal(self.tip, node.getbestblockhash())
    assert (node.getblock(self.tip))

    #B30 -- Create block with previous block hash = B29 - sign with aggpubkey3 -- success - block is accepted but there is no re-org
    block_time += 1
    blocknew = create_block(int(self.forkblocks[29], 16), create_coinbase(30), block_time)
    blocknew.solve(self.aggprivkey[2])
    node.submitblock(bytes_to_hex_str(blocknew.serialize()))
    self.forkblocks[30] = blocknew.hash
    assert_equal(self.tip, node.getbestblockhash())
    assert (node.getblock(self.tip))

    #B31 -- Create block with previous block hash = B30 - sign with aggpubkey3 -- success - block is accepted and re-org happens
    block_time += 1
    blocknew = create_block(int(self.forkblocks[30], 16), create_coinbase(31), block_time)
    blocknew.solve(self.aggprivkey[2])
    node.submitblock(bytes_to_hex_str(blocknew.serialize()))
    self.forkblocks.append(blocknew.hash)
    self.tip = blocknew.hash
    assert_equal(self.tip, node.getbestblockhash())
    assert (node.getblock(self.tip))
    assert_equal(blockchaininfo["aggregatePubkeys"], expectedAggPubKeys)

    self.log.info("Simulate Blockchain Reorg - Before the last federation block")
    #B24 -- Create block with previous block hash = B23 - sign with aggpubkey2 -- failure - block is in invalid chain
    block_time += 1
    blocknew = create_block(int(self.blocks[23], 16), create_coinbase(24), block_time)
    blocknew.solve(self.aggprivkey[1])
    assert_equal(node.submitblock(bytes_to_hex_str(blocknew.serialize())), "invalid")
    assert_equal(self.tip, node.getbestblockhash())
    assert (node.getblock(self.tip))

    #B25 -- Create block with previous block hash = B24 - sign with aggpubkey2 -- success - block is in invalid chain
    block_time += 1
    blocknew = create_block(int(self.blocks[24], 16), create_coinbase(25), block_time)
    blocknew.solve(self.aggprivkey[1])
    assert_equal(node.submitblock(bytes_to_hex_str(blocknew.serialize())), "invalid")
    assert_equal(self.tip, node.getbestblockhash())
    assert (node.getblock(self.tip))

    #there are 3 tips in the current blockchain
    chaintips = node.getchaintips()
    assert_equal(len(chaintips), 3)
    assert (node.getblock(self.blocks[12]))
    assert (node.getblock(self.blocks[25]))
    # The last rejected block was never stored, so getblock must fail.
    assert_raises_rpc_error(-5, "Block not found", node.getblock, blocknew.hash)

    self.log.info("Third Federation Block - active chain")
    #B32 -- Create block with aggpubkey4 - sign with aggpubkey3 -- success - aggpubkey4 is added to the list
    block_time += 1
    blocknew = create_block(int(self.tip, 16), create_coinbase(32), block_time, self.aggpubkeys[3])
    blocknew.solve(self.aggprivkey[2])
    node.submitblock(bytes_to_hex_str(blocknew.serialize()))
    self.forkblocks.append(blocknew.hash)
    self.tip = blocknew.hash
    assert_equal(self.tip, node.getbestblockhash())
    assert (node.getblock(self.tip))

    #B -- Create block - sign with aggpubkey2 -- failure - proof verification failed
    block_time += 1
    blocknew = create_block(int(self.tip, 16), create_coinbase(33), block_time)
    blocknew.solve(self.aggprivkey[1])
    assert_equal(node.submitblock(bytes_to_hex_str(blocknew.serialize())), "invalid")
    assert_equal(self.tip, node.getbestblockhash())

    #B -- Create block - sign with aggpubkey3 -- failure - proof verification failed
    blocknew.solve(self.aggprivkey[2])
    assert_equal(node.submitblock(bytes_to_hex_str(blocknew.serialize())), "invalid")
    assert_equal(self.tip, node.getbestblockhash())

    #B33 -- Create block - sign with aggpubkey4 -- success
    blocknew.solve(self.aggprivkey[3])
    node.submitblock(bytes_to_hex_str(blocknew.serialize()))
    self.forkblocks.append(blocknew.hash)
    self.tip = blocknew.hash
    assert_equal(self.tip, node.getbestblockhash())
    assert (node.getblock(self.tip))

    #B34 - B35 -- Generate 2 blocks - no aggpubkey -- chain becomes longer
    self.forkblocks += node.generate(2, self.aggprivkey_wif[3])
    self.tip = self.forkblocks[35]
    assert_equal(self.tip, node.getbestblockhash())
    assert (node.getblock(self.tip))

    self.log.info("Fourth Federation Block")
    #B36 -- Create block with aggpubkey5 - sign using aggpubkey4 -- success - aggpubkey5 is added to the list
    block_time += 1
    blocknew = create_block(int(self.tip, 16), create_coinbase(36), block_time, self.aggpubkeys[4])
    blocknew.solve(self.aggprivkey[3])
    node.submitblock(bytes_to_hex_str(blocknew.serialize()))
    self.tip = blocknew.hash
    self.forkblocks.append(blocknew.hash)
    assert_equal(self.tip, node.getbestblockhash())
    assert (node.getblock(self.tip))

    #call invalidate block rpc on B36 -- failure - B36 is a federation block
    assert_raises_rpc_error(-8, "Federation block found", node.invalidateblock, self.tip)
    assert_raises_rpc_error(-8, "Federation block found", node.invalidateblock, self.forkblocks[33])
    assert_raises_rpc_error(-8, "Federation block found", node.invalidateblock, self.blocks[29])
    assert_equal(self.tip, node.getbestblockhash())

    #B37 - Create block - sign using aggpubkey5 -- success
    block_time += 1
    blocknew = create_block(int(self.tip, 16), create_coinbase(37), block_time)
    blocknew.solve(self.aggprivkey[4])
    node.submitblock(bytes_to_hex_str(blocknew.serialize()))
    self.tip = blocknew.hash
    self.forkblocks.append(blocknew.hash)
    assert_equal(self.tip, node.getbestblockhash())
    assert (node.getblock(self.tip))

    self.log.info("Verifying getblockchaininfo")
    #getblockchaininfo
    expectedAggPubKeys = [
        { self.aggpubkeys[0]: 0 },
        { self.aggpubkeys[1]: 12 },
        { self.aggpubkeys[2]: 25 },
        { self.aggpubkeys[3]: 33 },
        { self.aggpubkeys[4]: 37 },
    ]
    blockchaininfo = node.getblockchaininfo()
    assert_equal(blockchaininfo["aggregatePubkeys"], expectedAggPubKeys)

    #B38 - B40 -- Generate 3 blocks - no aggpubkey -- chain becomes longer
    self.forkblocks += node.generate(3, self.aggprivkey_wif[4])
    self.tip = node.getbestblockhash()
    best_block = node.getblock(self.tip)

    self.log.info("Test Repeated aggpubkeys in Federation Block")
    #B41 -- Create block with aggpubkey0 - sign using aggpubkey5 -- success - aggpubkey0 is added to the list
    block_time = best_block["time"] + 1
    blocknew = create_block(int(self.tip, 16), create_coinbase(41), block_time, self.aggpubkeys[0])
    blocknew.solve(self.aggprivkey[4])
    node.submitblock(bytes_to_hex_str(blocknew.serialize()))
    self.tip = blocknew.hash
    assert_equal(self.tip, node.getbestblockhash())
    assert (node.getblock(self.tip))

    #B42 -- Create block with aggpubkey1 - sign using aggpubkey0 -- success - aggpubkey1 is added to the list
    block_time += 1
    blocknew = create_block(int(self.tip, 16), create_coinbase(42), block_time, self.aggpubkeys[1])
    blocknew.solve(self.aggprivkey[0])
    node.submitblock(bytes_to_hex_str(blocknew.serialize()))
    self.tip = blocknew.hash
    assert_equal(self.tip, node.getbestblockhash())
    assert (node.getblock(self.tip))

    self.log.info("Verifying getblockchaininfo")
    #getblockchaininfo
    # Re-added keys appear again with their new activation heights.
    expectedAggPubKeys = [
        { self.aggpubkeys[0]: 0 },
        { self.aggpubkeys[1]: 12 },
        { self.aggpubkeys[2]: 25 },
        { self.aggpubkeys[3]: 33 },
        { self.aggpubkeys[4]: 37 },
        { self.aggpubkeys[0]: 42 },
        { self.aggpubkeys[1]: 43 },
    ]
    blockchaininfo = node.getblockchaininfo()
    assert_equal(blockchaininfo["aggregatePubkeys"], expectedAggPubKeys)

    self.stop_node(0)
    self.log.info("Restarting node with '-reindex-chainstate'")
    self.start_node(0, extra_args=["-reindex-chainstate"])
    self.sync_all()

    self.stop_node(0)
    self.log.info("Restarting node with '-loadblock'")
    # Move the raw block file out of the blocks dir and reload it via -loadblock.
    shutil.copyfile(
        os.path.join(self.nodes[0].datadir, NetworkDirName(), 'blocks', 'blk00000.dat'),
        os.path.join(self.nodes[0].datadir, 'blk00000.dat'))
    os.remove(
        os.path.join(self.nodes[0].datadir, NetworkDirName(), 'blocks', 'blk00000.dat'))
    extra_args = [
        "-loadblock=%s" % os.path.join(self.nodes[0].datadir, 'blk00000.dat'),
        "-reindex"
    ]
    self.start_node(0, extra_args)
def run_test(self):
    """Exercise mempool persistence (mempool.dat) across node restarts.

    Flow:
      1. Create 5 wallet transactions on node2 and sync them to all mempools.
      2. Restart all nodes; node1 runs -persistmempool=0 and must come back
         empty, nodes 0/2 must reload their 5 transactions from mempool.dat.
      3. Restart node0 with -persistmempool=0 and verify nothing is loaded.
      4. Restart node0 normally and verify the mempool is reloaded.
      5. Verify `savemempool` re-creates a deleted mempool.dat, that the file
         can be moved to another node's datadir and loaded there, and that
         `savemempool` fails when the dump destination cannot be written.
    """
    chain_height = self.nodes[0].getblockcount()
    # Test fixture starts exactly at the premine height.
    assert_equal(chain_height, 100)

    self.log.debug("Mine a single block to get out of IBD")
    self.nodes[0].generate(1, self.signblockprivkey)
    self.sync_all()

    self.log.debug("Send 5 transactions from node2 (to its own address)")
    for i in range(5):
        self.nodes[2].sendtoaddress(self.nodes[2].getnewaddress(), Decimal("10"))
    # Balance snapshot taken BEFORE the restart; compared again after reload
    # to check wallet/mempool accounting survives persistence.
    node2_balance = self.nodes[2].getbalance()
    self.sync_all()

    self.log.debug("Verify that node0 and node1 have 5 transactions in their mempools")
    assert_equal(len(self.nodes[0].getrawmempool()), 5)
    assert_equal(len(self.nodes[1].getrawmempool()), 5)

    self.log.debug("Stop-start the nodes. Verify that node0 has the transactions in its mempool and node1 does not. Verify that node2 calculates its balance correctly after loading wallet transactions.")
    self.stop_nodes()
    # Give this node a head-start, so we can be "extra-sure" that it didn't load anything later
    # Also don't store the mempool, to keep the datadir clean
    self.start_node(1, extra_args=["-persistmempool=0"])
    self.start_node(0)
    self.start_node(2)
    # Give bitcoind a second to reload the mempool
    wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5, timeout=1)
    wait_until(lambda: len(self.nodes[2].getrawmempool()) == 5, timeout=1)
    # The others have loaded their mempool. If node_1 loaded anything, we'd probably notice by now:
    assert_equal(len(self.nodes[1].getrawmempool()), 0)

    # Verify accounting of mempool transactions after restart is correct
    self.nodes[2].syncwithvalidationinterfacequeue()  # Flush mempool to wallet
    assert_equal(node2_balance, self.nodes[2].getbalance())

    self.log.debug("Stop-start node0 with -persistmempool=0. Verify that it doesn't load its mempool.dat file.")
    self.stop_nodes()
    self.start_node(0, extra_args=["-persistmempool=0"])
    # Give bitcoind a second to reload the mempool
    time.sleep(1)
    assert_equal(len(self.nodes[0].getrawmempool()), 0)

    self.log.debug("Stop-start node0. Verify that it has the transactions in its mempool.")
    self.stop_nodes()
    self.start_node(0)
    wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5)

    # mempool.dat lives inside the per-network subdirectory of each datadir.
    mempooldat0 = os.path.join(self.nodes[0].datadir, NetworkDirName(), 'mempool.dat')
    mempooldat1 = os.path.join(self.nodes[1].datadir, NetworkDirName(), 'mempool.dat')

    self.log.debug("Remove the mempool.dat file. Verify that savemempool to disk via RPC re-creates it")
    os.remove(mempooldat0)
    self.nodes[0].savemempool()
    assert os.path.isfile(mempooldat0)

    self.log.debug("Stop nodes, make node1 use mempool.dat from node0. Verify it has 5 transactions")
    os.rename(mempooldat0, mempooldat1)
    self.stop_nodes()
    self.start_node(1, extra_args=[])
    wait_until(lambda: len(self.nodes[1].getrawmempool()) == 5)

    self.log.debug("Prevent bitcoind from writing mempool.dat to disk. Verify that `savemempool` fails")
    # to test the exception we are creating a tmp folder called mempool.dat.new
    # which is an implementation detail that could change and break this test
    mempooldotnew1 = mempooldat1 + '.new'
    os.mkdir(mempooldotnew1)
    assert_raises_rpc_error(-1, "Unable to dump mempool to disk",
                            self.nodes[1].savemempool)
    os.rmdir(mempooldotnew1)
def relative_log_path(self, name):
    """Return the full path of log file *name* inside node0's network-specific data directory."""
    base_dir = self.nodes[0].datadir
    network_dir = NetworkDirName()
    return os.path.join(base_dir, network_dir, name)
def run_test(self):
    """Verify wallet backup and restore via wallet.dat copies and dump files.

    Flow:
      1. Each of the four nodes mines one block (50 coins each), then five
         rounds of cross-node transactions are made.
      2. Nodes 0-2 are backed up twice: binary (`backupwallet`) and textual
         (`dumpwallet`); five more transaction rounds follow.
      3. Restore path A: wipe wallets, copy wallet.bak back in place, restart,
         and check balances match the pre-shutdown snapshot.
      4. Restore path B: wipe wallets again, `importwallet` the dump files,
         and check the same balances.
      5. Finally, `backupwallet` targeting the live wallet file (via several
         equivalent paths) must fail.
    """
    self.log.info("Generating initial blockchain")
    self.nodes[0].generate(1, self.signblockprivkey)
    sync_blocks(self.nodes)
    self.nodes[1].generate(1, self.signblockprivkey)
    sync_blocks(self.nodes)
    self.nodes[2].generate(1, self.signblockprivkey)
    sync_blocks(self.nodes)
    self.nodes[3].generate(1, self.signblockprivkey)
    sync_blocks(self.nodes)

    # One block each => one block reward (50) per node.
    assert_equal(self.nodes[0].getbalance(), 50)
    assert_equal(self.nodes[1].getbalance(), 50)
    assert_equal(self.nodes[2].getbalance(), 50)
    assert_equal(self.nodes[3].getbalance(), 50)

    self.log.info("Creating transactions")
    # Five rounds of sending each other transactions.
    for i in range(5):
        self.do_one_round()

    self.log.info("Backing up")
    # Backups are written to the datadir ROOT (outside NetworkDirName()),
    # so wiping the chain/wallet directories below leaves them intact.
    self.nodes[0].backupwallet(
        os.path.join(self.nodes[0].datadir, 'wallet.bak'))
    self.nodes[0].dumpwallet(
        os.path.join(self.nodes[0].datadir, 'wallet.dump'))
    self.nodes[1].backupwallet(
        os.path.join(self.nodes[1].datadir, 'wallet.bak'))
    self.nodes[1].dumpwallet(
        os.path.join(self.nodes[1].datadir, 'wallet.dump'))
    self.nodes[2].backupwallet(
        os.path.join(self.nodes[2].datadir, 'wallet.bak'))
    self.nodes[2].dumpwallet(
        os.path.join(self.nodes[2].datadir, 'wallet.dump'))

    self.log.info("More transactions")
    for i in range(5):
        self.do_one_round()

    # Snapshot balances to compare against after each restore path.
    balance0 = self.nodes[0].getbalance()
    balance1 = self.nodes[1].getbalance()
    balance2 = self.nodes[2].getbalance()
    balance3 = self.nodes[3].getbalance()
    total = balance0 + balance1 + balance2 + balance3

    # At this point, there are 214 blocks (4 for setup, then 10 rounds.)
    # 14 are mature, so the sum of all wallets should be 14 * 50 = 700.
    assert_equal(total, 700)

    ##
    # Test restoring spender wallets from backups
    ##
    self.log.info("Restoring using wallet.dat")
    self.stop_three()
    self.erase_three()

    # Start node2 with no chain
    shutil.rmtree(
        os.path.join(self.nodes[2].datadir, NetworkDirName(), 'blocks'))
    shutil.rmtree(
        os.path.join(self.nodes[2].datadir, NetworkDirName(), 'chainstate'))

    # Restore wallets from backup
    shutil.copyfile(
        os.path.join(self.nodes[0].datadir, 'wallet.bak'),
        os.path.join(self.nodes[0].datadir, NetworkDirName(), 'wallets',
                     'wallet.dat'))
    shutil.copyfile(
        os.path.join(self.nodes[1].datadir, 'wallet.bak'),
        os.path.join(self.nodes[1].datadir, NetworkDirName(), 'wallets',
                     'wallet.dat'))
    shutil.copyfile(
        os.path.join(self.nodes[2].datadir, 'wallet.bak'),
        os.path.join(self.nodes[2].datadir, NetworkDirName(), 'wallets',
                     'wallet.dat'))

    self.log.info("Re-starting nodes")
    self.start_three()
    sync_blocks(self.nodes)

    # Restored wallets must report the balances recorded before the backup
    # point was overtaken by more transactions (rescan recovers them).
    assert_equal(self.nodes[0].getbalance(), balance0)
    assert_equal(self.nodes[1].getbalance(), balance1)
    assert_equal(self.nodes[2].getbalance(), balance2)

    self.log.info("Restoring using dumped wallet")
    self.stop_three()
    self.erase_three()

    # start node2 with no chain
    shutil.rmtree(
        os.path.join(self.nodes[2].datadir, NetworkDirName(), 'blocks'))
    shutil.rmtree(
        os.path.join(self.nodes[2].datadir, NetworkDirName(), 'chainstate'))

    self.start_three()

    # Fresh (erased) wallets know nothing yet.
    assert_equal(self.nodes[0].getbalance(), 0)
    assert_equal(self.nodes[1].getbalance(), 0)
    assert_equal(self.nodes[2].getbalance(), 0)

    self.nodes[0].importwallet(
        os.path.join(self.nodes[0].datadir, 'wallet.dump'))
    self.nodes[1].importwallet(
        os.path.join(self.nodes[1].datadir, 'wallet.dump'))
    self.nodes[2].importwallet(
        os.path.join(self.nodes[2].datadir, 'wallet.dump'))

    sync_blocks(self.nodes)

    assert_equal(self.nodes[0].getbalance(), balance0)
    assert_equal(self.nodes[1].getbalance(), balance1)
    assert_equal(self.nodes[2].getbalance(), balance2)

    # Backup to source wallet file must fail
    # Several path spellings that all resolve to the live wallet location.
    sourcePaths = [
        os.path.join(self.nodes[0].datadir, NetworkDirName(), 'wallets',
                     'wallet.dat'),
        os.path.join(self.nodes[0].datadir, NetworkDirName(), '.', 'wallets',
                     'wallet.dat'),
        os.path.join(self.nodes[0].datadir, NetworkDirName(), 'wallets', ''),
        os.path.join(self.nodes[0].datadir, NetworkDirName(), 'wallets')
    ]

    for sourcePath in sourcePaths:
        assert_raises_rpc_error(-4, "backup failed",
                                self.nodes[0].backupwallet, sourcePath)
def run_test(self):
    """End-to-end multiwallet test: startup wallet selection, error cases,
    dynamic load/create/unload RPCs, and per-wallet backup/restore.

    The test drives a single node (node0) through many restarts with varying
    -wallet/-walletdir combinations; node1 is only used once to show two
    nodes cannot share one wallet database environment.
    """
    node = self.nodes[0]

    # Small path helpers; all wallets live under <datadir>/<network>/wallets.
    data_dir = lambda *p: os.path.join(node.datadir, NetworkDirName(), *p)
    wallet_dir = lambda *p: data_dir('wallets', *p)
    wallet = lambda name: node.get_wallet_rpc(name)

    def wallet_file(name):
        # A wallet may be a directory (containing wallet.dat) or a bare file.
        if os.path.isdir(wallet_dir(name)):
            return wallet_dir(name, "wallet.dat")
        return wallet_dir(name)

    # check wallet.dat is created
    self.stop_nodes()
    assert_equal(os.path.isfile(wallet_dir('wallet.dat')), True)

    # create symlink to verify wallet directory path can be referenced
    # through symlink
    os.mkdir(wallet_dir('w7'))
    os.symlink('w7', wallet_dir('w7_symlink'))

    # rename wallet.dat to make sure plain wallet file paths (as opposed to
    # directory paths) can be loaded
    os.rename(wallet_dir("wallet.dat"), wallet_dir("w8"))

    # create another dummy wallet for use in testing backups later
    self.start_node(0, [])
    self.stop_nodes()
    empty_wallet = os.path.join(self.options.tmpdir, 'empty.dat')
    os.rename(wallet_dir("wallet.dat"), empty_wallet)

    # restart node with a mix of wallet names:
    #   w1, w2, w3 - to verify new wallets created when non-existing paths specified
    #   w          - to verify wallet name matching works when one wallet path is prefix of another
    #   sub/w5     - to verify relative wallet path is created correctly
    #   extern/w6  - to verify absolute wallet path is created correctly
    #   w7_symlink - to verify symlinked wallet path is initialized correctly
    #   w8         - to verify existing wallet file is loaded correctly
    #   ''         - to verify default wallet file is created correctly
    wallet_names = [
        'w1', 'w2', 'w3', 'w', 'sub/w5',
        os.path.join(self.options.tmpdir, 'extern/w6'), 'w7_symlink', 'w8', ''
    ]
    extra_args = ['-wallet={}'.format(n) for n in wallet_names]
    self.start_node(0, extra_args)
    assert_equal(set(node.listwallets()), set(wallet_names))

    # check that all requested wallets were created
    self.stop_node(0)
    for wallet_name in wallet_names:
        assert_equal(os.path.isfile(wallet_file(wallet_name)), True)

    # should not initialize if wallet path can't be created
    # (regex covers both the Windows and POSIX error strings)
    exp_stderr = "boost::filesystem::create_directory: (The system cannot find the path specified|Not a directory):"
    self.nodes[0].assert_start_raises_init_error(
        ['-wallet=wallet.dat/bad'], exp_stderr,
        match=ErrorMatch.PARTIAL_REGEX)

    self.nodes[0].assert_start_raises_init_error(
        ['-walletdir=wallets'],
        'Error: Specified -walletdir "wallets" does not exist')
    self.nodes[0].assert_start_raises_init_error(
        ['-walletdir=wallets'],
        'Error: Specified -walletdir "wallets" is a relative path',
        cwd=data_dir())
    self.nodes[0].assert_start_raises_init_error(
        ['-walletdir=debug.log'],
        'Error: Specified -walletdir "debug.log" is not a directory',
        cwd=data_dir())

    # should not initialize if there are duplicate wallets
    self.nodes[0].assert_start_raises_init_error([
        '-wallet=w1', '-wallet=w1'
    ], 'Error: Error loading wallet w1. Duplicate -wallet filename specified.'
    )

    # should not initialize if one wallet is a copy of another
    shutil.copyfile(wallet_dir('w8'), wallet_dir('w8_copy'))
    exp_stderr = "BerkeleyBatch: Can't open database w8_copy \(duplicates fileid \w+ from w8\)"
    self.nodes[0].assert_start_raises_init_error(
        ['-wallet=w8', '-wallet=w8_copy'],
        exp_stderr,
        match=ErrorMatch.PARTIAL_REGEX)

    # should not initialize if wallet file is a symlink
    os.symlink('w8', wallet_dir('w8_symlink'))
    self.nodes[0].assert_start_raises_init_error(
        ['-wallet=w8_symlink'],
        'Error: Invalid -wallet path \'w8_symlink\'\. .*',
        match=ErrorMatch.FULL_REGEX)

    # should not initialize if the specified walletdir does not exist
    self.nodes[0].assert_start_raises_init_error(
        ['-walletdir=bad'],
        'Error: Specified -walletdir "bad" does not exist')
    # should not initialize if the specified walletdir is not a directory
    not_a_dir = wallet_dir('notadir')
    open(not_a_dir, 'a', encoding="utf8").close()
    self.nodes[0].assert_start_raises_init_error(
        ['-walletdir=' + not_a_dir],
        'Error: Specified -walletdir "' + not_a_dir + '" is not a directory')

    # The following single-wallet-only options must be rejected whenever more
    # than one -wallet is configured.
    self.log.info("Do not allow -zapwallettxes with multiwallet")
    self.nodes[0].assert_start_raises_init_error(
        ['-zapwallettxes', '-wallet=w1', '-wallet=w2'],
        "Error: -zapwallettxes is only allowed with a single wallet file")
    self.nodes[0].assert_start_raises_init_error(
        ['-zapwallettxes=1', '-wallet=w1', '-wallet=w2'],
        "Error: -zapwallettxes is only allowed with a single wallet file")
    self.nodes[0].assert_start_raises_init_error(
        ['-zapwallettxes=2', '-wallet=w1', '-wallet=w2'],
        "Error: -zapwallettxes is only allowed with a single wallet file")

    self.log.info("Do not allow -salvagewallet with multiwallet")
    self.nodes[0].assert_start_raises_init_error(
        ['-salvagewallet', '-wallet=w1', '-wallet=w2'],
        "Error: -salvagewallet is only allowed with a single wallet file")
    self.nodes[0].assert_start_raises_init_error(
        ['-salvagewallet=1', '-wallet=w1', '-wallet=w2'],
        "Error: -salvagewallet is only allowed with a single wallet file")

    self.log.info("Do not allow -upgradewallet with multiwallet")
    self.nodes[0].assert_start_raises_init_error(
        ['-upgradewallet', '-wallet=w1', '-wallet=w2'],
        "Error: -upgradewallet is only allowed with a single wallet file")
    self.nodes[0].assert_start_raises_init_error(
        ['-upgradewallet=1', '-wallet=w1', '-wallet=w2'],
        "Error: -upgradewallet is only allowed with a single wallet file")

    # if wallets/ doesn't exist, datadir should be the default wallet dir
    wallet_dir2 = data_dir('walletdir')
    os.rename(wallet_dir(), wallet_dir2)
    self.start_node(0, ['-wallet=w4', '-wallet=w5'])
    assert_equal(set(node.listwallets()), {"w4", "w5"})
    w5 = wallet("w5")
    w5.generate(1, self.signblockprivkey)

    # now if wallets/ exists again, but the rootdir is specified as the walletdir, w4 and w5 should still be loaded
    os.rename(wallet_dir2, wallet_dir())
    self.restart_node(
        0, ['-wallet=w4', '-wallet=w5', '-walletdir=' + data_dir()])
    assert_equal(set(node.listwallets()), {"w4", "w5"})
    w5 = wallet("w5")
    w5_info = w5.getwalletinfo()
    # w5 mined one block above => one block reward.
    assert_equal(w5_info['balance'], 50)

    # NOTE(review): node0 is restarted into competing_wallet_dir first, so
    # node1's attempt to use the same walletdir fails to initialize the
    # (already-held) wallet database environment.
    competing_wallet_dir = os.path.join(self.options.tmpdir,
                                        'competing_walletdir')
    os.mkdir(competing_wallet_dir)
    self.restart_node(0, ['-walletdir=' + competing_wallet_dir])
    exp_stderr = "Error: Error initializing wallet database environment \"\S+competing_walletdir\"!"
    self.nodes[1].assert_start_raises_init_error(
        ['-walletdir=' + competing_wallet_dir],
        exp_stderr,
        match=ErrorMatch.PARTIAL_REGEX)

    self.restart_node(0, extra_args)
    wallets = [wallet(w) for w in wallet_names]
    wallet_bad = wallet("bad")

    # check wallet names and balances
    wallets[0].generate(1, self.signblockprivkey)
    # NOTE(review): the loop variable shadows the `wallet` lambda defined
    # above; harmless here because the lambda is not used again afterwards.
    for wallet_name, wallet in zip(wallet_names, wallets):
        info = wallet.getwalletinfo()
        assert_equal(info['balance'], 50 if wallet is wallets[0] else 0)
        assert_equal(info['walletname'], wallet_name)

    # accessing invalid wallet fails
    assert_raises_rpc_error(
        -18, "Requested wallet does not exist or is not loaded",
        wallet_bad.getwalletinfo)

    # accessing wallet RPC without using wallet endpoint fails
    assert_raises_rpc_error(-19, "Wallet file not specified",
                            node.getwalletinfo)

    w1, w2, w3, w4, *_ = wallets
    # Second reward matures => w1 holds two rewards (100).
    w1.generate(1, self.signblockprivkey)
    assert_equal(w1.getbalance(), 100)
    assert_equal(w2.getbalance(), 0)
    assert_equal(w3.getbalance(), 0)
    assert_equal(w4.getbalance(), 0)

    w1.sendtoaddress(w2.getnewaddress(), 1)
    w1.sendtoaddress(w3.getnewaddress(), 2)
    w1.sendtoaddress(w4.getnewaddress(), 3)
    w1.generate(1, self.signblockprivkey)
    assert_equal(w2.getbalance(), 1)
    assert_equal(w3.getbalance(), 2)
    assert_equal(w4.getbalance(), 3)

    # Batched RPC must be routed to the per-wallet endpoint as well.
    batch = w1.batch([
        w1.getblockchaininfo.get_request(),
        w1.getwalletinfo.get_request()
    ])
    assert_equal(batch[0]["result"]["chain"],
                 "1905960821")  # chain name is networkid
    assert_equal(batch[1]["result"]["walletname"], "w1")

    self.log.info('Check for per-wallet settxfee call')
    assert_equal(w1.getwalletinfo()['paytxfee'], 0)
    assert_equal(w2.getwalletinfo()['paytxfee'], 0)
    w2.settxfee(4.0)
    # settxfee on w2 must not leak into w1.
    assert_equal(w1.getwalletinfo()['paytxfee'], 0)
    assert_equal(w2.getwalletinfo()['paytxfee'], 4.0)

    self.log.info("Test dynamic wallet loading")

    self.restart_node(0, ['-nowallet'])
    assert_equal(node.listwallets(), [])
    assert_raises_rpc_error(-32601, "Method not found", node.getwalletinfo)

    self.log.info("Load first wallet")
    loadwallet_name = node.loadwallet(wallet_names[0])
    assert_equal(loadwallet_name['name'], wallet_names[0])
    assert_equal(node.listwallets(), wallet_names[0:1])
    # With exactly one wallet loaded, the node endpoint resolves it.
    node.getwalletinfo()
    w1 = node.get_wallet_rpc(wallet_names[0])
    w1.getwalletinfo()

    self.log.info("Load second wallet")
    loadwallet_name = node.loadwallet(wallet_names[1])
    assert_equal(loadwallet_name['name'], wallet_names[1])
    assert_equal(node.listwallets(), wallet_names[0:2])
    # With two wallets loaded, the node endpoint is ambiguous again.
    assert_raises_rpc_error(-19, "Wallet file not specified",
                            node.getwalletinfo)
    w2 = node.get_wallet_rpc(wallet_names[1])
    w2.getwalletinfo()

    self.log.info("Load remaining wallets")
    for wallet_name in wallet_names[2:]:
        loadwallet_name = self.nodes[0].loadwallet(wallet_name)
        assert_equal(loadwallet_name['name'], wallet_name)

    assert_equal(set(self.nodes[0].listwallets()), set(wallet_names))

    # Fail to load if wallet doesn't exist
    assert_raises_rpc_error(-18, 'Wallet wallets not found.',
                            self.nodes[0].loadwallet, 'wallets')

    # Fail to load duplicate wallets
    assert_raises_rpc_error(
        -4,
        'Wallet file verification failed: Error loading wallet w1. Duplicate -wallet filename specified.',
        self.nodes[0].loadwallet, wallet_names[0])

    # Fail to load if one wallet is a copy of another
    assert_raises_rpc_error(
        -1, "BerkeleyBatch: Can't open database w8_copy (duplicates fileid",
        self.nodes[0].loadwallet, 'w8_copy')

    # Fail to load if wallet file is a symlink
    assert_raises_rpc_error(
        -4,
        "Wallet file verification failed: Invalid -wallet path 'w8_symlink'",
        self.nodes[0].loadwallet, 'w8_symlink')

    # Fail to load if a directory is specified that doesn't contain a wallet
    os.mkdir(wallet_dir('empty_wallet_dir'))
    assert_raises_rpc_error(
        -18, "Directory empty_wallet_dir does not contain a wallet.dat file",
        self.nodes[0].loadwallet, 'empty_wallet_dir')

    self.log.info("Test dynamic wallet creation.")

    # Fail to create a wallet if it already exists.
    assert_raises_rpc_error(-4, "Wallet w2 already exists.",
                            self.nodes[0].createwallet, 'w2')

    # Successfully create a wallet with a new name
    loadwallet_name = self.nodes[0].createwallet('w9')
    assert_equal(loadwallet_name['name'], 'w9')
    w9 = node.get_wallet_rpc('w9')
    assert_equal(w9.getwalletinfo()['walletname'], 'w9')
    assert 'w9' in self.nodes[0].listwallets()

    # Successfully create a wallet using a full path
    new_wallet_dir = os.path.join(self.options.tmpdir, 'new_walletdir')
    new_wallet_name = os.path.join(new_wallet_dir, 'w10')
    loadwallet_name = self.nodes[0].createwallet(new_wallet_name)
    assert_equal(loadwallet_name['name'], new_wallet_name)
    w10 = node.get_wallet_rpc(new_wallet_name)
    assert_equal(w10.getwalletinfo()['walletname'], new_wallet_name)
    assert new_wallet_name in self.nodes[0].listwallets()

    self.log.info("Test dynamic wallet unloading")

    # Test `unloadwallet` errors
    assert_raises_rpc_error(-1, "JSON value is not a string as expected",
                            self.nodes[0].unloadwallet)
    assert_raises_rpc_error(
        -18, "Requested wallet does not exist or is not loaded",
        self.nodes[0].unloadwallet, "dummy")
    assert_raises_rpc_error(
        -18, "Requested wallet does not exist or is not loaded",
        node.get_wallet_rpc("dummy").unloadwallet)
    # NOTE(review): stray trailing comma below makes this a 1-tuple
    # expression; harmless but likely unintended.
    assert_raises_rpc_error(-8, "Cannot unload the requested wallet",
                            w1.unloadwallet, "w2"),

    # Successfully unload the specified wallet name
    self.nodes[0].unloadwallet("w1")
    assert 'w1' not in self.nodes[0].listwallets()

    # Successfully unload the wallet referenced by the request endpoint
    w2.unloadwallet()
    assert 'w2' not in self.nodes[0].listwallets()

    # Successfully unload all wallets
    for wallet_name in self.nodes[0].listwallets():
        self.nodes[0].unloadwallet(wallet_name)
    assert_equal(self.nodes[0].listwallets(), [])
    assert_raises_rpc_error(
        -32601,
        "Method not found (wallet method is disabled because no wallet is loaded)",
        self.nodes[0].getwalletinfo)

    # Successfully load a previously unloaded wallet
    self.nodes[0].loadwallet('w1')
    assert_equal(self.nodes[0].listwallets(), ['w1'])
    assert_equal(w1.getwalletinfo()['walletname'], 'w1')

    # Test backing up and restoring wallets
    self.log.info("Test wallet backup")
    self.restart_node(0, ['-nowallet'])
    for wallet_name in wallet_names:
        self.nodes[0].loadwallet(wallet_name)
    for wallet_name in wallet_names:
        rpc = self.nodes[0].get_wallet_rpc(wallet_name)
        addr = rpc.getnewaddress()
        backup = os.path.join(self.options.tmpdir, 'backup.dat')
        rpc.backupwallet(backup)
        # Swap in the empty wallet: the address must no longer be ours...
        self.nodes[0].unloadwallet(wallet_name)
        shutil.copyfile(empty_wallet, wallet_file(wallet_name))
        self.nodes[0].loadwallet(wallet_name)
        assert_equal(rpc.getaddressinfo(addr)['ismine'], False)
        # ...then restore the backup: ownership of the address returns.
        self.nodes[0].unloadwallet(wallet_name)
        shutil.copyfile(backup, wallet_file(wallet_name))
        self.nodes[0].loadwallet(wallet_name)
        assert_equal(rpc.getaddressinfo(addr)['ismine'], True)
def run_test(self):
    """Verify the `getblockstats` RPC against pre-recorded expected data.

    In --gen-test-data mode the genesis block and stats fixtures are
    regenerated; otherwise every node's chain is wiped, the recorded genesis
    block is installed, and the recorded transactions are replayed.  Then the
    RPC output is compared against the fixtures: full rows, lookup by hash,
    per-stat selection, txindex-dependent stats on a node without -txindex,
    and the error paths for bad heights/hashes and invalid stat names.
    """
    test_data = os.path.join(TESTSDIR, self.options.test_data)
    genesis_block = os.path.join(TESTSDIR, self.options.genesis_block)
    if self.options.gen_test_data:
        self.log.info("Generating new genesis block to %s " % genesis_block)
        with open(genesis_block, 'w', encoding="utf8") as f:
            f.write(bytes_to_hex_str(self.genesisBlock.serialize()))
        self.log.info("Generating new test data to %s " % test_data)
        self.generate_test_data(test_data)
    else:
        self.log.info("Loading genesis block from %s" % genesis_block)
        for i in range(self.num_nodes):
            # Wipe node i's chain (the network subdir only; the datadir root
            # with its config is kept) and install the recorded genesis block.
            self.stop_node(i)
            shutil.rmtree(os.path.join(self.nodes[i].datadir, NetworkDirName()))
            # BUGFIX: was initialize_datadir(self.options.tmpdir, 0), which
            # re-initialized node 0's datadir on every iteration instead of
            # node i's.
            initialize_datadir(self.options.tmpdir, i)
            shutil.copyfile(genesis_block, os.path.join(self.nodes[i].datadir, "genesis.dat"))
            self.start_node(i)
        connect_nodes_bi(self.nodes, 0, 1)
        self.log.info("Loading test data from %s" % test_data)
        self.load_test_data(test_data)

    self.sync_all()
    stats = self.get_stats()
    # Fixture rows with the txindex-only stats stripped, for the node that
    # runs without -txindex.
    expected_stats_noindex = []
    for stat_row in stats:
        expected_stats_noindex.append({k: v for k, v in stat_row.items() if k not in self.STATS_NEED_TXINDEX})

    # Make sure all valid statistics are included but nothing else is
    expected_keys = self.expected_stats[0].keys()
    assert_equal(set(stats[0].keys()), set(expected_keys))

    assert_equal(stats[0]['height'], self.start_height)
    assert_equal(stats[self.max_stat_pos]['height'],
                 self.start_height + self.max_stat_pos)

    for i in range(self.max_stat_pos + 1):
        self.log.info('Checking block %d\n' % (i))
        assert_equal(stats[i], self.expected_stats[i])

        # Check selecting block by hash too
        blockhash = self.expected_stats[i]['blockhash']
        stats_by_hash = self.nodes[0].getblockstats(hash_or_height=blockhash)
        assert_equal(stats_by_hash, self.expected_stats[i])

        # Check with the node that has no txindex
        stats_no_txindex = self.nodes[1].getblockstats(
            hash_or_height=blockhash,
            stats=list(expected_stats_noindex[i].keys()))
        assert_equal(stats_no_txindex, expected_stats_noindex[i])

    # Make sure each stat can be queried on its own
    for stat in expected_keys:
        for i in range(self.max_stat_pos + 1):
            result = self.nodes[0].getblockstats(
                hash_or_height=self.start_height + i, stats=[stat])
            assert_equal(list(result.keys()), [stat])
            # Log the mismatch before the assert fires, to ease debugging.
            if result[stat] != self.expected_stats[i][stat]:
                self.log.info('result[%s] (%d) failed, %r != %r' % (
                    stat, i, result[stat], self.expected_stats[i][stat]))
            assert_equal(result[stat], self.expected_stats[i][stat])

    # Make sure only the selected statistics are included (more than one)
    some_stats = {'minfee', 'maxfee'}
    stats = self.nodes[0].getblockstats(hash_or_height=1,
                                        stats=list(some_stats))
    assert_equal(set(stats.keys()), some_stats)

    # Test invalid parameters raise the proper json exceptions
    tip = self.start_height + self.max_stat_pos
    assert_raises_rpc_error(
        -8, 'Target block height %d after current tip %d' % (tip + 1, tip),
        self.nodes[0].getblockstats, hash_or_height=tip + 1)
    assert_raises_rpc_error(
        -8, 'Target block height %d is negative' % (-1),
        self.nodes[0].getblockstats, hash_or_height=-1)

    # Make sure not valid stats aren't allowed
    inv_sel_stat = 'asdfghjkl'
    inv_stats = [
        [inv_sel_stat],
        ['minfee', inv_sel_stat],
        [inv_sel_stat, 'minfee'],
        ['minfee', inv_sel_stat, 'maxfee'],
    ]
    for inv_stat in inv_stats:
        assert_raises_rpc_error(
            -8, 'Invalid selected statistic %s' % inv_sel_stat,
            self.nodes[0].getblockstats, hash_or_height=1, stats=inv_stat)

    # Make sure we aren't always returning inv_sel_stat as the culprit stat
    assert_raises_rpc_error(
        -8, 'Invalid selected statistic aaa%s' % inv_sel_stat,
        self.nodes[0].getblockstats, hash_or_height=1,
        stats=['minfee', 'aaa%s' % inv_sel_stat])

    assert_raises_rpc_error(
        -8, 'One or more of the selected stats requires -txindex enabled',
        self.nodes[1].getblockstats,
        hash_or_height=self.start_height + self.max_stat_pos)

    # Mainchain's genesis block shouldn't be found on regtest
    assert_raises_rpc_error(
        -5, 'Block not found', self.nodes[0].getblockstats,
        hash_or_height='000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f')