def transact_and_mine(self, numblocks, mining_node):
    """Generate `numblocks` blocks, creating ~100 random-fee transactions
    before each one, and record the observed fee-per-kB of every tx.

    The confirmed txout set is shuffled before each batch so that
    small_txpuzzle_randfee prefers inputs already in the chain, falling
    back to mempool-dependent outputs only when those run out.
    """
    floor_fee = Decimal("0.00001")
    for _ in range(numblocks):
        random.shuffle(self.confutxo)
        # On average 100 txs per block (uniform in [50, 150)).
        batch = random.randrange(100 - 50, 100 + 50)
        for _ in range(batch):
            sender = self.nodes[random.randint(1, 2)]
            txhex, fee = small_txpuzzle_randfee(sender, self.confutxo,
                                                self.memutxo, Decimal("0.005"),
                                                floor_fee, floor_fee)
            # txhex is hex, so bytes = len/2; record fee rate in BTC/kB.
            kb = (len(txhex) // 2) / 1000.0
            self.fees_per_kb.append(float(fee) / kb)
        sync_mempools(self.nodes[0:3], wait=.1)
        block_hash = mining_node.generate(1)[0]
        mined = mining_node.getblock(block_hash, True)["tx"]
        sync_blocks(self.nodes[0:3], wait=.1)
        # Move freshly-mined outputs from the mempool set to the confirmed set.
        still_pending = []
        for candidate in self.memutxo:
            if candidate["txid"] in mined:
                self.confutxo.append(candidate)
            else:
                still_pending.append(candidate)
        self.memutxo = still_pending
def transact_and_mine(self, numblocks, mining_node):
    """Generate `numblocks` blocks with a batch of random-fee transactions
    before each, recording every transaction's fee-per-kB.

    The confirmed txout set is shuffled per batch so small_txpuzzle_randfee
    can prefer already-confirmed inputs, resorting to mempool-dependent
    outputs only when those run out.
    """
    floor_fee = Decimal("0.00001")
    for _ in range(numblocks):
        random.shuffle(self.confutxo)
        # ELEMENTS: make fewer txns since larger: ~236 bytes: 69k/4/234=~73
        # Pick a number smaller than that, stingy miner is even stingier
        batch = random.randrange(55 - 15, 55 + 15)
        for _ in range(batch):
            sender = self.nodes[random.randint(1, 2)]
            txhex, fee = small_txpuzzle_randfee(sender, self.confutxo,
                                                self.memutxo, Decimal("0.005"),
                                                floor_fee, floor_fee)
            # txhex is hex, so bytes = len/2; record fee rate per kB.
            kb = (len(txhex) // 2) / 1000.0
            self.fees_per_kb.append(float(fee) / kb)
        sync_mempools(self.nodes[0:3], wait=10, timeout=240)  # Slower to sync than btc
        block_hash = mining_node.generate(1)[0]
        mined = mining_node.getblock(block_hash, True)["tx"]
        sync_blocks(self.nodes[0:3], wait=.1)
        # Move freshly-mined outputs from the mempool set to the confirmed set.
        still_pending = []
        for candidate in self.memutxo:
            if candidate["txid"] in mined:
                self.confutxo.append(candidate)
            else:
                still_pending.append(candidate)
        self.memutxo = still_pending
def send_transaction(self, testnode, block, address, expiry_height):
    """Build a spend of `block`'s coinbase to `address` with the given
    expiry height, deliver it to the node via a p2p "tx" message, and
    return the transaction object."""
    spend_tx = create_transaction(self.nodes[0], block, address, 10.0, expiry_height)
    testnode.send_message(msg_tx(spend_tx))
    # Ping round-trip guarantees the p2p message has been processed.
    testnode.sync_with_ping()
    # Bring nodes 0 and 1 to a consistent view before returning.
    sync_blocks(self.nodes[:2])
    sync_mempools(self.nodes[:2])
    return spend_tx
def do_one_round(self):
    """Have nodes 0-2 each send to the other two, then let the miner
    (node3) confirm everything in one block."""
    addresses = [self.nodes[idx].getnewaddress() for idx in range(3)]
    # Every node pays every other node once, in index order
    # (0->1, 0->2, 1->0, 1->2, 2->0, 2->1).
    for sender in range(3):
        for receiver in range(3):
            if sender != receiver:
                self.one_send(sender, addresses[receiver])
    # Must sync mempools before mining so node3 sees all the txs.
    sync_mempools(self.nodes)
    self.nodes[3].generate(1)
def run_test(self):
    """Verify that a p2p peer's feefilter message suppresses tx invs below
    the advertised feerate and that clearing the filter restores relay."""
    node1 = self.nodes[1]
    node0 = self.nodes[0]
    # Get out of IBD
    node1.generate(1)
    sync_blocks(self.nodes)

    self.nodes[0].add_p2p_connection(TestP2PConn())

    # Test that invs are received for all txs at feerate of 20 sat/byte
    node1.settxfee(Decimal("0.00020000"))
    txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
    assert(allInvsMatch(txids, self.nodes[0].p2p))
    self.nodes[0].p2p.clear_invs()

    # Set a filter of 15 sat/byte
    self.nodes[0].p2p.send_and_ping(msg_feefilter(15000))

    # Test that txs are still being received (paying 20 sat/byte)
    txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
    assert(allInvsMatch(txids, self.nodes[0].p2p))
    self.nodes[0].p2p.clear_invs()

    # Change tx fee rate to 10 sat/byte and test they are no longer received
    node1.settxfee(Decimal("0.00010000"))
    [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
    sync_mempools(self.nodes)  # must be sure node 0 has received all txs

    # Send one transaction from node0 that should be received, so that we
    # can sync the test on receipt (if node1's txs were relayed, they'd
    # be received by the time this node0 tx is received). This is
    # unfortunately reliant on the current relay behavior where we batch up
    # to 35 entries in an inv, which means that when this next transaction
    # is eligible for relay, the prior transactions from node1 are eligible
    # as well.
    node0.settxfee(Decimal("0.00020000"))
    txids = [node0.sendtoaddress(node0.getnewaddress(), 1)]
    assert(allInvsMatch(txids, self.nodes[0].p2p))
    self.nodes[0].p2p.clear_invs()

    # Remove fee filter and check that txs are received again
    self.nodes[0].p2p.send_and_ping(msg_feefilter(0))
    txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
    assert(allInvsMatch(txids, self.nodes[0].p2p))
    self.nodes[0].p2p.clear_invs()
def test_simple_bumpfee_succeeds(rbf_node, peer_node, dest_address):
    """Bump the fee of a simple one-input spend and verify the replacement
    propagates, evicts the original everywhere, and the wallet records the
    replaces/replaced_by linkage."""
    rbfid = spend_one_input(rbf_node, dest_address)
    rbftx = rbf_node.gettransaction(rbfid)
    sync_mempools((rbf_node, peer_node))
    assert rbfid in rbf_node.getrawmempool() and rbfid in peer_node.getrawmempool()
    bumped_tx = rbf_node.bumpfee(rbfid)
    assert_equal(bumped_tx["errors"], [])
    # The replacement must pay strictly more than the original's absolute fee.
    assert bumped_tx["fee"] - abs(rbftx["fee"]) > 0
    # check that bumped_tx propagates, original tx was evicted and has a wallet conflict
    sync_mempools((rbf_node, peer_node))
    assert bumped_tx["txid"] in rbf_node.getrawmempool()
    assert bumped_tx["txid"] in peer_node.getrawmempool()
    assert rbfid not in rbf_node.getrawmempool()
    assert rbfid not in peer_node.getrawmempool()
    oldwtx = rbf_node.gettransaction(rbfid)
    assert len(oldwtx["walletconflicts"]) > 0
    # check wallet transaction replaces and replaced_by values
    bumpedwtx = rbf_node.gettransaction(bumped_tx["txid"])
    assert_equal(oldwtx["replaced_by_txid"], bumped_tx["txid"])
    assert_equal(bumpedwtx["replaces_txid"], rbfid)
def run_test(self):
    """Exercise mempool package (ancestor/descendant) accounting and limits.

    Covers: per-entry ancestor/descendant counts, sizes and fees reported by
    getrawmempool/getmempoolentry; getmempoolancestors/getmempooldescendants
    (plain and verbose); prioritisetransaction interaction with package fees;
    the MAX_ANCESTORS / MAX_DESCENDANTS chain limits; and reorg handling.

    Fix: the child-detection check used `utxo['txid'] is parent_transaction`
    (object identity). It only passed because the same str object happened to
    be stored in the package dict; string comparison must use `==`.
    """
    # Mine some blocks and have them mature.
    self.nodes[0].generate(101)
    utxo = self.nodes[0].listunspent(10)
    txid = utxo[0]['txid']
    vout = utxo[0]['vout']
    value = utxo[0]['amount']

    fee = Decimal("0.0001")
    # MAX_ANCESTORS transactions off a confirmed tx should be fine
    chain = []
    for i in range(MAX_ANCESTORS):
        (txid, sent_value) = self.chain_transaction(self.nodes[0], txid, 0, value, fee, 1)
        value = sent_value
        chain.append(txid)

    # Check mempool has MAX_ANCESTORS transactions in it, and descendant and ancestor
    # count and fees should look correct
    mempool = self.nodes[0].getrawmempool(True)
    assert_equal(len(mempool), MAX_ANCESTORS)
    descendant_count = 1
    descendant_fees = 0
    descendant_size = 0

    ancestor_size = sum([mempool[tx]['size'] for tx in mempool])
    ancestor_count = MAX_ANCESTORS
    ancestor_fees = sum([mempool[tx]['fee'] for tx in mempool])

    descendants = []
    ancestors = list(chain)
    # Walk the chain tip-to-root, accumulating descendant totals and
    # draining ancestor totals as we go.
    for x in reversed(chain):
        # Check that getmempoolentry is consistent with getrawmempool
        entry = self.nodes[0].getmempoolentry(x)
        assert_equal(entry, mempool[x])

        # Check that the descendant calculations are correct
        assert_equal(mempool[x]['descendantcount'], descendant_count)
        descendant_fees += mempool[x]['fee']
        assert_equal(mempool[x]['modifiedfee'], mempool[x]['fee'])
        assert_equal(mempool[x]['fees']['base'], mempool[x]['fee'])
        assert_equal(mempool[x]['fees']['modified'], mempool[x]['modifiedfee'])
        assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN)
        assert_equal(mempool[x]['fees']['descendant'], descendant_fees)
        descendant_size += mempool[x]['size']
        assert_equal(mempool[x]['descendantsize'], descendant_size)
        descendant_count += 1

        # Check that ancestor calculations are correct
        assert_equal(mempool[x]['ancestorcount'], ancestor_count)
        assert_equal(mempool[x]['ancestorfees'], ancestor_fees * COIN)
        assert_equal(mempool[x]['ancestorsize'], ancestor_size)
        ancestor_size -= mempool[x]['size']
        ancestor_fees -= mempool[x]['fee']
        ancestor_count -= 1

        # Check that parent/child list is correct
        assert_equal(mempool[x]['spentby'], descendants[-1:])
        assert_equal(mempool[x]['depends'], ancestors[-2:-1])

        # Check that getmempooldescendants is correct
        assert_equal(sorted(descendants), sorted(self.nodes[0].getmempooldescendants(x)))

        # Check getmempooldescendants verbose output is correct
        for descendant, dinfo in self.nodes[0].getmempooldescendants(x, True).items():
            assert_equal(dinfo['depends'], [chain[chain.index(descendant)-1]])
            if dinfo['descendantcount'] > 1:
                assert_equal(dinfo['spentby'], [chain[chain.index(descendant)+1]])
            else:
                assert_equal(dinfo['spentby'], [])
        descendants.append(x)

        # Check that getmempoolancestors is correct
        ancestors.remove(x)
        assert_equal(sorted(ancestors), sorted(self.nodes[0].getmempoolancestors(x)))

        # Check that getmempoolancestors verbose output is correct
        for ancestor, ainfo in self.nodes[0].getmempoolancestors(x, True).items():
            assert_equal(ainfo['spentby'], [chain[chain.index(ancestor)+1]])
            if ainfo['ancestorcount'] > 1:
                assert_equal(ainfo['depends'], [chain[chain.index(ancestor)-1]])
            else:
                assert_equal(ainfo['depends'], [])

    # Check that getmempoolancestors/getmempooldescendants correctly handle verbose=true
    v_ancestors = self.nodes[0].getmempoolancestors(chain[-1], True)
    assert_equal(len(v_ancestors), len(chain)-1)
    for x in v_ancestors.keys():
        assert_equal(mempool[x], v_ancestors[x])
    assert(chain[-1] not in v_ancestors.keys())

    v_descendants = self.nodes[0].getmempooldescendants(chain[0], True)
    assert_equal(len(v_descendants), len(chain)-1)
    for x in v_descendants.keys():
        assert_equal(mempool[x], v_descendants[x])
    assert(chain[0] not in v_descendants.keys())

    # Check that ancestor modified fees includes fee deltas from
    # prioritisetransaction
    self.nodes[0].prioritisetransaction(txid=chain[0], fee_delta=1000)
    mempool = self.nodes[0].getrawmempool(True)
    ancestor_fees = 0
    for x in chain:
        ancestor_fees += mempool[x]['fee']
        assert_equal(mempool[x]['fees']['ancestor'], ancestor_fees + Decimal('0.00001'))
        assert_equal(mempool[x]['ancestorfees'], ancestor_fees * COIN + 1000)

    # Undo the prioritisetransaction for later tests
    self.nodes[0].prioritisetransaction(txid=chain[0], fee_delta=-1000)

    # Check that descendant modified fees includes fee deltas from
    # prioritisetransaction
    self.nodes[0].prioritisetransaction(txid=chain[-1], fee_delta=1000)
    mempool = self.nodes[0].getrawmempool(True)
    descendant_fees = 0
    for x in reversed(chain):
        descendant_fees += mempool[x]['fee']
        assert_equal(mempool[x]['fees']['descendant'], descendant_fees + Decimal('0.00001'))
        assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN + 1000)

    # Adding one more transaction on to the chain should fail.
    assert_raises_rpc_error(-26, "too-long-mempool-chain", self.chain_transaction, self.nodes[0], txid, vout, value, fee, 1)

    # Check that prioritising a tx before it's added to the mempool works
    # First clear the mempool by mining a block.
    self.nodes[0].generate(1)
    sync_blocks(self.nodes)
    assert_equal(len(self.nodes[0].getrawmempool()), 0)
    # Prioritise a transaction that has been mined, then add it back to the
    # mempool by using invalidateblock.
    self.nodes[0].prioritisetransaction(txid=chain[-1], fee_delta=2000)
    self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
    # Keep node1's tip synced with node0
    self.nodes[1].invalidateblock(self.nodes[1].getbestblockhash())

    # Now check that the transaction is in the mempool, with the right modified fee
    mempool = self.nodes[0].getrawmempool(True)

    descendant_fees = 0
    for x in reversed(chain):
        descendant_fees += mempool[x]['fee']
        if (x == chain[-1]):
            assert_equal(mempool[x]['modifiedfee'], mempool[x]['fee']+satoshi_round(0.00002))
            assert_equal(mempool[x]['fees']['modified'], mempool[x]['fee']+satoshi_round(0.00002))
        assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN + 2000)
        assert_equal(mempool[x]['fees']['descendant'], descendant_fees+satoshi_round(0.00002))

    # TODO: check that node1's mempool is as expected
    # TODO: test ancestor size limits

    # Now test descendant chain limits
    txid = utxo[1]['txid']
    value = utxo[1]['amount']
    vout = utxo[1]['vout']

    transaction_package = []
    tx_children = []
    # First create one parent tx with 10 children
    (txid, sent_value) = self.chain_transaction(self.nodes[0], txid, vout, value, fee, 10)
    parent_transaction = txid
    for i in range(10):
        transaction_package.append({'txid': txid, 'vout': i, 'amount': sent_value})

    # Sign and send up to MAX_DESCENDANT transactions chained off the parent tx
    for i in range(MAX_DESCENDANTS - 1):
        utxo = transaction_package.pop(0)
        (txid, sent_value) = self.chain_transaction(self.nodes[0], utxo['txid'], utxo['vout'], utxo['amount'], fee, 10)
        # BUGFIX: compare txid strings by value, not object identity.
        if utxo['txid'] == parent_transaction:
            tx_children.append(txid)
        for j in range(10):
            transaction_package.append({'txid': txid, 'vout': j, 'amount': sent_value})

    mempool = self.nodes[0].getrawmempool(True)
    assert_equal(mempool[parent_transaction]['descendantcount'], MAX_DESCENDANTS)
    assert_equal(sorted(mempool[parent_transaction]['spentby']), sorted(tx_children))

    for child in tx_children:
        assert_equal(mempool[child]['depends'], [parent_transaction])

    # Sending one more chained transaction will fail
    utxo = transaction_package.pop(0)
    assert_raises_rpc_error(-26, "too-long-mempool-chain", self.chain_transaction, self.nodes[0], utxo['txid'], utxo['vout'], utxo['amount'], fee, 10)

    # TODO: check that node1's mempool is as expected
    # TODO: test descendant size limits

    # Test reorg handling
    # First, the basics:
    self.nodes[0].generate(1)
    sync_blocks(self.nodes)
    self.nodes[1].invalidateblock(self.nodes[0].getbestblockhash())
    self.nodes[1].reconsiderblock(self.nodes[0].getbestblockhash())

    # Now test the case where node1 has a transaction T in its mempool that
    # depends on transactions A and B which are in a mined block, and the
    # block containing A and B is disconnected, AND B is not accepted back
    # into node1's mempool because its ancestor count is too high.

    # Create 8 transactions, like so:
    # Tx0 -> Tx1 (vout0)
    #   \--> Tx2 (vout1) -> Tx3 -> Tx4 -> Tx5 -> Tx6 -> Tx7
    #
    # Mine them in the next block, then generate a new tx8 that spends
    # Tx1 and Tx7, and add to node1's mempool, then disconnect the
    # last block.

    # Create tx0 with 2 outputs
    utxo = self.nodes[0].listunspent()
    txid = utxo[0]['txid']
    value = utxo[0]['amount']
    vout = utxo[0]['vout']

    send_value = satoshi_round((value - fee)/2)
    inputs = [ {'txid' : txid, 'vout' : vout} ]
    outputs = {}
    for i in range(2):
        outputs[self.nodes[0].getnewaddress()] = send_value
    rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
    signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx)
    txid = self.nodes[0].sendrawtransaction(signedtx['hex'])
    tx0_id = txid
    value = send_value

    # Create tx1
    tx1_id, _ = self.chain_transaction(self.nodes[0], tx0_id, 0, value, fee, 1)

    # Create tx2-7
    vout = 1
    txid = tx0_id
    for i in range(6):
        (txid, sent_value) = self.chain_transaction(self.nodes[0], txid, vout, value, fee, 1)
        vout = 0
        value = sent_value

    # Mine these in a block
    self.nodes[0].generate(1)
    self.sync_all()

    # Now generate tx8, with a big fee
    inputs = [ {'txid' : tx1_id, 'vout': 0}, {'txid' : txid, 'vout': 0} ]
    outputs = { self.nodes[0].getnewaddress() : send_value + value - 4*fee }
    rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
    signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx)
    txid = self.nodes[0].sendrawtransaction(signedtx['hex'])
    sync_mempools(self.nodes)

    # Now try to disconnect the tip on each node...
    self.nodes[1].invalidateblock(self.nodes[1].getbestblockhash())
    self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
    sync_blocks(self.nodes)
def test_mix_contract_transaction_fork(self, gen_blocks=False):
    '''
    On two forked chains, execute a mix of transaction kinds, then:
    1. merge the network without generating blocks
    2. generate blocks, then merge the network
    :return:
    '''
    self.sync_all()
    self.node1.generate(2)
    assert_equal(self.node1.getrawmempool(), [])  # make sure mempool empty
    assert_equal(self.node0.getrawmempool(), [])  # make sure mempool empty
    ct = Contract(self.node0, self.options.tmpdir, debug=False)
    ct2 = Contract(self.node0, self.options.tmpdir, debug=False)
    ct2.call_payable(amount=1000)
    print(ct.publish_txid)
    self.sync_all()
    self.node0.generate(2)
    self.sync_all()
    blocks_num = self.node0.getblockcount()

    # split mgc network
    self.split_network()
    self.node0.generate(2)  # fork
    self.node2.generate(8)  # fork
    self.make_more_work_than(2, 0)  # make sure nod2 more than node0
    balances = [n.getbalance() for n in self.nodes]

    # in group 1
    # normal transaction
    sendtxs_a = [self.node0.sendtoaddress(self.node3.getnewaddress(), 1000) for i in range(5)]

    # publish contract transaction
    ccontracts_a = [Contract(self.node0, self.options.tmpdir, debug=False) for i in range(5)]

    # call contract transaction
    call_contract_txs_a = [ct.call_payable(amount=1000).txid for ct in ccontracts_a]
    call_contract_txs_a1 = [ct.call_callOtherContractTest(ccontracts_a[0].contract_id, 'callOtherContractTest', ccontracts_a[-1].contract_id, "contractDataTest").txid for ct in ccontracts_a]

    # long mempool chain transaction
    for i in range(8):
        result = ccontracts_a[1].call_reentrancyTest(throw_exception=False)
    ccontracts_a[2].call_maxContractCallTest(2).txid
    self.sync_all([self.nodes[:2], self.nodes[2:]])

    # in group 2
    sendtxs_b = [self.node2.sendtoaddress(self.node1.getnewaddress(), 1000) for i in range(5)]

    # publish contract transaction
    ccontracts_b = [Contract(self.node2, self.options.tmpdir, debug=False) for i in range(5)]

    # call contract transaction
    call_contract_txs_b = [ct.call_payable(amount=1000).txid for ct in ccontracts_b]
    call_contract_txs_b1 = [ct.call_callOtherContractTest(ccontracts_b[0].contract_id, 'callOtherContractTest', ccontracts_b[-1].contract_id, "contractDataTest").txid for ct in ccontracts_b]

    # long mempool chain transaction
    for i in range(8):
        result = ccontracts_b[1].call_reentrancyTest(throw_exception=False)
    ccontracts_b[2].call_maxContractCallTest(2).txid
    self.sync_all([self.nodes[:2], self.nodes[2:]])

    # join network
    if gen_blocks:
        for i in range(4):
            print("before make_more_work_than:", i, self.nodes[i].getblockcount(), int(self.nodes[i].getchaintipwork(), 16))
            print("mempool:", self.nodes[i].getrawmempool())
        blocks_a = self.node0.generate(2)
        blocks_b = self.node2.generate(8)
        more_work_blocks = self.make_more_work_than(2, 0)
        for i in range(4):
            print("before join:", i, self.nodes[i].getblockcount(), int(self.nodes[i].getchaintipwork(), 16))
            print("mempool:", self.nodes[i].getrawmempool())

    print("join network")
    print("before join tips")
    for i in range(4):
        print(i, self.nodes[i].getchaintips(), int(self.nodes[i].getchaintipwork(), 16))
    connect_nodes_bi(self.nodes, 1, 2)
    try:
        print("sync_mempools.......")
        sync_mempools(self.nodes, timeout=30)
        print("sync_mempools done")
    except Exception as e:
        # best-effort: mempools of the two groups may legitimately diverge
        print("sync mempool failed,ignore!")
    print("after join tips")
    for i in range(4):
        print(i, self.nodes[i].getchaintips(), int(self.nodes[i].getchaintipwork(), 16))
    sync_blocks(self.nodes)
    if gen_blocks:
        for i in range(4):
            print("mempool:", self.nodes[i].getrawmempool())
    for i in range(4):
        print(i, self.nodes[i].getblockcount(), int(self.nodes[i].getchaintipwork(), 16))
    tips = self.nodes[0].getchaintips()
    print("tips:", tips)
    assert_equal(len(tips), self.tips_num + 1)
    self.tips_num += 1

    # After the merge, nodes call the contract again; that transaction should
    # be dropped by nodes of the other group because the contract does not
    # exist there.
    self.log.info(
        "when joined,contractCall will throw EXCEPTION because of the contractPublish transaction be droped by different group"
    )
    tx1, tx2 = None, None
    # make sure contract publish transaction in mempool
    for i, c in enumerate(ccontracts_a):
        # sometimes assert failed here
        if c.publish_txid not in self.node0.getrawmempool():
            print("OOPS!!!!!!!OMG!!!!That's IMPOSSABLE")
            print(
                "contractPublish transaction {} not in mempool,index is {}.When call will throw exception"
                .format(c.publish_txid, i))
    result = ccontracts_a[2].call_reentrancyTest()
    if not result.reason():
        tx1 = result.txid
    result = ccontracts_b[2].call_reentrancyTest()
    if not result.reason():
        tx2 = result.txid
    try:
        sync_mempools(self.nodes, timeout=30)
    except Exception as e:
        print("sync_mempools(self.nodes,timeout = 30) not done")
    if tx1 and tx2:
        wait_until(lambda: tx1 not in self.node2.getrawmempool(), timeout=10)
        wait_until(lambda: tx1 in self.node1.getrawmempool(), timeout=10)
        if gen_blocks:
            # tx2 is a main-chain transaction; after block sync the contract
            # can be found
            wait_until(lambda: tx2 in self.node1.getrawmempool(), timeout=10)
        else:
            wait_until(lambda: tx2 not in self.node1.getrawmempool(), timeout=10)
        wait_until(lambda: tx2 in self.node3.getrawmempool(), timeout=10)
    else:
        print('tx1 and tx2 is None')
    for i, n in enumerate(self.nodes):
        try:
            n.generate(2)
        except Exception as e:
            self.log.info(
                "Don't know why!!node{} generate failed,reason:{}".format(
                    i, repr(e)))
            raise
        print("node{} generate done".format(i))
    sync_blocks(self.nodes)
assert_equal(result["remainingTransparentValue"], Decimal('0')) assert_equal(result["mergingNotes"], Decimal('0')) assert_equal(result["mergingShieldedValue"], Decimal('0')) assert_equal(result["remainingNotes"], Decimal('0')) assert_equal(result["remainingShieldedValue"], Decimal('0')) opid2 = result['opid'] # wait for both aysnc operations to complete wait_and_assert_operationid_status(self.nodes[0], opid1) wait_and_assert_operationid_status(self.nodes[0], opid2) # sync_all() invokes sync_mempool() but node 2's mempool limit will cause tx1 and tx2 to be rejected. # So instead, we sync on blocks and mempool for node 0 and node 1, and after a new block is generated # which mines tx1 and tx2, all nodes will have an empty mempool which can then be synced. sync_blocks(self.nodes[:2]) sync_mempools(self.nodes[:2]) # Generate enough blocks to ensure all transactions are mined while self.nodes[1].getmempoolinfo()['size'] > 0: self.nodes[1].generate(1) self.sync_all() # Verify maximum number of UTXOs which node 2 can shield is limited by option -mempooltxinputlimit # This option is used when the limit parameter is set to 0. result = self.nodes[2].z_mergetoaddress([n2taddr], myzaddr, Decimal('0.0001'), 0) assert_equal(result["mergingUTXOs"], Decimal('7')) assert_equal(result["remainingUTXOs"], Decimal('13')) assert_equal(result["mergingNotes"], Decimal('0')) assert_equal(result["remainingNotes"], Decimal('0')) wait_and_assert_operationid_status(self.nodes[2], result['opid']) self.sync_all() self.nodes[1].generate(1)
def run_test (self):
    """Exercise basic wallet behavior (Python 2 test): balances after mining
    and sends, listunspent 'generated' flag, raw-tx submission via the miner,
    sendtoaddress/sendmany with and without subtract-fee-from-amount,
    resendwallettransactions, zero-value outputs, -walletbroadcast=0
    handling, and a z_sendmany that must fail on tx size."""
    print "Mining blocks..."

    self.nodes[0].generate(4)
    walletinfo = self.nodes[0].getwalletinfo()
    assert_equal(walletinfo['immature_balance'], 40)
    assert_equal(walletinfo['balance'], 0)

    self.sync_all()
    self.nodes[1].generate(101)
    self.sync_all()

    assert_equal(self.nodes[0].getbalance(), 40)
    assert_equal(self.nodes[1].getbalance(), 10)
    assert_equal(self.nodes[2].getbalance(), 0)
    assert_equal(self.nodes[0].getbalance("*"), 40)
    assert_equal(self.nodes[1].getbalance("*"), 10)
    assert_equal(self.nodes[2].getbalance("*"), 0)

    # Send 21 BTC from 0 to 2 using sendtoaddress call.
    # Second transaction will be child of first, and will require a fee
    self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
    self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)

    walletinfo = self.nodes[0].getwalletinfo()
    assert_equal(walletinfo['immature_balance'], 0)

    # Have node0 mine a block, thus it will collect its own fee.
    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()

    # Have node1 generate 100 blocks (so node0 can recover the fee)
    self.nodes[1].generate(100)
    self.sync_all()

    # node0 should end up with 50 btc in block rewards plus fees, but
    # minus the 21 plus fees sent to node2
    assert_equal(self.nodes[0].getbalance(), 50-21)
    assert_equal(self.nodes[2].getbalance(), 21)
    assert_equal(self.nodes[0].getbalance("*"), 50-21)
    assert_equal(self.nodes[2].getbalance("*"), 21)

    # Node0 should have three unspent outputs.
    # Create a couple of transactions to send them to node2, submit them through
    # node1, and make sure both node0 and node2 pick them up properly:
    node0utxos = self.nodes[0].listunspent(1)
    assert_equal(len(node0utxos), 3)

    # Check 'generated' field of listunspent
    # Node 0: has one coinbase utxo and two regular utxos
    assert_equal(sum(int(uxto["generated"] is True) for uxto in node0utxos), 1)
    # Node 1: has 101 coinbase utxos and no regular utxos
    node1utxos = self.nodes[1].listunspent(1)
    assert_equal(len(node1utxos), 101)
    assert_equal(sum(int(uxto["generated"] is True) for uxto in node1utxos), 101)
    # Node 2: has no coinbase utxos and two regular utxos
    node2utxos = self.nodes[2].listunspent(1)
    assert_equal(len(node2utxos), 2)
    assert_equal(sum(int(uxto["generated"] is True) for uxto in node2utxos), 0)

    # create both transactions
    txns_to_send = []
    for utxo in node0utxos:
        inputs = []
        outputs = {}
        inputs.append({ "txid" : utxo["txid"], "vout" : utxo["vout"]})
        outputs[self.nodes[2].getnewaddress("")] = utxo["amount"]
        raw_tx = self.nodes[0].createrawtransaction(inputs, outputs)
        txns_to_send.append(self.nodes[0].signrawtransaction(raw_tx))

    # Have node 1 (miner) send the transactions
    self.nodes[1].sendrawtransaction(txns_to_send[0]["hex"], True)
    self.nodes[1].sendrawtransaction(txns_to_send[1]["hex"], True)
    self.nodes[1].sendrawtransaction(txns_to_send[2]["hex"], True)

    # Have node1 mine a block to confirm transactions:
    self.sync_all()
    self.nodes[1].generate(1)
    self.sync_all()

    assert_equal(self.nodes[0].getbalance(), 0)
    assert_equal(self.nodes[2].getbalance(), 50)
    assert_equal(self.nodes[0].getbalance("*"), 0)
    assert_equal(self.nodes[2].getbalance("*"), 50)

    # Send 10 BTC normal
    address = self.nodes[0].getnewaddress("")
    self.nodes[2].settxfee(Decimal('0.001'))
    self.nodes[2].sendtoaddress(address, 10, "", "", False)
    self.sync_all()
    self.nodes[2].generate(1)
    self.sync_all()
    assert_equal(self.nodes[2].getbalance(), Decimal('39.99900000'))
    assert_equal(self.nodes[0].getbalance(), Decimal('10.00000000'))
    assert_equal(self.nodes[2].getbalance("*"), Decimal('39.99900000'))
    assert_equal(self.nodes[0].getbalance("*"), Decimal('10.00000000'))

    # Send 10 BTC with subtract fee from amount
    self.nodes[2].sendtoaddress(address, 10, "", "", True)
    self.sync_all()
    self.nodes[2].generate(1)
    self.sync_all()
    assert_equal(self.nodes[2].getbalance(), Decimal('29.99900000'))
    assert_equal(self.nodes[0].getbalance(), Decimal('19.99900000'))
    assert_equal(self.nodes[2].getbalance("*"), Decimal('29.99900000'))
    assert_equal(self.nodes[0].getbalance("*"), Decimal('19.99900000'))

    # Sendmany 10 BTC
    self.nodes[2].sendmany("", {address: 10}, 0, "", [])
    self.sync_all()
    self.nodes[2].generate(1)
    self.sync_all()
    assert_equal(self.nodes[2].getbalance(), Decimal('19.99800000'))
    assert_equal(self.nodes[0].getbalance(), Decimal('29.99900000'))
    assert_equal(self.nodes[2].getbalance("*"), Decimal('19.99800000'))
    assert_equal(self.nodes[0].getbalance("*"), Decimal('29.99900000'))

    # Sendmany 10 BTC with subtract fee from amount
    self.nodes[2].sendmany("", {address: 10}, 0, "", [address])
    self.sync_all()
    self.nodes[2].generate(1)
    self.sync_all()
    assert_equal(self.nodes[2].getbalance(), Decimal('9.99800000'))
    assert_equal(self.nodes[0].getbalance(), Decimal('39.99800000'))
    assert_equal(self.nodes[2].getbalance("*"), Decimal('9.99800000'))
    assert_equal(self.nodes[0].getbalance("*"), Decimal('39.99800000'))

    # Test ResendWalletTransactions:
    # Create a couple of transactions, then start up a fourth
    # node (nodes[3]) and ask nodes[0] to rebroadcast.
    # EXPECT: nodes[3] should have those transactions in its mempool.
    txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1)
    txid2 = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
    sync_mempools(self.nodes)
    self.nodes.append(start_node(3, self.options.tmpdir))
    connect_nodes_bi(self.nodes, 0, 3)
    sync_blocks(self.nodes)

    relayed = self.nodes[0].resendwallettransactions()
    assert_equal(set(relayed), set([txid1, txid2]))
    sync_mempools(self.nodes)

    assert(txid1 in self.nodes[3].getrawmempool())

    #check if we can list zero value tx as available coins
    #1. create rawtx
    #2. hex-changed one output to 0.0
    #3. sign and send
    #4. check if recipient (node0) can list the zero value tx
    usp = self.nodes[1].listunspent()
    inputs = [{"txid":usp[0]['txid'], "vout":usp[0]['vout']}]
    outputs = {self.nodes[1].getnewaddress(): 9.998, self.nodes[0].getnewaddress(): 11.11}

    rawTx = self.nodes[1].createrawtransaction(inputs, outputs).replace("c0833842", "00000000") #replace 11.11 with 0.0 (int32)
    decRawTx = self.nodes[1].decoderawtransaction(rawTx)
    signedRawTx = self.nodes[1].signrawtransaction(rawTx)
    decRawTx = self.nodes[1].decoderawtransaction(signedRawTx['hex'])
    zeroValueTxid= decRawTx['txid']
    self.nodes[1].sendrawtransaction(signedRawTx['hex'])

    self.sync_all()
    self.nodes[1].generate(1) #mine a block
    self.sync_all()

    unspentTxs = self.nodes[0].listunspent() #zero value tx must be in listunspents output
    found = False
    for uTx in unspentTxs:
        if uTx['txid'] == zeroValueTxid:
            found = True
            assert_equal(uTx['amount'], Decimal('0.00000000'));
    assert(found)

    #do some -walletbroadcast tests
    stop_nodes(self.nodes)
    wait_bitcoinds()
    self.nodes = start_nodes(3, self.options.tmpdir, [["-walletbroadcast=0"],["-walletbroadcast=0"],["-walletbroadcast=0"]])
    connect_nodes_bi(self.nodes,0,1)
    connect_nodes_bi(self.nodes,1,2)
    connect_nodes_bi(self.nodes,0,2)
    self.sync_all()

    txIdNotBroadcasted = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2);
    txObjNotBroadcasted = self.nodes[0].gettransaction(txIdNotBroadcasted)
    self.sync_all()
    self.nodes[1].generate(1) #mine a block, tx should not be in there
    self.sync_all()
    assert_equal(self.nodes[2].getbalance(), Decimal('9.99800000')); #should not be changed because tx was not broadcasted
    assert_equal(self.nodes[2].getbalance("*"), Decimal('9.99800000')); #should not be changed because tx was not broadcasted

    #now broadcast from another node, mine a block, sync, and check the balance
    self.nodes[1].sendrawtransaction(txObjNotBroadcasted['hex'])
    self.sync_all()
    self.nodes[1].generate(1)
    self.sync_all()
    txObjNotBroadcasted = self.nodes[0].gettransaction(txIdNotBroadcasted)
    assert_equal(self.nodes[2].getbalance(), Decimal('11.99800000')); #should not be
    assert_equal(self.nodes[2].getbalance("*"), Decimal('11.99800000')); #should not be

    #create another tx
    txIdNotBroadcasted = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2);

    #restart the nodes with -walletbroadcast=1
    stop_nodes(self.nodes)
    wait_bitcoinds()
    self.nodes = start_nodes(3, self.options.tmpdir)
    connect_nodes_bi(self.nodes,0,1)
    connect_nodes_bi(self.nodes,1,2)
    connect_nodes_bi(self.nodes,0,2)
    sync_blocks(self.nodes)

    self.nodes[0].generate(1)
    sync_blocks(self.nodes)

    #tx should be added to balance because after restarting the nodes tx should be broadcastet
    assert_equal(self.nodes[2].getbalance(), Decimal('13.99800000')); #should not be
    assert_equal(self.nodes[2].getbalance("*"), Decimal('13.99800000')); #should not be

    # send from node 0 to node 2 taddr
    mytaddr = self.nodes[2].getnewaddress();
    mytxid = self.nodes[0].sendtoaddress(mytaddr, 10.0);
    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()

    mybalance = self.nodes[2].z_getbalance(mytaddr)
    assert_equal(mybalance, Decimal('10.0'));

    mytxdetails = self.nodes[2].gettransaction(mytxid)
    myvjoinsplits = mytxdetails["vjoinsplit"]
    assert_equal(0, len(myvjoinsplits))

    # z_sendmany is expected to fail if tx size breaks limit
    myzaddr = self.nodes[0].z_getnewaddress()

    recipients = []
    num_t_recipients = 3000
    amount_per_recipient = Decimal('0.00000001')
    errorString = ''
    for i in xrange(0,num_t_recipients):
        newtaddr = self.nodes[2].getnewaddress()
        recipients.append({"address":newtaddr, "amount":amount_per_recipient})
    try:
        self.nodes[0].z_sendmany(myzaddr, recipients)
    except JSONRPCException,e:
        errorString = e.error['message']
def run_test(self):
    """Exercise "tx-expiring-soon" handling over the p2p and RPC interfaces.

    A mininode (testnode0) feeds transactions to node 0. Transactions whose
    expiry height is too close are expected to be rejected from the mempool,
    refused via sendrawtransaction, omitted from "inv" announcements, and
    answered with "notfound" instead of "tx" on "getdata".  Node 2 starts
    disconnected so it can mine a competing, one-block-longer chain.
    """
    testnode0 = TestNode()
    connections = []
    connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0],
                                testnode0, "regtest", OVERWINTER_PROTO_VERSION))
    testnode0.add_connection(connections[0])

    # Start up network handling in another thread
    NetworkThread().start()
    testnode0.wait_for_verack()

    # Verify mininodes are connected to zcashd nodes
    peerinfo = self.nodes[0].getpeerinfo()
    versions = [x["version"] for x in peerinfo]
    assert_equal(1, versions.count(OVERWINTER_PROTO_VERSION))
    assert_equal(0, peerinfo[0]["banscore"])

    # Mine some blocks so we can spend
    coinbase_blocks = self.nodes[0].generate(200)
    node_address = self.nodes[0].getnewaddress()

    # Sync nodes 0 and 1 only; node 2 is intentionally isolated at height 0
    sync_blocks(self.nodes[:2])
    sync_mempools(self.nodes[:2])

    # Verify block count
    assert_equal(self.nodes[0].getblockcount(), 200)
    assert_equal(self.nodes[1].getblockcount(), 200)
    assert_equal(self.nodes[2].getblockcount(), 0)

    # Mininode sends a transaction expiring at height 203 in a "tx" message
    self.send_transaction(testnode0, coinbase_blocks[0], node_address, 203)

    # Assert that the tx is not in the mempool (expiring soon)
    assert_equal([], self.nodes[0].getrawmempool())
    assert_equal([], self.nodes[1].getrawmempool())
    assert_equal([], self.nodes[2].getrawmempool())

    # Mininode sends a transaction expiring at height 204 in a "tx" message
    tx2 = self.send_transaction(testnode0, coinbase_blocks[1], node_address, 204)

    # tx2 is not expiring soon, so nodes 0 and 1 accept it
    assert_equal([tx2.hash], self.nodes[0].getrawmempool())
    assert_equal([tx2.hash], self.nodes[1].getrawmempool())
    # node 2 is isolated
    assert_equal([], self.nodes[2].getrawmempool())

    # Verify txid for tx2
    self.verify_inv(testnode0, tx2)
    self.send_data_message(testnode0, tx2)
    self.verify_last_tx(testnode0, tx2)

    # Sync and mine an empty block with node 2, leaving tx in the mempool of
    # node0 and node1
    for blkhash in coinbase_blocks:
        blk = self.nodes[0].getblock(blkhash, 0)
        self.nodes[2].submitblock(blk)
    self.nodes[2].generate(1)

    # Verify block count: node 2 is now one block ahead
    assert_equal(self.nodes[0].getblockcount(), 200)
    assert_equal(self.nodes[1].getblockcount(), 200)
    assert_equal(self.nodes[2].getblockcount(), 201)

    # Reconnect node 2 to the network
    connect_nodes_bi(self.nodes, 0, 2)

    # Set up test node for node 2
    testnode2 = TestNode()
    connections.append(NodeConn('127.0.0.1', p2p_port(2), self.nodes[2],
                                testnode2, "regtest", OVERWINTER_PROTO_VERSION))
    testnode2.add_connection(connections[-1])

    # Verify block count after all nodes converge on node 2's chain
    sync_blocks(self.nodes[:3])
    assert_equal(self.nodes[0].getblockcount(), 201)
    assert_equal(self.nodes[1].getblockcount(), 201)
    assert_equal(self.nodes[2].getblockcount(), 201)

    # Verify contents of mempool
    assert_equal([tx2.hash], self.nodes[0].getrawmempool())
    assert_equal([tx2.hash], self.nodes[1].getrawmempool())
    assert_equal([], self.nodes[2].getrawmempool())

    # Confirm tx2 cannot be submitted to a mempool because it is expiring soon.
    try:
        rawtx2 = hexlify(tx2.serialize())
        self.nodes[2].sendrawtransaction(rawtx2)
        fail("Sending transaction should have failed")
    except JSONRPCException as e:
        assert_equal(
            "tx-expiring-soon: expiryheight is 204 but should be at least 205 to avoid transaction expiring soon",
            e.error['message']
        )

    self.send_data_message(testnode0, tx2)

    # Sync up with node after p2p messages delivered
    testnode0.sync_with_ping()

    # Verify node 0 does not reply to "getdata" by sending "tx" message, as
    # tx2 is expiring soon
    with mininode_lock:
        assert_equal(testnode0.last_tx, None)

    # Verify mininode received a "notfound" message containing the txid of tx2
    with mininode_lock:
        msg = testnode0.last_notfound
        assert_equal(len(msg.inv), 1)
        assert_equal(tx2.sha256, msg.inv[0].hash)

    # Create a transaction to verify that processing of "getdata" messages is
    # functioning
    tx3 = self.send_transaction(testnode0, coinbase_blocks[2], node_address, 999)

    self.send_data_message(testnode0, tx3)
    self.verify_last_tx(testnode0, tx3)

    # Verify txid for tx3 is returned in "inv", but tx2 which is expiring soon
    # is not returned
    self.verify_inv(testnode0, tx3)
    self.verify_inv(testnode2, tx3)

    # Verify contents of mempool
    assert_equal({tx2.hash, tx3.hash}, set(self.nodes[0].getrawmempool()))
    assert_equal({tx2.hash, tx3.hash}, set(self.nodes[1].getrawmempool()))
    assert_equal({tx3.hash}, set(self.nodes[2].getrawmempool()))

    # Verify banscore for nodes are still zero
    assert_equal(0, sum(peer["banscore"] for peer in self.nodes[0].getpeerinfo()))
    assert_equal(0, sum(peer["banscore"] for peer in self.nodes[2].getpeerinfo()))

    [c.disconnect_node() for c in connections]
def run_test(self):
    """Test mempool rejection of shield-spend double spends and anchor loss.

    tx_A and tx_B spend the same shield note: once tx_B is in the mempool,
    tx_A must be rejected for a duplicate nullifier, and once tx_B is mined,
    tx_A must be rejected for unmet shielded requirements.  Finally, tx_C is
    evicted from the mempool when the block holding its sapling anchor is
    disconnected.
    """
    miner = self.nodes[0]
    alice = self.nodes[1]
    # Fixed fee
    fee = 1

    self.log.info("Mining 120 blocks...")
    miner.generate(120)
    self.sync_all()
    # Sanity-check the test harness
    assert_equal([x.getblockcount() for x in self.nodes], [120] * self.num_nodes)

    # miner sends a 10 PIV note to Alice
    self.log.info("Shielding some coins for Alice...")
    alice_zaddr = alice.getnewshieldaddress()
    miner.shieldsendmany("from_transparent", [{"address": alice_zaddr,
                                               "amount": Decimal('10.00')}], 1, fee)
    miner.generate(1)
    self.sync_all()
    assert_equal(alice.getshieldbalance(alice_zaddr), Decimal('10.00'))

    # Alice creates (but doesn't send) tx_A to transparent address tadd_A
    self.log.info("Alice creating tx_A...")
    tadd_A = alice.getnewaddress()
    rawTx_hex = alice.rawshieldsendmany(alice_zaddr, [{"address": tadd_A,
                                                      "amount": Decimal('9.00')}], 1, fee)

    # Alice creates and sends tx_B, unshielding the same note to tadd_B
    self.log.info("Alice creating and sending tx_B...")
    tadd_B = alice.getnewaddress()
    txid_B = alice.shieldsendmany(alice_zaddr, [{"address": tadd_B,
                                                "amount": Decimal('9.00')}], 1, fee)

    # Miner receives tx_B and accepts it in the mempool
    assert (txid_B in alice.getrawmempool())
    sync_mempools(self.nodes)
    assert(txid_B in miner.getrawmempool())
    self.log.info("tx_B accepted in the memory pool.")

    # Now tx_A would double-spend the sapling note in the memory pool
    assert_raises_rpc_error(-26, "bad-txns-nullifier-double-spent",
                            alice.sendrawtransaction, rawTx_hex)
    self.log.info("tx_A NOT accepted in the mempool. Good.")

    # Mine tx_B and try to send tx_A again
    self.log.info("Mine a block and verify that tx_B gets on chain")
    miner.generate(1)
    self.sync_all()
    txB_json = alice.getrawtransaction(txid_B, True)
    assert("blockhash" in txB_json)
    self.log.info("trying to relay tx_A again...")
    # The note is now spent on-chain, so the failure reason changes
    assert_raises_rpc_error(-26, "bad-txns-shielded-requirements-not-met",
                            alice.sendrawtransaction, rawTx_hex)
    self.log.info("tx_A NOT accepted in the mempool. Good.")

    # miner sends another 10 PIV note to Alice
    self.log.info("Shielding some more coins for Alice...")
    miner.shieldsendmany("from_transparent", [{"address": alice_zaddr,
                                               "amount": Decimal('10.00')}], 1, fee)
    miner.generate(1)
    self.sync_all()
    assert_equal(alice.getshieldbalance(alice_zaddr), Decimal('10.00'))

    # Alice creates and sends tx_C, unshielding the note to tadd_C
    self.log.info("Alice creating and sending tx_C...")
    tadd_C = alice.getnewaddress()
    txC_hex = alice.rawshieldsendmany(alice_zaddr, [{"address": tadd_C,
                                                    "amount": Decimal('9.00')}], 1, fee)
    txid_C = alice.sendrawtransaction(txC_hex)

    # Miner receives tx_C and accepts it in the mempool
    sync_mempools(self.nodes)
    assert(txid_C in miner.getrawmempool())
    self.log.info("tx_C accepted in the memory pool.")

    # Now disconnect the block with the note's anchor,
    # and check that the tx is removed from the mempool
    self.log.info("Disconnect the last block to change the sapling anchor")
    anchor = alice.decoderawtransaction(txC_hex)['vShieldSpend'][0]['anchor']
    assert_equal(anchor, miner.getbestsaplinganchor())
    miner.invalidateblock(miner.getbestblockhash())
    assert (anchor != miner.getbestsaplinganchor())
    assert(txid_C not in miner.getrawmempool())
    self.log.info("Good. tx_C removed from the memory pool.")
def run_test(self):
    """Verify mempool package accounting and ancestor/descendant limits.

    Builds a MAX_ANCESTORS-long chain and checks every getrawmempool /
    getmempoolentry / getmempoolancestors / getmempooldescendants field,
    checks prioritisetransaction fee deltas propagate to package fees,
    then builds a wide MAX_DESCENDANTS tree, and finally exercises reorg
    handling when chained transactions re-enter the mempool.
    """
    # Mine some blocks and have them mature.
    self.nodes[0].generate(101)
    utxo = self.nodes[0].listunspent(10)
    txid = utxo[0]['txid']
    vout = utxo[0]['vout']
    value = utxo[0]['amount']
    fee = Decimal("0.0001")

    # MAX_ANCESTORS transactions off a confirmed tx should be fine
    chain = []
    for i in range(MAX_ANCESTORS):
        (txid, sent_value) = self.chain_transaction(self.nodes[0], txid, 0, value, fee, 1)
        value = sent_value
        chain.append(txid)

    # Check mempool has MAX_ANCESTORS transactions in it, and descendant and
    # ancestor count and fees should look correct
    mempool = self.nodes[0].getrawmempool(True)
    assert_equal(len(mempool), MAX_ANCESTORS)
    descendant_count = 1
    descendant_fees = 0
    descendant_size = 0

    ancestor_size = sum([mempool[tx]['size'] for tx in mempool])
    ancestor_count = MAX_ANCESTORS
    ancestor_fees = sum([mempool[tx]['fee'] for tx in mempool])

    descendants = []
    ancestors = list(chain)
    for x in reversed(chain):
        # Check that getmempoolentry is consistent with getrawmempool
        entry = self.nodes[0].getmempoolentry(x)
        assert_equal(entry, mempool[x])

        # Check that the descendant calculations are correct
        assert_equal(mempool[x]['descendantcount'], descendant_count)
        descendant_fees += mempool[x]['fee']
        assert_equal(mempool[x]['modifiedfee'], mempool[x]['fee'])
        assert_equal(mempool[x]['fees']['base'], mempool[x]['fee'])
        assert_equal(mempool[x]['fees']['modified'], mempool[x]['modifiedfee'])
        assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN)
        assert_equal(mempool[x]['fees']['descendant'], descendant_fees)
        descendant_size += mempool[x]['size']
        assert_equal(mempool[x]['descendantsize'], descendant_size)
        descendant_count += 1

        # Check that ancestor calculations are correct
        assert_equal(mempool[x]['ancestorcount'], ancestor_count)
        assert_equal(mempool[x]['ancestorfees'], ancestor_fees * COIN)
        assert_equal(mempool[x]['ancestorsize'], ancestor_size)
        ancestor_size -= mempool[x]['size']
        ancestor_fees -= mempool[x]['fee']
        ancestor_count -= 1

        # Check that parent/child list is correct
        assert_equal(mempool[x]['spentby'], descendants[-1:])
        assert_equal(mempool[x]['depends'], ancestors[-2:-1])

        # Check that getmempooldescendants is correct
        assert_equal(sorted(descendants), sorted(self.nodes[0].getmempooldescendants(x)))

        # Check getmempooldescendants verbose output is correct
        for descendant, dinfo in self.nodes[0].getmempooldescendants(x, True).items():
            assert_equal(dinfo['depends'], [chain[chain.index(descendant) - 1]])
            if dinfo['descendantcount'] > 1:
                assert_equal(dinfo['spentby'], [chain[chain.index(descendant) + 1]])
            else:
                assert_equal(dinfo['spentby'], [])
        descendants.append(x)

        # Check that getmempoolancestors is correct
        ancestors.remove(x)
        assert_equal(sorted(ancestors), sorted(self.nodes[0].getmempoolancestors(x)))

        # Check that getmempoolancestors verbose output is correct
        for ancestor, ainfo in self.nodes[0].getmempoolancestors(x, True).items():
            assert_equal(ainfo['spentby'], [chain[chain.index(ancestor) + 1]])
            if ainfo['ancestorcount'] > 1:
                assert_equal(ainfo['depends'], [chain[chain.index(ancestor) - 1]])
            else:
                assert_equal(ainfo['depends'], [])

    # Check that getmempoolancestors/getmempooldescendants correctly handle
    # verbose=true
    v_ancestors = self.nodes[0].getmempoolancestors(chain[-1], True)
    assert_equal(len(v_ancestors), len(chain) - 1)
    for x in v_ancestors.keys():
        assert_equal(mempool[x], v_ancestors[x])
    assert (chain[-1] not in v_ancestors.keys())

    v_descendants = self.nodes[0].getmempooldescendants(chain[0], True)
    assert_equal(len(v_descendants), len(chain) - 1)
    for x in v_descendants.keys():
        assert_equal(mempool[x], v_descendants[x])
    assert (chain[0] not in v_descendants.keys())

    # Check that ancestor modified fees includes fee deltas from
    # prioritisetransaction
    self.nodes[0].prioritisetransaction(txid=chain[0], fee_delta=1000)
    mempool = self.nodes[0].getrawmempool(True)

    ancestor_fees = 0
    for x in chain:
        ancestor_fees += mempool[x]['fee']
        assert_equal(mempool[x]['fees']['ancestor'], ancestor_fees + Decimal('0.00001'))
        assert_equal(mempool[x]['ancestorfees'], ancestor_fees * COIN + 1000)

    # Undo the prioritisetransaction for later tests
    self.nodes[0].prioritisetransaction(txid=chain[0], fee_delta=-1000)

    # Check that descendant modified fees includes fee deltas from
    # prioritisetransaction
    self.nodes[0].prioritisetransaction(txid=chain[-1], fee_delta=1000)
    mempool = self.nodes[0].getrawmempool(True)

    descendant_fees = 0
    for x in reversed(chain):
        descendant_fees += mempool[x]['fee']
        assert_equal(mempool[x]['fees']['descendant'], descendant_fees + Decimal('0.00001'))
        assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN + 1000)

    # Adding one more transaction on to the chain should fail.
    assert_raises_rpc_error(-26, "too-long-mempool-chain", self.chain_transaction,
                            self.nodes[0], txid, vout, value, fee, 1)

    # Check that prioritising a tx before it's added to the mempool works
    # First clear the mempool by mining a block.
    self.nodes[0].generate(1)
    sync_blocks(self.nodes)
    assert_equal(len(self.nodes[0].getrawmempool()), 0)
    # Prioritise a transaction that has been mined, then add it back to the
    # mempool by using invalidateblock.
    self.nodes[0].prioritisetransaction(txid=chain[-1], fee_delta=2000)
    self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
    # Keep node1's tip synced with node0
    self.nodes[1].invalidateblock(self.nodes[1].getbestblockhash())

    # Now check that the transaction is in the mempool, with the right modified fee
    mempool = self.nodes[0].getrawmempool(True)

    descendant_fees = 0
    for x in reversed(chain):
        descendant_fees += mempool[x]['fee']
        if (x == chain[-1]):
            assert_equal(mempool[x]['modifiedfee'], mempool[x]['fee'] + satoshi_round(0.00002))
            assert_equal(mempool[x]['fees']['modified'], mempool[x]['fee'] + satoshi_round(0.00002))
        assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN + 2000)
        assert_equal(mempool[x]['fees']['descendant'], descendant_fees + satoshi_round(0.00002))

    # TODO: check that node1's mempool is as expected
    # TODO: test ancestor size limits

    # Now test descendant chain limits
    txid = utxo[1]['txid']
    value = utxo[1]['amount']
    vout = utxo[1]['vout']

    transaction_package = []
    tx_children = []
    # First create one parent tx with 10 children
    (txid, sent_value) = self.chain_transaction(self.nodes[0], txid, vout, value, fee, 10)
    parent_transaction = txid
    for i in range(10):
        transaction_package.append({'txid': txid, 'vout': i, 'amount': sent_value})

    # Sign and send up to MAX_DESCENDANT transactions chained off the parent tx
    for i in range(MAX_DESCENDANTS - 1):
        utxo = transaction_package.pop(0)
        (txid, sent_value) = self.chain_transaction(self.nodes[0], utxo['txid'],
                                                    utxo['vout'], utxo['amount'], fee, 10)
        # BUGFIX: compare txid strings by value, not identity. The original
        # used `is`, which only matched when CPython happened to intern the
        # strings, leaving tx_children empty and the 'spentby' check vacuous.
        if utxo['txid'] == parent_transaction:
            tx_children.append(txid)
        for j in range(10):
            transaction_package.append({'txid': txid, 'vout': j, 'amount': sent_value})

    mempool = self.nodes[0].getrawmempool(True)
    assert_equal(mempool[parent_transaction]['descendantcount'], MAX_DESCENDANTS)
    assert_equal(sorted(mempool[parent_transaction]['spentby']), sorted(tx_children))

    for child in tx_children:
        assert_equal(mempool[child]['depends'], [parent_transaction])

    # Sending one more chained transaction will fail
    utxo = transaction_package.pop(0)
    assert_raises_rpc_error(-26, "too-long-mempool-chain", self.chain_transaction,
                            self.nodes[0], utxo['txid'], utxo['vout'], utxo['amount'], fee, 10)

    # TODO: check that node1's mempool is as expected
    # TODO: test descendant size limits

    # Test reorg handling
    # First, the basics:
    self.nodes[0].generate(1)
    sync_blocks(self.nodes)
    self.nodes[1].invalidateblock(self.nodes[0].getbestblockhash())
    self.nodes[1].reconsiderblock(self.nodes[0].getbestblockhash())

    # Now test the case where node1 has a transaction T in its mempool that
    # depends on transactions A and B which are in a mined block, and the
    # block containing A and B is disconnected, AND B is not accepted back
    # into node1's mempool because its ancestor count is too high.

    # Create 8 transactions, like so:
    # Tx0 -> Tx1 (vout0)
    #   \--> Tx2 (vout1) -> Tx3 -> Tx4 -> Tx5 -> Tx6 -> Tx7
    #
    # Mine them in the next block, then generate a new tx8 that spends
    # Tx1 and Tx7, and add to node1's mempool, then disconnect the
    # last block.

    # Create tx0 with 2 outputs
    utxo = self.nodes[0].listunspent()
    txid = utxo[0]['txid']
    value = utxo[0]['amount']
    vout = utxo[0]['vout']

    send_value = satoshi_round((value - fee) / 2)
    inputs = [{'txid': txid, 'vout': vout}]
    outputs = {}
    for i in range(2):
        outputs[self.nodes[0].getnewaddress()] = send_value
    rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
    signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx)
    txid = self.nodes[0].sendrawtransaction(signedtx['hex'])
    tx0_id = txid
    value = send_value

    # Create tx1
    tx1_id, _ = self.chain_transaction(self.nodes[0], tx0_id, 0, value, fee, 1)

    # Create tx2-7
    vout = 1
    txid = tx0_id
    for i in range(6):
        (txid, sent_value) = self.chain_transaction(self.nodes[0], txid, vout, value, fee, 1)
        vout = 0
        value = sent_value

    # Mine these in a block
    self.nodes[0].generate(1)
    self.sync_all()

    # Now generate tx8, with a big fee
    inputs = [{'txid': tx1_id, 'vout': 0}, {'txid': txid, 'vout': 0}]
    outputs = {self.nodes[0].getnewaddress(): send_value + value - 4 * fee}
    rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
    signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx)
    txid = self.nodes[0].sendrawtransaction(signedtx['hex'])
    sync_mempools(self.nodes)

    # Now try to disconnect the tip on each node...
    self.nodes[1].invalidateblock(self.nodes[1].getbestblockhash())
    self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
    sync_blocks(self.nodes)
def test_mix_contract_transaction_fork(self, gen_blocks=False):
    """Run a mix of transaction types on two forked chains, then rejoin.

    Two scenarios, selected by ``gen_blocks``:
      1. rejoin the network without mining the pending transactions;
      2. mine blocks on both forks first, then rejoin.
    :return: None
    """
    self.sync_all()
    self.node1.generate(2)
    assert_equal(self.node1.getrawmempool(), [])  # make sure mempool empty
    assert_equal(self.node0.getrawmempool(), [])  # make sure mempool empty
    ct = Contract(self.node0, self.options.tmpdir, debug=False)
    ct2 = Contract(self.node0, self.options.tmpdir, debug=False)
    ct2.call_payable(amount=1000)
    print(ct.publish_txid)
    self.sync_all()
    self.node0.generate(2)
    self.sync_all()
    blocks_num = self.node0.getblockcount()

    # split mgc network into two groups: (node0, node1) and (node2, node3)
    self.split_network()
    self.node0.generate(2)  # fork
    self.node2.generate(8)  # fork
    balances = [n.getbalance() for n in self.nodes]

    # in group 1
    # normal transaction
    sendtxs_a = [self.node0.sendtoaddress(self.node3.getnewaddress(), 1000)
                 for i in range(5)]
    # publish contract transaction
    ccontracts_a = [Contract(self.node0, self.options.tmpdir, debug=False)
                    for i in range(5)]
    # call contract transaction
    call_contract_txs_a = [ct.call_payable(amount=1000).txid
                           for ct in ccontracts_a]
    call_contract_txs_a1 = [ct.call_callOtherContractTest(ccontracts_a[0].contract_id,
                                                          'callOtherContractTest',
                                                          ccontracts_a[-1].contract_id,
                                                          "contractDataTest").txid
                            for ct in ccontracts_a]
    # long mempool chain transaction
    for i in range(8):
        result = ccontracts_a[1].call_reentrancyTest(throw_exception=False)
    ccontracts_a[2].call_maxContractCallTest(2).txid
    self.sync_all([self.nodes[:2], self.nodes[2:]])

    # in group 2
    sendtxs_b = [self.node2.sendtoaddress(self.node1.getnewaddress(), 1000)
                 for i in range(5)]
    # publish contract transaction
    ccontracts_b = [Contract(self.node2, self.options.tmpdir, debug=False)
                    for i in range(5)]
    # call contract transaction
    call_contract_txs_b = [ct.call_payable(amount=1000).txid
                           for ct in ccontracts_b]
    call_contract_txs_b1 = [ct.call_callOtherContractTest(ccontracts_b[0].contract_id,
                                                          'callOtherContractTest',
                                                          ccontracts_b[-1].contract_id,
                                                          "contractDataTest").txid
                            for ct in ccontracts_b]
    # long mempool chain transaction
    for i in range(8):
        result = ccontracts_b[1].call_reentrancyTest(throw_exception=False)
    ccontracts_b[2].call_maxContractCallTest(2).txid
    self.sync_all([self.nodes[:2], self.nodes[2:]])

    # join network
    if gen_blocks:
        blocks_a = self.node0.generate(2)
        blocks_b = self.node2.generate(6)
        more_work_blocks = self.make_more_work_than(2, 0)
        for i in range(4):
            print("before join:", i, self.nodes[i].getblockcount(),
                  int(self.nodes[i].getchaintipwork(), 16))
            print("mempool:", self.nodes[i].getrawmempool())
    print("join network")
    connect_nodes_bi(self.nodes, 1, 2)
    try:
        print("sync_mempools.......")
        # sync_mempools(self.nodes, timeout=30)
        print("sync_mempools done")
    except Exception as e:
        print("sync mempool failed,ignore!")
    sync_blocks(self.nodes)
    if gen_blocks:
        for i in range(4):
            print("mempool:", self.nodes[i].getrawmempool())
    for i in range(4):
        print(i, self.nodes[i].getblockcount(),
              int(self.nodes[i].getchaintipwork(), 16))
    tips = self.nodes[0].getchaintips()
    print("tips:", tips)
    assert_equal(len(tips), self.tips_num + 1)
    self.tips_num += 1

    # After the merge, each group's nodes call their contracts again; the
    # resulting transactions should be discarded by nodes of the other group,
    # because those contracts do not exist on the other fork.
    result = ccontracts_a[2].call_reentrancyTest()
    if not result.reason():
        tx1 = result.txid
    result = ccontracts_b[2].call_reentrancyTest()
    if not result.reason():
        tx2 = result.txid
    # tx1 = ccontracts_a[2].call_reentrancyTest().txid
    # tx2 = ccontracts_b[2].call_reentrancyTest().txid
    try:
        sync_mempools(self.nodes, timeout=30)
    except Exception as e:
        print("sync_mempools(self.nodes,timeout = 30) not done")
    # wait_until(lambda: tx1 not in self.node2.getrawmempool(), timeout=10)
    # wait_until(lambda: tx1 in self.node1.getrawmempool(), timeout=10)
    # wait_until(lambda: tx2 not in self.node1.getrawmempool(), timeout=10)
    # wait_until(lambda: tx2 in self.node3.getrawmempool(), timeout=10)
    for i, n in enumerate(self.nodes):
        n.generate(2)
        print("node{} generate done".format(i))
    sync_blocks(self.nodes)
def run_test(self):
    """End-to-end test of the z_mergetoaddress RPC.

    Covers argument validation (bad 'from' array, watch-only source, fee and
    limit bounds), merging UTXOs and notes with explicit fees, the special
    "ANY_ZADDR"/"ANY_TADDR"/"*" source selectors, the max-tx-size constraint
    on UTXO selection, locking of in-flight inputs across queued operations,
    and the default/explicit UTXO and note limit parameters.
    """
    print("Mining blocks...")

    self.nodes[0].generate(1)
    do_not_shield_taddr = self.nodes[0].getnewaddress()

    self.nodes[0].generate(4)
    walletinfo = self.nodes[0].getwalletinfo()
    assert_equal(walletinfo['immature_balance'], 50)
    assert_equal(walletinfo['balance'], 0)
    self.sync_all()
    self.nodes[2].generate(1)
    self.nodes[2].getnewaddress()
    self.nodes[2].generate(1)
    self.nodes[2].getnewaddress()
    self.nodes[2].generate(1)
    self.sync_all()
    self.nodes[1].generate(101)
    self.sync_all()
    assert_equal(self.nodes[0].getbalance(), 50)
    assert_equal(self.nodes[1].getbalance(), 10)
    assert_equal(self.nodes[2].getbalance(), 30)

    # Shield the coinbase
    myzaddr = self.nodes[0].z_getnewaddress('sprout')
    result = self.nodes[0].z_shieldcoinbase("*", myzaddr, 0)
    wait_and_assert_operationid_status(self.nodes[0], result['opid'])
    self.sync_all()
    self.nodes[1].generate(1)
    self.sync_all()

    # Prepare some UTXOs and notes for merging
    mytaddr = self.nodes[0].getnewaddress()
    mytaddr2 = self.nodes[0].getnewaddress()
    mytaddr3 = self.nodes[0].getnewaddress()
    result = self.nodes[0].z_sendmany(myzaddr, [
        {'address': do_not_shield_taddr, 'amount': 10},
        {'address': mytaddr, 'amount': 10},
        {'address': mytaddr2, 'amount': 10},
        {'address': mytaddr3, 'amount': 10},
    ], 1, 0)
    wait_and_assert_operationid_status(self.nodes[0], result)
    self.sync_all()
    self.nodes[1].generate(1)
    self.sync_all()

    # Merging will fail because from arguments need to be in an array
    try:
        self.nodes[0].z_mergetoaddress("*", myzaddr)
        assert (False)
    except JSONRPCException as e:
        errorString = e.error['message']
    assert_equal("JSON value is not an array as expected" in errorString, True)

    # Merging will fail when trying to spend from watch-only address
    self.nodes[2].importaddress(mytaddr)
    try:
        self.nodes[2].z_mergetoaddress([mytaddr], myzaddr)
        assert (False)
    except JSONRPCException as e:
        errorString = e.error['message']
    assert_equal("Could not find any funds to merge" in errorString, True)

    # Merging will fail because fee is negative
    try:
        self.nodes[0].z_mergetoaddress(["*"], myzaddr, -1)
        assert (False)
    except JSONRPCException as e:
        errorString = e.error['message']
    assert_equal("Amount out of range" in errorString, True)

    # Merging will fail because fee is larger than MAX_MONEY
    try:
        self.nodes[0].z_mergetoaddress(["*"], myzaddr, Decimal('21000000.00000001'))
        assert (False)
    except JSONRPCException as e:
        errorString = e.error['message']
    assert_equal("Amount out of range" in errorString, True)

    # Merging will fail because fee is larger than sum of UTXOs
    try:
        self.nodes[0].z_mergetoaddress(["*"], myzaddr, 999)
        assert (False)
    except JSONRPCException as e:
        errorString = e.error['message']
    assert_equal("Insufficient funds" in errorString, True)

    # Merging will fail because transparent limit parameter must be at least 0
    try:
        self.nodes[0].z_mergetoaddress(["*"], myzaddr, Decimal('0.001'), -1)
        assert (False)
    except JSONRPCException as e:
        errorString = e.error['message']
    assert_equal("Limit on maximum number of UTXOs cannot be negative" in errorString, True)

    # Merging will fail because transparent limit parameter is absurdly large
    try:
        self.nodes[0].z_mergetoaddress(["*"], myzaddr, Decimal('0.001'), 99999999999999)
        assert (False)
    except JSONRPCException as e:
        errorString = e.error['message']
    assert_equal("JSON integer out of range" in errorString, True)

    # Merging will fail because shielded limit parameter must be at least 0
    try:
        self.nodes[0].z_mergetoaddress(["*"], myzaddr, Decimal('0.001'), 50, -1)
        assert (False)
    except JSONRPCException as e:
        errorString = e.error['message']
    assert_equal("Limit on maximum number of notes cannot be negative" in errorString, True)

    # Merging will fail because shielded limit parameter is absurdly large
    try:
        self.nodes[0].z_mergetoaddress(["*"], myzaddr, Decimal('0.001'), 50, 99999999999999)
        assert (False)
    except JSONRPCException as e:
        errorString = e.error['message']
    assert_equal("JSON integer out of range" in errorString, True)

    # Merging will fail for this specific case where it would spend a fee and do nothing
    try:
        self.nodes[0].z_mergetoaddress([mytaddr], mytaddr)
        assert (False)
    except JSONRPCException as e:
        errorString = e.error['message']
    assert_equal("Destination address is also the only source address, and all its funds are already merged" in errorString, True)

    # Merge UTXOs from node 0 of value 30, standard fee of 0.00010000
    result = self.nodes[0].z_mergetoaddress([mytaddr, mytaddr2, mytaddr3], myzaddr)
    wait_and_assert_operationid_status(self.nodes[0], result['opid'])
    self.sync_all()
    self.nodes[1].generate(1)
    self.sync_all()

    # Confirm balances and that do_not_shield_taddr containing funds of 10 was left alone
    assert_equal(self.nodes[0].getbalance(), 10)
    assert_equal(self.nodes[0].z_getbalance(do_not_shield_taddr), Decimal('10.0'))
    assert_equal(self.nodes[0].z_getbalance(myzaddr), Decimal('39.99990000'))
    assert_equal(self.nodes[1].getbalance(), 40)
    assert_equal(self.nodes[2].getbalance(), 30)

    # Shield all notes to another z-addr
    myzaddr2 = self.nodes[0].z_getnewaddress('sprout')
    result = self.nodes[0].z_mergetoaddress(["ANY_ZADDR"], myzaddr2, 0)
    assert_equal(result["mergingUTXOs"], Decimal('0'))
    assert_equal(result["remainingUTXOs"], Decimal('0'))
    assert_equal(result["mergingNotes"], Decimal('2'))
    assert_equal(result["remainingNotes"], Decimal('0'))
    wait_and_assert_operationid_status(self.nodes[0], result['opid'])
    self.sync_all()
    blockhash = self.nodes[1].generate(1)
    self.sync_all()
    assert_equal(len(self.nodes[0].getblock(blockhash[0])['tx']), 2)
    assert_equal(self.nodes[0].z_getbalance(myzaddr), 0)
    assert_equal(self.nodes[0].z_getbalance(myzaddr2), Decimal('39.99990000'))

    # Shield coinbase UTXOs from any node 2 taddr, and set fee to 0
    result = self.nodes[2].z_shieldcoinbase("*", myzaddr, 0)
    wait_and_assert_operationid_status(self.nodes[2], result['opid'])
    self.sync_all()
    self.nodes[1].generate(1)
    self.sync_all()
    assert_equal(self.nodes[0].getbalance(), 10)
    assert_equal(self.nodes[0].z_getbalance(myzaddr), Decimal('30'))
    assert_equal(self.nodes[0].z_getbalance(myzaddr2), Decimal('39.99990000'))
    assert_equal(self.nodes[1].getbalance(), 60)
    assert_equal(self.nodes[2].getbalance(), 0)

    # Merge all notes from node 0 into a node 0 taddr, and set fee to 0
    result = self.nodes[0].z_mergetoaddress(["ANY_ZADDR"], mytaddr, 0)
    wait_and_assert_operationid_status(self.nodes[0], result['opid'])
    self.sync_all()
    self.nodes[1].generate(1)
    self.sync_all()
    assert_equal(self.nodes[0].getbalance(), Decimal('79.99990000'))
    assert_equal(self.nodes[0].z_getbalance(do_not_shield_taddr), Decimal('10.0'))
    assert_equal(self.nodes[0].z_getbalance(mytaddr), Decimal('69.99990000'))
    assert_equal(self.nodes[0].z_getbalance(myzaddr), 0)
    assert_equal(self.nodes[0].z_getbalance(myzaddr2), 0)
    assert_equal(self.nodes[1].getbalance(), 70)
    assert_equal(self.nodes[2].getbalance(), 0)

    # Merge all node 0 UTXOs together into a node 1 taddr, and set fee to 0
    self.nodes[1].getnewaddress()  # Ensure we have an empty address
    n1taddr = self.nodes[1].getnewaddress()
    result = self.nodes[0].z_mergetoaddress(["ANY_TADDR"], n1taddr, 0)
    wait_and_assert_operationid_status(self.nodes[0], result['opid'])
    self.sync_all()
    self.nodes[1].generate(1)
    self.sync_all()
    assert_equal(self.nodes[0].getbalance(), 0)
    assert_equal(self.nodes[0].z_getbalance(do_not_shield_taddr), 0)
    assert_equal(self.nodes[0].z_getbalance(mytaddr), 0)
    assert_equal(self.nodes[0].z_getbalance(myzaddr), 0)
    assert_equal(self.nodes[1].getbalance(), Decimal('159.99990000'))
    assert_equal(self.nodes[1].z_getbalance(n1taddr), Decimal('79.99990000'))
    assert_equal(self.nodes[2].getbalance(), 0)

    # Generate 800 regular UTXOs on node 0, and 20 regular UTXOs on node 2
    mytaddr = self.nodes[0].getnewaddress()
    n2taddr = self.nodes[2].getnewaddress()
    self.nodes[1].generate(1000)
    self.sync_all()
    for i in range(800):
        self.nodes[1].sendtoaddress(mytaddr, 1)
    for i in range(20):
        self.nodes[1].sendtoaddress(n2taddr, 1)
    self.nodes[1].generate(1)
    self.sync_all()

    # Merging the 800 UTXOs will occur over two transactions, since max tx size is 100,000 bytes.
    # We don't verify mergingTransparentValue as UTXOs are not selected in any specific order, so value can change on each test run.
    # We set an unrealistically high limit parameter of 99999, to verify that max tx size will constrain the number of UTXOs.
    result = self.nodes[0].z_mergetoaddress([mytaddr], myzaddr, 0, 99999)
    assert_equal(result["mergingUTXOs"], Decimal('662'))
    assert_equal(result["remainingUTXOs"], Decimal('138'))
    assert_equal(result["mergingNotes"], Decimal('0'))
    assert_equal(result["mergingShieldedValue"], Decimal('0'))
    assert_equal(result["remainingNotes"], Decimal('0'))
    assert_equal(result["remainingShieldedValue"], Decimal('0'))
    remainingTransparentValue = result["remainingTransparentValue"]
    opid1 = result['opid']

    # Verify that UTXOs are locked (not available for selection) by queuing up another merging operation
    result = self.nodes[0].z_mergetoaddress([mytaddr], myzaddr, 0, 0)
    assert_equal(result["mergingUTXOs"], Decimal('138'))
    assert_equal(result["mergingTransparentValue"], Decimal(remainingTransparentValue))
    assert_equal(result["remainingUTXOs"], Decimal('0'))
    assert_equal(result["remainingTransparentValue"], Decimal('0'))
    assert_equal(result["mergingNotes"], Decimal('0'))
    assert_equal(result["mergingShieldedValue"], Decimal('0'))
    assert_equal(result["remainingNotes"], Decimal('0'))
    assert_equal(result["remainingShieldedValue"], Decimal('0'))
    opid2 = result['opid']

    # wait for both async operations to complete
    wait_and_assert_operationid_status(self.nodes[0], opid1)
    wait_and_assert_operationid_status(self.nodes[0], opid2)

    # sync_all() invokes sync_mempool() but node 2's mempool limit will cause tx1 and tx2 to be rejected.
    # So instead, we sync on blocks and mempool for node 0 and node 1, and after a new block is generated
    # which mines tx1 and tx2, all nodes will have an empty mempool which can then be synced.
    sync_blocks(self.nodes[:2])
    sync_mempools(self.nodes[:2])
    # Generate enough blocks to ensure all transactions are mined
    while self.nodes[1].getmempoolinfo()['size'] > 0:
        self.nodes[1].generate(1)
    self.sync_all()

    # Verify maximum number of UTXOs which node 0 can shield is set by default limit parameter of 50
    mytaddr = self.nodes[0].getnewaddress()
    for i in range(100):
        self.nodes[1].sendtoaddress(mytaddr, 1)
    self.nodes[1].generate(1)
    self.sync_all()
    result = self.nodes[0].z_mergetoaddress([mytaddr], myzaddr, Decimal('0.0001'))
    assert_equal(result["mergingUTXOs"], Decimal('50'))
    assert_equal(result["remainingUTXOs"], Decimal('50'))
    assert_equal(result["mergingNotes"], Decimal('0'))
    # Remaining notes are only counted if we are trying to merge any notes
    assert_equal(result["remainingNotes"], Decimal('0'))
    wait_and_assert_operationid_status(self.nodes[0], result['opid'])

    # Verify maximum number of UTXOs which node 0 can shield can be set by the limit parameter
    result = self.nodes[0].z_mergetoaddress([mytaddr], myzaddr, Decimal('0.0001'), 33)
    assert_equal(result["mergingUTXOs"], Decimal('33'))
    assert_equal(result["remainingUTXOs"], Decimal('17'))
    assert_equal(result["mergingNotes"], Decimal('0'))
    # Remaining notes are only counted if we are trying to merge any notes
    assert_equal(result["remainingNotes"], Decimal('0'))
    wait_and_assert_operationid_status(self.nodes[0], result['opid'])
    sync_blocks(self.nodes)
    sync_mempools(self.nodes)
    self.nodes[1].generate(1)
    self.sync_all()

    # Verify maximum number of notes which node 0 can shield can be set by the limit parameter
    # Also check that we can set off a second merge before the first one is complete
    # myzaddr has 5 notes at this point
    result1 = self.nodes[0].z_mergetoaddress([myzaddr], myzaddr, 0.0001, 50, 2)
    result2 = self.nodes[0].z_mergetoaddress([myzaddr], myzaddr, 0.0001, 50, 2)

    # First merge should select from all notes
    assert_equal(result1["mergingUTXOs"], Decimal('0'))
    # Remaining UTXOs are only counted if we are trying to merge any UTXOs
    assert_equal(result1["remainingUTXOs"], Decimal('0'))
    assert_equal(result1["mergingNotes"], Decimal('2'))
    assert_equal(result1["remainingNotes"], Decimal('3'))

    # Second merge should ignore locked notes
    assert_equal(result2["mergingUTXOs"], Decimal('0'))
    assert_equal(result2["remainingUTXOs"], Decimal('0'))
    assert_equal(result2["mergingNotes"], Decimal('2'))
    assert_equal(result2["remainingNotes"], Decimal('1'))
    wait_and_assert_operationid_status(self.nodes[0], result1['opid'])
    wait_and_assert_operationid_status(self.nodes[0], result2['opid'])

    self.sync_all()
    self.nodes[1].generate(1)
    self.sync_all()

    # Shield both UTXOs and notes to a z-addr
    result = self.nodes[0].z_mergetoaddress(["*"], myzaddr, 0, 10, 2)
    assert_equal(result["mergingUTXOs"], Decimal('10'))
    assert_equal(result["remainingUTXOs"], Decimal('7'))
    assert_equal(result["mergingNotes"], Decimal('2'))
    assert_equal(result["remainingNotes"], Decimal('1'))
    wait_and_assert_operationid_status(self.nodes[0], result['opid'])
    sync_blocks(self.nodes)
    sync_mempools(self.nodes)
    self.nodes[1].generate(1)
    self.sync_all()
def run_test(self, test):
    """End-to-end RPC test of z_mergetoaddress.

    First exercises argument validation (non-array 'from' list, watch-only
    source, out-of-range fees, negative/oversized transparent and shielded
    limit parameters, mixing Sprout and Sapling sources), then performs a
    sequence of merges between transparent addresses and z-addresses,
    checking the mergingUTXOs/mergingNotes/remaining* counters and the
    resulting balances after each mined block.

    `test` supplies the node list and sync helpers; `self` supplies
    pool-specific configuration (addr_type, any_zaddr, any_zaddr_or_utxo,
    utxos_to_generate, utxos_in_tx1, utxos_in_tx2) — presumably set by a
    derived per-pool class; confirm against the subclass definitions.
    """
    print("Mining blocks...")
    test.nodes[0].generate(1)
    do_not_shield_taddr = test.nodes[0].getnewaddress()
    test.nodes[0].generate(4)
    test.sync_all()
    walletinfo = test.nodes[0].getwalletinfo()
    # NOTE(review): the 0.97 factor presumably reflects a chain-level
    # treasury/funding split of the block subsidy — confirm against
    # this chain's consensus parameters.
    assert_equal(walletinfo['immature_balance'], 3920400 * 0.97)
    assert_equal(walletinfo['balance'], 0)
    test.sync_all()
    test.nodes[2].generate(1)
    test.nodes[2].getnewaddress()
    test.nodes[2].generate(1)
    test.nodes[2].getnewaddress()
    test.nodes[2].generate(1)
    test.sync_all()
    # 101 blocks so the earlier coinbases mature and become spendable.
    test.nodes[1].generate(101)
    test.sync_all()
    assert_equal(test.nodes[0].getbalance(), 3920400 * 0.97)
    assert_equal(test.nodes[1].getbalance(), 100 * 0.97)
    assert_equal(test.nodes[2].getbalance(), 300 * 0.97)
    # Shield the coinbase
    myzaddr = test.nodes[0].z_getnewaddress(self.addr_type)
    result = test.nodes[0].z_shieldcoinbase("*", myzaddr, 0)
    wait_and_assert_operationid_status(test.nodes[0], result['opid'])
    test.sync_all()
    test.nodes[1].generate(1)
    test.sync_all()
    # Prepare some UTXOs and notes for merging
    mytaddr = test.nodes[0].getnewaddress()
    mytaddr2 = test.nodes[0].getnewaddress()
    mytaddr3 = test.nodes[0].getnewaddress()
    result = test.nodes[0].z_sendmany(myzaddr, [
        {'address': do_not_shield_taddr, 'amount': 10},
        {'address': mytaddr, 'amount': 10},
        {'address': mytaddr2, 'amount': 10},
        {'address': mytaddr3, 'amount': 10},
    ], 1, 0)
    wait_and_assert_operationid_status(test.nodes[0], result)
    test.sync_all()
    test.nodes[1].generate(1)
    test.sync_all()
    # Merging will fail because from arguments need to be in an array
    assert_mergetoaddress_exception(
        "JSON value is not an array as expected",
        lambda: test.nodes[0].z_mergetoaddress("notanarray", myzaddr))
    # Merging will fail when trying to spend from watch-only address
    test.nodes[2].importaddress(mytaddr)
    assert_mergetoaddress_exception(
        "Could not find any funds to merge.",
        lambda: test.nodes[2].z_mergetoaddress([mytaddr], myzaddr))
    # Merging will fail because fee is negative
    assert_mergetoaddress_exception(
        "Amount out of range",
        lambda: test.nodes[0].z_mergetoaddress(
            self.any_zaddr_or_utxo, myzaddr, -1))
    # Merging will fail because fee is larger than MAX_MONEY
    assert_mergetoaddress_exception(
        "Amount out of range",
        lambda: test.nodes[0].z_mergetoaddress(
            self.any_zaddr_or_utxo, myzaddr, Decimal('214160000.1')))
    # Merging will fail because fee is larger than sum of UTXOs
    assert_mergetoaddress_exception(
        "Insufficient funds, have 3802788.00, which is less than miners fee 9999999.00",
        lambda: test.nodes[0].z_mergetoaddress(self.any_zaddr_or_utxo, myzaddr, 9999999))
    # Merging will fail because transparent limit parameter must be at least 0
    assert_mergetoaddress_exception(
        "Limit on maximum number of UTXOs cannot be negative",
        lambda: test.nodes[0].z_mergetoaddress(
            self.any_zaddr_or_utxo, myzaddr, Decimal('0.001'), -1))
    # Merging will fail because transparent limit parameter is absurdly large
    assert_mergetoaddress_exception(
        "JSON integer out of range",
        lambda: test.nodes[
            0].z_mergetoaddress(self.any_zaddr_or_utxo, myzaddr, Decimal('0.001'), 99999999999999))
    # Merging will fail because shielded limit parameter must be at least 0
    assert_mergetoaddress_exception(
        "Limit on maximum number of notes cannot be negative",
        lambda: test.nodes[0].z_mergetoaddress(
            self.any_zaddr_or_utxo, myzaddr, Decimal('0.001'), 50, -1))
    # Merging will fail because shielded limit parameter is absurdly large
    assert_mergetoaddress_exception(
        "JSON integer out of range",
        lambda: test.nodes[
            0].z_mergetoaddress(self.any_zaddr_or_utxo, myzaddr, Decimal('0.001'), 50, 99999999999999))
    # Merging will fail for this specific case where it would spend a fee and do nothing
    assert_mergetoaddress_exception(
        "Destination address is also the only source address, and all its funds are already merged.",
        lambda: test.nodes[0].z_mergetoaddress([mytaddr], mytaddr))
    # Merging will fail if we try to specify from Sprout AND Sapling
    assert_mergetoaddress_exception(
        "Cannot send from both Sprout and Sapling addresses using z_mergetoaddress",
        lambda: test.nodes[0].z_mergetoaddress(
            ["ANY_SPROUT", "ANY_SAPLING"], mytaddr))
    # Merge UTXOs from node 0 of value 30, default fee
    result = test.nodes[0].z_mergetoaddress([mytaddr, mytaddr2, mytaddr3], myzaddr)
    wait_and_assert_operationid_status(test.nodes[0], result['opid'])
    test.sync_all()
    test.nodes[1].generate(1)
    test.sync_all()
    # Confirm balances and that do_not_shield_taddr containing funds of 10 was left alone
    assert_equal(test.nodes[0].getbalance(), 10)
    assert_equal(test.nodes[0].z_getbalance(do_not_shield_taddr), Decimal('10.0'))
    assert_equal(test.nodes[0].z_getbalance(myzaddr), Decimal('3802778.0') - DEFAULT_FEE)
    assert_equal(test.nodes[1].getbalance(), 400 * 0.97)
    assert_equal(test.nodes[2].getbalance(), 300 * 0.97)
    # Shield all notes to another z-addr
    myzaddr2 = test.nodes[0].z_getnewaddress(self.addr_type)
    result = test.nodes[0].z_mergetoaddress(self.any_zaddr, myzaddr2, 0)
    assert_equal(result["mergingUTXOs"], Decimal('0'))
    assert_equal(result["remainingUTXOs"], Decimal('0'))
    assert_equal(result["mergingNotes"], Decimal('2'))
    assert_equal(result["remainingNotes"], Decimal('0'))
    wait_and_assert_operationid_status(test.nodes[0], result['opid'])
    test.sync_all()
    blockhash = test.nodes[1].generate(1)
    test.sync_all()
    # Block contains the coinbase plus exactly the one merge transaction.
    assert_equal(len(test.nodes[0].getblock(blockhash[0])['tx']), 2)
    assert_equal(test.nodes[0].z_getbalance(myzaddr), 0)
    assert_equal(test.nodes[0].z_getbalance(myzaddr2), Decimal('3802778.0') - DEFAULT_FEE)
    # Shield coinbase UTXOs from any node 2 taddr, and set fee to 0
    result = test.nodes[2].z_shieldcoinbase("*", myzaddr, 0)
    wait_and_assert_operationid_status(test.nodes[2], result['opid'])
    test.sync_all()
    test.nodes[1].generate(1)
    test.sync_all()
    assert_equal(test.nodes[0].getbalance(), 10)
    assert_equal(test.nodes[0].z_getbalance(myzaddr), Decimal('291'))
    assert_equal(test.nodes[0].z_getbalance(myzaddr2), Decimal('3802778.0') - DEFAULT_FEE)
    assert_equal(test.nodes[1].getbalance(), 600 * 0.97)
    assert_equal(test.nodes[2].getbalance(), 0)
    # Merge all notes from node 0 into a node 0 taddr, and set fee to 0
    result = test.nodes[0].z_mergetoaddress(self.any_zaddr, mytaddr, 0)
    wait_and_assert_operationid_status(test.nodes[0], result['opid'])
    test.sync_all()
    test.nodes[1].generate(1)
    test.sync_all()
    assert_equal(test.nodes[0].getbalance(), Decimal('3803079.0') - DEFAULT_FEE)
    assert_equal(test.nodes[0].z_getbalance(do_not_shield_taddr), Decimal('10.0'))
    assert_equal(test.nodes[0].z_getbalance(mytaddr), Decimal('3803069.0') - DEFAULT_FEE)
    assert_equal(test.nodes[0].z_getbalance(myzaddr), 0)
    assert_equal(test.nodes[0].z_getbalance(myzaddr2), 0)
    assert_equal(test.nodes[1].getbalance(), 679)
    assert_equal(test.nodes[2].getbalance(), 0)
    # Merge all node 0 UTXOs together into a node 1 taddr, and set fee to 0
    test.nodes[1].getnewaddress()  # Ensure we have an empty address
    n1taddr = test.nodes[1].getnewaddress()
    result = test.nodes[0].z_mergetoaddress(["ANY_TADDR"], n1taddr, 0)
    wait_and_assert_operationid_status(test.nodes[0], result['opid'])
    test.sync_all()
    test.nodes[1].generate(1)
    test.sync_all()
    assert_equal(test.nodes[0].getbalance(), 0)
    assert_equal(test.nodes[0].z_getbalance(do_not_shield_taddr), 0)
    assert_equal(test.nodes[0].z_getbalance(mytaddr), 0)
    assert_equal(test.nodes[0].z_getbalance(myzaddr), 0)
    assert_equal(test.nodes[1].getbalance(), Decimal('3803855.0') - DEFAULT_FEE)
    assert_equal(test.nodes[1].z_getbalance(n1taddr), Decimal('3803079.0') - DEFAULT_FEE)
    assert_equal(test.nodes[2].getbalance(), 0)
    # Generate self.utxos_to_generate regular UTXOs on node 0, and 20 regular UTXOs on node 2
    mytaddr = test.nodes[0].getnewaddress()
    n2taddr = test.nodes[2].getnewaddress()
    test.nodes[1].generate(1000)
    test.sync_all()
    for i in range(self.utxos_to_generate):
        test.nodes[1].sendtoaddress(mytaddr, 1)
    for i in range(20):
        test.nodes[1].sendtoaddress(n2taddr, 1)
    test.nodes[1].generate(1)
    test.sync_all()
    # Merging the UTXOs will conditionally occur over two transactions, since max
    # tx size is 100,000 bytes before Sapling and 2,000,000 after.
    # We don't verify mergingTransparentValue as UTXOs are not selected in any
    # specific order, so value can change on each test run.
    # We set an unrealistically high limit parameter of 99999, to verify that max
    # tx size will constrain the number of UTXOs.
    result = test.nodes[0].z_mergetoaddress([mytaddr], myzaddr, 0, 99999)
    assert_equal(result["mergingUTXOs"], self.utxos_in_tx1)
    assert_equal(result["remainingUTXOs"], self.utxos_in_tx2)
    assert_equal(result["mergingNotes"], Decimal('0'))
    assert_equal(result["mergingShieldedValue"], Decimal('0'))
    assert_equal(result["remainingNotes"], Decimal('0'))
    assert_equal(result["remainingShieldedValue"], Decimal('0'))
    remainingTransparentValue = result["remainingTransparentValue"]
    wait_and_assert_operationid_status(test.nodes[0], result['opid'])
    # For sapling we do not check that this occurs over two transactions because of the time that it would take
    if self.utxos_in_tx2 > 0:
        # Verify that UTXOs are locked (not available for selection) by queuing up another merging operation
        result = test.nodes[0].z_mergetoaddress([mytaddr], myzaddr, 0, 0)
        assert_equal(result["mergingUTXOs"], self.utxos_in_tx2)
        assert_equal(result["mergingTransparentValue"], Decimal(remainingTransparentValue))
        assert_equal(result["remainingUTXOs"], Decimal('0'))
        assert_equal(result["remainingTransparentValue"], Decimal('0'))
        assert_equal(result["mergingNotes"], Decimal('0'))
        assert_equal(result["mergingShieldedValue"], Decimal('0'))
        assert_equal(result["remainingNotes"], Decimal('0'))
        assert_equal(result["remainingShieldedValue"], Decimal('0'))
        wait_and_assert_operationid_status(test.nodes[0], result['opid'])
    # sync_all() invokes sync_mempool() but node 2's mempool limit will cause tx1 and tx2 to be rejected.
    # So instead, we sync on blocks and mempool for node 0 and node 1, and after a new block is generated
    # which mines tx1 and tx2, all nodes will have an empty mempool which can then be synced.
    sync_blocks(test.nodes[:2])
    sync_mempools(test.nodes[:2])
    # Generate enough blocks to ensure all transactions are mined
    while test.nodes[1].getmempoolinfo()['size'] > 0:
        test.nodes[1].generate(1)
        test.sync_all()
    # Verify maximum number of UTXOs which node 2 can shield is not limited
    # when the limit parameter is set to 0.
    expected_to_merge = 20
    expected_remaining = 0
    result = test.nodes[2].z_mergetoaddress([n2taddr], myzaddr, DEFAULT_FEE, 0)
    assert_equal(result["mergingUTXOs"], expected_to_merge)
    assert_equal(result["remainingUTXOs"], expected_remaining)
    assert_equal(result["mergingNotes"], Decimal('0'))
    assert_equal(result["remainingNotes"], Decimal('0'))
    wait_and_assert_operationid_status(test.nodes[2], result['opid'])
    test.sync_all()
    test.nodes[1].generate(1)
    test.sync_all()
    # Verify maximum number of UTXOs which node 0 can shield is set by default limit parameter of 50
    mytaddr = test.nodes[0].getnewaddress()
    for i in range(100):
        test.nodes[1].sendtoaddress(mytaddr, 1)
    test.nodes[1].generate(1)
    test.sync_all()
    result = test.nodes[0].z_mergetoaddress([mytaddr], myzaddr, DEFAULT_FEE)
    assert_equal(result["mergingUTXOs"], Decimal('50'))
    assert_equal(result["remainingUTXOs"], Decimal('50'))
    assert_equal(result["mergingNotes"], Decimal('0'))
    # Remaining notes are only counted if we are trying to merge any notes
    assert_equal(result["remainingNotes"], Decimal('0'))
    wait_and_assert_operationid_status(test.nodes[0], result['opid'])
    # Verify maximum number of UTXOs which node 0 can shield can be set by the limit parameter
    result = test.nodes[0].z_mergetoaddress([mytaddr], myzaddr, DEFAULT_FEE, 33)
    assert_equal(result["mergingUTXOs"], Decimal('33'))
    assert_equal(result["remainingUTXOs"], Decimal('17'))
    assert_equal(result["mergingNotes"], Decimal('0'))
    # Remaining notes are only counted if we are trying to merge any notes
    assert_equal(result["remainingNotes"], Decimal('0'))
    wait_and_assert_operationid_status(test.nodes[0], result['opid'])
    # Don't sync node 2 which rejects the tx due to its mempooltxinputlimit
    sync_blocks(test.nodes[:2])
    sync_mempools(test.nodes[:2])
    test.nodes[1].generate(1)
    test.sync_all()
    # Verify maximum number of notes which node 0 can shield can be set by the limit parameter
    # Also check that we can set off a second merge before the first one is complete
    # myzaddr will have 5 notes if testing before to Sapling activation and 4 otherwise
    num_notes = len(test.nodes[0].z_listunspent(0))
    result1 = test.nodes[0].z_mergetoaddress([myzaddr], myzaddr, DEFAULT_FEE, 50, 2)
    result2 = test.nodes[0].z_mergetoaddress([myzaddr], myzaddr, DEFAULT_FEE, 50, 2)
    # First merge should select from all notes
    assert_equal(result1["mergingUTXOs"], Decimal('0'))
    # Remaining UTXOs are only counted if we are trying to merge any UTXOs
    assert_equal(result1["remainingUTXOs"], Decimal('0'))
    assert_equal(result1["mergingNotes"], Decimal('2'))
    assert_equal(result1["remainingNotes"], num_notes - 2)
    # Second merge should ignore locked notes
    assert_equal(result2["mergingUTXOs"], Decimal('0'))
    assert_equal(result2["remainingUTXOs"], Decimal('0'))
    assert_equal(result2["mergingNotes"], Decimal('2'))
    assert_equal(result2["remainingNotes"], num_notes - 4)
    wait_and_assert_operationid_status(test.nodes[0], result1['opid'])
    wait_and_assert_operationid_status(test.nodes[0], result2['opid'])
    test.sync_all()
    test.nodes[1].generate(1)
    test.sync_all()
    # Shield both UTXOs and notes to a z-addr
    result = test.nodes[0].z_mergetoaddress(self.any_zaddr_or_utxo, myzaddr, 0, 10, 2)
    assert_equal(result["mergingUTXOs"], Decimal('10'))
    assert_equal(result["remainingUTXOs"], Decimal('7'))
    assert_equal(result["mergingNotes"], Decimal('2'))
    assert_equal(result["remainingNotes"], num_notes - 4)
    wait_and_assert_operationid_status(test.nodes[0], result['opid'])
    test.sync_all()
    test.nodes[1].generate(1)
    test.sync_all()
def run_test(self):
    """Test relay and mempool handling of transactions that expire soon.

    Connects a mininode to node 0 and sends two transactions: one whose
    expiry height (203) is too close to the tip, which every node's
    mempool rejects, and one with expiry 204 (tx2), which nodes 0 and 1
    accept.  Then verifies that the expiring-soon tx2 is not re-relayed
    via "inv"/"tx" (a "notfound" is sent instead), is rejected when
    submitted to a freshly reconnected node 2, and that a long-expiry
    transaction (tx3, expiry 999) still propagates normally.  Banscores
    are checked to stay at zero throughout.
    """
    testnode0 = TestNode()
    connections = []
    connections.append(
        NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], testnode0, "regtest", OVERWINTER_PROTO_VERSION))
    testnode0.add_connection(connections[0])
    # Start up network handling in another thread
    NetworkThread().start()
    testnode0.wait_for_verack()
    # Verify mininodes are connected to zprimed nodes
    peerinfo = self.nodes[0].getpeerinfo()
    versions = [x["version"] for x in peerinfo]
    assert_equal(1, versions.count(OVERWINTER_PROTO_VERSION))
    assert_equal(0, peerinfo[0]["banscore"])
    # Mine some blocks so we can spend
    coinbase_blocks = self.nodes[0].generate(200)
    node_address = self.nodes[0].getnewaddress()
    # Sync nodes 0 and 1 (node 2 is deliberately left isolated at height 0)
    sync_blocks(self.nodes[:2])
    sync_mempools(self.nodes[:2])
    # Verify block count
    assert_equal(self.nodes[0].getblockcount(), 200)
    assert_equal(self.nodes[1].getblockcount(), 200)
    assert_equal(self.nodes[2].getblockcount(), 0)
    # Mininodes send expiring soon transaction in "tx" message to zprimed node
    self.send_transaction(testnode0, coinbase_blocks[0], node_address, 203)
    # Assert that the tx is not in the mempool (expiring soon)
    assert_equal([], self.nodes[0].getrawmempool())
    assert_equal([], self.nodes[1].getrawmempool())
    assert_equal([], self.nodes[2].getrawmempool())
    # Mininodes send transaction in "tx" message to zprimed node
    tx2 = self.send_transaction(testnode0, coinbase_blocks[1], node_address, 204)
    # tx2 is not expiring soon
    assert_equal([tx2.hash], self.nodes[0].getrawmempool())
    assert_equal([tx2.hash], self.nodes[1].getrawmempool())
    # node 2 is isolated
    assert_equal([], self.nodes[2].getrawmempool())
    # Verify txid for tx2
    self.verify_inv(testnode0, tx2)
    self.send_data_message(testnode0, tx2)
    self.verify_last_tx(testnode0, tx2)
    # Sync and mine an empty block with node 2, leaving tx in the mempool of node0 and node1
    for blkhash in coinbase_blocks:
        blk = self.nodes[0].getblock(blkhash, 0)
        self.nodes[2].submitblock(blk)
    self.nodes[2].generate(1)
    # Verify block count
    assert_equal(self.nodes[0].getblockcount(), 200)
    assert_equal(self.nodes[1].getblockcount(), 200)
    assert_equal(self.nodes[2].getblockcount(), 201)
    # Reconnect node 2 to the network
    connect_nodes_bi(self.nodes, 0, 2)
    # Set up test node for node 2
    testnode2 = TestNode()
    connections.append(
        NodeConn('127.0.0.1', p2p_port(2), self.nodes[2], testnode2, "regtest", OVERWINTER_PROTO_VERSION))
    testnode2.add_connection(connections[-1])
    # Verify block count
    sync_blocks(self.nodes[:3])
    assert_equal(self.nodes[0].getblockcount(), 201)
    assert_equal(self.nodes[1].getblockcount(), 201)
    assert_equal(self.nodes[2].getblockcount(), 201)
    # Verify contents of mempool
    assert_equal([tx2.hash], self.nodes[0].getrawmempool())
    assert_equal([tx2.hash], self.nodes[1].getrawmempool())
    assert_equal([], self.nodes[2].getrawmempool())
    # Confirm tx2 cannot be submitted to a mempool because it is expiring soon.
    try:
        rawtx2 = hexlify(tx2.serialize())
        self.nodes[2].sendrawtransaction(rawtx2)
        fail("Sending transaction should have failed")
    except JSONRPCException as e:
        assert_equal(
            "tx-expiring-soon: expiryheight is 204 but should be at least 205 to avoid transaction expiring soon",
            e.error['message'])
    self.send_data_message(testnode0, tx2)
    # Sync up with node after p2p messages delivered
    testnode0.sync_with_ping()
    # Verify node 0 does not reply to "getdata" by sending "tx" message, as tx2 is expiring soon
    with mininode_lock:
        assert_equal(testnode0.last_tx, None)
    # Verify mininode received a "notfound" message containing the txid of tx2
    with mininode_lock:
        msg = testnode0.last_notfound
        assert_equal(len(msg.inv), 1)
        assert_equal(tx2.sha256, msg.inv[0].hash)
    # Create a transaction to verify that processing of "getdata" messages is functioning
    tx3 = self.send_transaction(testnode0, coinbase_blocks[2], node_address, 999)
    self.send_data_message(testnode0, tx3)
    self.verify_last_tx(testnode0, tx3)
    # Verify txid for tx3 is returned in "inv", but tx2 which is expiring soon is not returned
    self.verify_inv(testnode0, tx3)
    self.verify_inv(testnode2, tx3)
    # Verify contents of mempool
    assert_equal({tx2.hash, tx3.hash}, set(self.nodes[0].getrawmempool()))
    assert_equal({tx2.hash, tx3.hash}, set(self.nodes[1].getrawmempool()))
    assert_equal({tx3.hash}, set(self.nodes[2].getrawmempool()))
    # Verify banscore for nodes are still zero
    assert_equal(
        0, sum(peer["banscore"] for peer in self.nodes[0].getpeerinfo()))
    assert_equal(
        0, sum(peer["banscore"] for peer in self.nodes[2].getpeerinfo()))
    # Tear down the mininode connections.
    [c.disconnect_node() for c in connections]
def run_test(self):
    """End-to-end RPC test of z_shieldcoinbase.

    Checks argument validation (watch-only source, out-of-range fees,
    negative/oversized limit parameters), shields coinbase UTXOs into a
    single z-address (created via the pool-specific `test_init_zaddr`
    hook), verifies balances via `test_check_balance_zaddr`, exercises
    UTXO locking across two queued shielding operations, and confirms
    the default (50) and explicit limit parameters cap how many UTXOs a
    single operation selects.
    """
    print("Mining blocks...")
    self.nodes[0].generate(1)
    self.nodes[0].generate(4)
    self.sync_all()
    walletinfo = self.nodes[0].getwalletinfo()
    assert_equal(walletinfo['immature_balance'], 50)
    assert_equal(walletinfo['balance'], 0)
    self.sync_all()
    self.nodes[2].generate(1)
    self.nodes[2].generate(1)
    self.nodes[2].generate(1)
    self.sync_all()
    # 101 blocks so the earlier coinbases mature.
    self.nodes[1].generate(101)
    self.sync_all()
    assert_equal(self.nodes[0].getbalance(), 50)
    assert_equal(self.nodes[1].getbalance(), 10)
    assert_equal(self.nodes[2].getbalance(), 30)
    # create one zaddr that is the target of all shielding
    myzaddr = self.test_init_zaddr(self.nodes[0])
    do_not_shield_taddr = get_coinbase_address(self.nodes[0], 1)
    # Prepare to send taddr->zaddr
    mytaddr = get_coinbase_address(self.nodes[0], 4)
    # Shielding will fail when trying to spend from watch-only address
    self.nodes[2].importaddress(mytaddr)
    try:
        self.nodes[2].z_shieldcoinbase(mytaddr, myzaddr)
    except JSONRPCException as e:
        errorString = e.error['message']
    assert_equal(
        "Could not find any coinbase funds to shield" in errorString, True)
    # Shielding will fail because fee is negative
    try:
        self.nodes[0].z_shieldcoinbase("*", myzaddr, -1)
    except JSONRPCException as e:
        errorString = e.error['message']
    assert_equal("Amount out of range" in errorString, True)
    # Shielding will fail because fee is larger than MAX_MONEY
    try:
        self.nodes[0].z_shieldcoinbase("*", myzaddr, Decimal('21000000.00000001'))
    except JSONRPCException as e:
        errorString = e.error['message']
    assert_equal("Amount out of range" in errorString, True)
    # Shielding will fail because fee is larger than sum of utxos
    try:
        self.nodes[0].z_shieldcoinbase("*", myzaddr, 999)
    except JSONRPCException as e:
        errorString = e.error['message']
    assert_equal("Insufficient coinbase funds" in errorString, True)
    # Shielding will fail because limit parameter must be at least 0
    try:
        self.nodes[0].z_shieldcoinbase("*", myzaddr, Decimal('0.001'), -1)
    except JSONRPCException as e:
        errorString = e.error['message']
    assert_equal(
        "Limit on maximum number of utxos cannot be negative" in errorString, True)
    # Shielding will fail because limit parameter is absurdly large
    try:
        self.nodes[0].z_shieldcoinbase("*", myzaddr, Decimal('0.001'), 99999999999999)
    except JSONRPCException as e:
        errorString = e.error['message']
    assert_equal("JSON integer out of range" in errorString, True)
    # Shield coinbase utxos from node 0 of value 40, standard fee
    result = self.nodes[0].z_shieldcoinbase(mytaddr, myzaddr)
    wait_and_assert_operationid_status(self.nodes[0], result['opid'])
    self.sync_all()
    self.nodes[1].generate(1)
    self.sync_all()
    # Confirm balances and that do_not_shield_taddr containing funds of 10 was left alone
    assert_equal(self.nodes[0].getbalance(), 10)
    assert_equal(self.nodes[0].z_getbalance(do_not_shield_taddr), Decimal('10.0'))
    self.test_check_balance_zaddr(self.nodes[0], Decimal('40.0') - DEFAULT_FEE)
    assert_equal(self.nodes[1].getbalance(), 20)
    assert_equal(self.nodes[2].getbalance(), 30)
    # Shield coinbase utxos from any node 2 taddr, and set fee to 0
    result = self.nodes[2].z_shieldcoinbase("*", myzaddr, 0)
    wait_and_assert_operationid_status(self.nodes[2], result['opid'])
    self.sync_all()
    self.nodes[1].generate(1)
    self.sync_all()
    assert_equal(self.nodes[0].getbalance(), 10)
    self.test_check_balance_zaddr(self.nodes[0], Decimal('70.0') - DEFAULT_FEE)
    assert_equal(self.nodes[1].getbalance(), 30)
    assert_equal(self.nodes[2].getbalance(), 0)
    # Generate 800 coinbase utxos on node 0, and 20 coinbase utxos on node 2
    self.nodes[0].generate(800)
    self.sync_all()
    self.nodes[2].generate(20)
    self.sync_all()
    self.nodes[1].generate(100)
    self.sync_all()
    mytaddr = get_coinbase_address(self.nodes[0], 800)

    def verify_locking(first, second, limit):
        # Queue one shielding op capped at `limit`, expecting `first` UTXOs
        # selected and `second` left over; then verify the leftovers (and only
        # those) are picked up by a second, unlimited op while the first one's
        # inputs are still locked.
        result = self.nodes[0].z_shieldcoinbase(mytaddr, myzaddr, 0, limit)
        assert_equal(result["shieldingUTXOs"], Decimal(first))
        assert_equal(result["remainingUTXOs"], Decimal(second))
        remainingValue = result["remainingValue"]
        opid1 = result['opid']
        # Verify that utxos are locked (not available for selection) by queuing up another shielding operation
        result = self.nodes[0].z_shieldcoinbase(mytaddr, myzaddr, 0, 0)
        assert_equal(result["shieldingValue"], Decimal(remainingValue))
        assert_equal(result["shieldingUTXOs"], Decimal(second))
        assert_equal(result["remainingValue"], Decimal('0'))
        assert_equal(result["remainingUTXOs"], Decimal('0'))
        opid2 = result['opid']
        # wait for both async operations to complete
        wait_and_assert_operationid_status(self.nodes[0], opid1)
        wait_and_assert_operationid_status(self.nodes[0], opid2)

    # Shield the 800 utxos over two transactions
    verify_locking('500', '300', 500)
    # sync_all() invokes sync_mempool() but node 2's mempool limit will cause tx1 and tx2 to be rejected.
    # So instead, we sync on blocks and mempool for node 0 and node 1, and after a new block is generated
    # which mines tx1 and tx2, all nodes will have an empty mempool which can then be synced.
    sync_blocks(self.nodes[:2])
    sync_mempools(self.nodes[:2])
    self.nodes[1].generate(1)
    self.sync_all()
    # Verify maximum number of utxos which node 0 can shield is set by default limit parameter of 50
    self.nodes[0].generate(200)
    self.sync_all()
    mytaddr = get_coinbase_address(self.nodes[0], 100)
    result = self.nodes[0].z_shieldcoinbase(mytaddr, myzaddr, DEFAULT_FEE)
    assert_equal(result["shieldingUTXOs"], Decimal('50'))
    assert_equal(result["remainingUTXOs"], Decimal('50'))
    wait_and_assert_operationid_status(self.nodes[0], result['opid'])
    # Verify maximum number of utxos which node 0 can shield can be set by the limit parameter
    result = self.nodes[0].z_shieldcoinbase(mytaddr, myzaddr, DEFAULT_FEE, 33)
    assert_equal(result["shieldingUTXOs"], Decimal('33'))
    assert_equal(result["remainingUTXOs"], Decimal('17'))
    wait_and_assert_operationid_status(self.nodes[0], result['opid'])
    # Don't sync node 2 which rejects the tx due to its mempooltxinputlimit
    sync_blocks(self.nodes[:2])
    sync_mempools(self.nodes[:2])
    self.nodes[1].generate(1)
    self.sync_all()


# Note, no "if __name__ == '__main__'" and call the test here; it's called from
# pool-specific derived classes in wallet_shieldcoinbase_*.py
def sync_nodes(self, mc_nodes):
    """Bring `mc_nodes` into agreement on chain tip and mempool contents.

    Blocks are synced first so the subsequent mempool comparison happens
    against a common tip.
    """
    for synchronize in (sync_blocks, sync_mempools):
        synchronize(mc_nodes)
def run_test(self): # Check that there's no UTXO on none of the nodes assert_equal(len(self.nodes[0].listunspent()), 0) assert_equal(len(self.nodes[1].listunspent()), 0) assert_equal(len(self.nodes[2].listunspent()), 0) self.log.info("Mining blocks...") self.nodes[0].generate(1) walletinfo = self.nodes[0].getwalletinfo() assert_equal(walletinfo['immature_balance'], 50) assert_equal(walletinfo['balance'], 0) self.sync_all([self.nodes[0:3]]) self.nodes[1].generate(101) self.sync_all([self.nodes[0:3]]) assert_equal(self.nodes[0].getbalance(), 50) assert_equal(self.nodes[1].getbalance(), 50) assert_equal(self.nodes[2].getbalance(), 0) # Check that only first and second nodes have UTXOs utxos = self.nodes[0].listunspent() assert_equal(len(utxos), 1) assert_equal(len(self.nodes[1].listunspent()), 1) assert_equal(len(self.nodes[2].listunspent()), 0) self.log.info("test gettxout") confirmed_txid, confirmed_index = utxos[0]["txid"], utxos[0]["vout"] # First, outputs that are unspent both in the chain and in the # mempool should appear with or without include_mempool txout = self.nodes[0].gettxout(txid=confirmed_txid, n=confirmed_index, include_mempool=False) assert_equal(txout['value'], 50) txout = self.nodes[0].gettxout(txid=confirmed_txid, n=confirmed_index, include_mempool=True) assert_equal(txout['value'], 50) # Send 21 BTC from 0 to 2 using sendtoaddress call. 
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11) mempool_txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10) self.log.info("test gettxout (second part)") # utxo spent in mempool should be visible if you exclude mempool # but invisible if you include mempool txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, False) assert_equal(txout['value'], 50) txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, True) assert txout is None # new utxo from mempool should be invisible if you exclude mempool # but visible if you include mempool txout = self.nodes[0].gettxout(mempool_txid, 0, False) assert txout is None txout1 = self.nodes[0].gettxout(mempool_txid, 0, True) txout2 = self.nodes[0].gettxout(mempool_txid, 1, True) # note the mempool tx will have randomly assigned indices # but 10 will go to node2 and the rest will go to node0 balance = self.nodes[0].getbalance() assert_equal(set([txout1['value'], txout2['value']]), set([10, balance])) walletinfo = self.nodes[0].getwalletinfo() assert_equal(walletinfo['immature_balance'], 0) # Have node0 mine a block, thus it will collect its own fee. 
self.nodes[0].generate(1) self.sync_all([self.nodes[0:3]]) # Exercise locking of unspent outputs unspent_0 = self.nodes[2].listunspent()[0] unspent_0 = {"txid": unspent_0["txid"], "vout": unspent_0["vout"]} assert_raises_rpc_error(-8, "Invalid parameter, expected locked output", self.nodes[2].lockunspent, True, [unspent_0]) self.nodes[2].lockunspent(False, [unspent_0]) assert_raises_rpc_error(-8, "Invalid parameter, output already locked", self.nodes[2].lockunspent, False, [unspent_0]) assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].sendtoaddress, self.nodes[2].getnewaddress(), 20) assert_equal([unspent_0], self.nodes[2].listlockunspent()) self.nodes[2].lockunspent(True, [unspent_0]) assert_equal(len(self.nodes[2].listlockunspent()), 0) assert_raises_rpc_error(-8, "txid must be of length 64 (not 34, for '0000000000000000000000000000000000')", self.nodes[2].lockunspent, False, [{"txid": "0000000000000000000000000000000000", "vout": 0}]) assert_raises_rpc_error(-8, "txid must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[2].lockunspent, False, [{"txid": "ZZZ0000000000000000000000000000000000000000000000000000000000000", "vout": 0}]) assert_raises_rpc_error(-8, "Invalid parameter, unknown transaction", self.nodes[2].lockunspent, False, [{"txid": "0000000000000000000000000000000000000000000000000000000000000000", "vout": 0}]) assert_raises_rpc_error(-8, "Invalid parameter, vout index out of bounds", self.nodes[2].lockunspent, False, [{"txid": unspent_0["txid"], "vout": 999}]) # An output should be unlocked when spent unspent_0 = self.nodes[1].listunspent()[0] self.nodes[1].lockunspent(False, [unspent_0]) tx = self.nodes[1].createrawtransaction([unspent_0], { self.nodes[1].getnewaddress() : 1 }) tx = self.nodes[1].fundrawtransaction(tx)['hex'] tx = self.nodes[1].signrawtransactionwithwallet(tx)["hex"] self.nodes[1].sendrawtransaction(tx) assert_equal(len(self.nodes[1].listlockunspent()), 0) # 
Have node1 generate 100 blocks (so node0 can recover the fee) self.nodes[1].generate(100) self.sync_all([self.nodes[0:3]]) # node0 should end up with 100 btc in block rewards plus fees, but # minus the 21 plus fees sent to node2 assert_equal(self.nodes[0].getbalance(), 100 - 21) assert_equal(self.nodes[2].getbalance(), 21) # Node0 should have two unspent outputs. # Create a couple of transactions to send them to node2, submit them through # node1, and make sure both node0 and node2 pick them up properly: node0utxos = self.nodes[0].listunspent(1) assert_equal(len(node0utxos), 2) # create both transactions txns_to_send = [] for utxo in node0utxos: inputs = [] outputs = {} inputs.append({"txid": utxo["txid"], "vout": utxo["vout"]}) outputs[self.nodes[2].getnewaddress()] = utxo["amount"] - 3 raw_tx = self.nodes[0].createrawtransaction(inputs, outputs) txns_to_send.append(self.nodes[0].signrawtransactionwithwallet(raw_tx)) # Have node 1 (miner) send the transactions self.nodes[1].sendrawtransaction(txns_to_send[0]["hex"], True) self.nodes[1].sendrawtransaction(txns_to_send[1]["hex"], True) # Have node1 mine a block to confirm transactions: self.nodes[1].generate(1) self.sync_all([self.nodes[0:3]]) assert_equal(self.nodes[0].getbalance(), 0) assert_equal(self.nodes[2].getbalance(), 94) # Verify that a spent output cannot be locked anymore spent_0 = {"txid": node0utxos[0]["txid"], "vout": node0utxos[0]["vout"]} assert_raises_rpc_error(-8, "Invalid parameter, expected unspent output", self.nodes[0].lockunspent, False, [spent_0]) # Send 10 BTC normal address = self.nodes[0].getnewaddress("test") fee_per_byte = Decimal('0.001') / 1000 self.nodes[2].settxfee(fee_per_byte * 1000) txid = self.nodes[2].sendtoaddress(address, 10, "", "", False) self.nodes[2].generate(1) self.sync_all([self.nodes[0:3]]) node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), Decimal('84'), fee_per_byte, self.get_vsize(self.nodes[2].gettransaction(txid)['hex'])) 
assert_equal(self.nodes[0].getbalance(), Decimal('10')) # Send 10 BTC with subtract fee from amount txid = self.nodes[2].sendtoaddress(address, 10, "", "", True) self.nodes[2].generate(1) self.sync_all([self.nodes[0:3]]) node_2_bal -= Decimal('10') assert_equal(self.nodes[2].getbalance(), node_2_bal) node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), Decimal('20'), fee_per_byte, self.get_vsize(self.nodes[2].gettransaction(txid)['hex'])) # Sendmany 10 BTC txid = self.nodes[2].sendmany('', {address: 10}, 0, "", []) self.nodes[2].generate(1) self.sync_all([self.nodes[0:3]]) node_0_bal += Decimal('10') node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), node_2_bal - Decimal('10'), fee_per_byte, self.get_vsize(self.nodes[2].gettransaction(txid)['hex'])) assert_equal(self.nodes[0].getbalance(), node_0_bal) # Sendmany 10 BTC with subtract fee from amount txid = self.nodes[2].sendmany('', {address: 10}, 0, "", [address]) self.nodes[2].generate(1) self.sync_all([self.nodes[0:3]]) node_2_bal -= Decimal('10') assert_equal(self.nodes[2].getbalance(), node_2_bal) node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), node_0_bal + Decimal('10'), fee_per_byte, self.get_vsize(self.nodes[2].gettransaction(txid)['hex'])) # Test ResendWalletTransactions: # Create a couple of transactions, then start up a fourth # node (nodes[3]) and ask nodes[0] to rebroadcast. # EXPECT: nodes[3] should have those transactions in its mempool. txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1) txid2 = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1) sync_mempools(self.nodes[0:2]) self.start_node(3) connect_nodes_bi(self.nodes, 0, 3) sync_blocks(self.nodes) relayed = self.nodes[0].resendwallettransactions() assert_equal(set(relayed), {txid1, txid2}) sync_mempools(self.nodes) assert txid1 in self.nodes[3].getrawmempool() # check if we can list zero value tx as available coins # 1. create raw_tx # 2. hex-changed one output to 0.0 # 3. 
sign and send # 4. check if recipient (node0) can list the zero value tx usp = self.nodes[1].listunspent(query_options={'minimumAmount': '49.998'})[0] inputs = [{"txid": usp['txid'], "vout": usp['vout']}] outputs = {self.nodes[1].getnewaddress(): 49.998, self.nodes[0].getnewaddress(): 11.11} raw_tx = self.nodes[1].createrawtransaction(inputs, outputs).replace("c0833842", "00000000") # replace 11.11 with 0.0 (int32) signed_raw_tx = self.nodes[1].signrawtransactionwithwallet(raw_tx) decoded_raw_tx = self.nodes[1].decoderawtransaction(signed_raw_tx['hex']) zero_value_txid = decoded_raw_tx['txid'] self.nodes[1].sendrawtransaction(signed_raw_tx['hex']) self.sync_all() self.nodes[1].generate(1) # mine a block self.sync_all() unspent_txs = self.nodes[0].listunspent() # zero value tx must be in listunspents output found = False for uTx in unspent_txs: if uTx['txid'] == zero_value_txid: found = True assert_equal(uTx['amount'], Decimal('0')) assert found # do some -walletbroadcast tests self.stop_nodes() self.start_node(0, ["-walletbroadcast=0"]) self.start_node(1, ["-walletbroadcast=0"]) self.start_node(2, ["-walletbroadcast=0"]) connect_nodes_bi(self.nodes, 0, 1) connect_nodes_bi(self.nodes, 1, 2) connect_nodes_bi(self.nodes, 0, 2) self.sync_all([self.nodes[0:3]]) txid_not_broadcast = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2) tx_obj_not_broadcast = self.nodes[0].gettransaction(txid_not_broadcast) self.nodes[1].generate(1) # mine a block, tx should not be in there self.sync_all([self.nodes[0:3]]) assert_equal(self.nodes[2].getbalance(), node_2_bal) # should not be changed because tx was not broadcasted # now broadcast from another node, mine a block, sync, and check the balance self.nodes[1].sendrawtransaction(tx_obj_not_broadcast['hex']) self.nodes[1].generate(1) self.sync_all([self.nodes[0:3]]) node_2_bal += 2 tx_obj_not_broadcast = self.nodes[0].gettransaction(txid_not_broadcast) assert_equal(self.nodes[2].getbalance(), node_2_bal) # create another tx 
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2) # restart the nodes with -walletbroadcast=1 self.stop_nodes() self.start_node(0) self.start_node(1) self.start_node(2) connect_nodes_bi(self.nodes, 0, 1) connect_nodes_bi(self.nodes, 1, 2) connect_nodes_bi(self.nodes, 0, 2) sync_blocks(self.nodes[0:3]) self.nodes[0].generate(1) sync_blocks(self.nodes[0:3]) node_2_bal += 2 # tx should be added to balance because after restarting the nodes tx should be broadcast assert_equal(self.nodes[2].getbalance(), node_2_bal) # send a tx with value in a string (PR#6380 +) txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "2") tx_obj = self.nodes[0].gettransaction(txid) assert_equal(tx_obj['amount'], Decimal('-2')) txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "0.0001") tx_obj = self.nodes[0].gettransaction(txid) assert_equal(tx_obj['amount'], Decimal('-0.0001')) # check if JSON parser can handle scientific notation in strings txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "1e-4") tx_obj = self.nodes[0].gettransaction(txid) assert_equal(tx_obj['amount'], Decimal('-0.0001')) # General checks for errors from incorrect inputs # This will raise an exception because the amount type is wrong assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].sendtoaddress, self.nodes[2].getnewaddress(), "1f-4") # This will raise an exception since generate does not accept a string assert_raises_rpc_error(-1, "not an integer", self.nodes[0].generate, "2") # This will raise an exception for the invalid private key format assert_raises_rpc_error(-5, "Invalid private key encoding", self.nodes[0].importprivkey, "invalid") # This will raise an exception for importing an address with the PS2H flag temp_address = self.nodes[1].getnewaddress() assert_raises_rpc_error(-5, "Cannot use the p2sh flag with an address - use a script instead", self.nodes[0].importaddress, temp_address, "label", False, True) # This will raise an exception for 
attempting to dump the private key of an address you do not own assert_raises_rpc_error(-3, "Address does not refer to a key", self.nodes[0].dumpprivkey, temp_address) # This will raise an exception for attempting to get the private key of an invalid Bitcoin address assert_raises_rpc_error(-5, "Invalid Goldcoin address", self.nodes[0].dumpprivkey, "invalid") # This will raise an exception for attempting to set a label for an invalid Bitcoin address assert_raises_rpc_error(-5, "Invalid Goldcoin address", self.nodes[0].setlabel, "invalid address", "label") # This will raise an exception for importing an invalid address assert_raises_rpc_error(-5, "Invalid Goldcoin address or script", self.nodes[0].importaddress, "invalid") # This will raise an exception for attempting to import a pubkey that isn't in hex assert_raises_rpc_error(-5, "Pubkey must be a hex string", self.nodes[0].importpubkey, "not hex") # This will raise an exception for importing an invalid pubkey assert_raises_rpc_error(-5, "Pubkey is not a valid public key", self.nodes[0].importpubkey, "5361746f736869204e616b616d6f746f") # Import address and private key to check correct behavior of spendable unspents # 1. Send some coins to generate new UTXO address_to_import = self.nodes[2].getnewaddress() txid = self.nodes[0].sendtoaddress(address_to_import, 1) self.nodes[0].generate(1) self.sync_all([self.nodes[0:3]]) # 2. Import address from node2 to node1 self.nodes[1].importaddress(address_to_import) # 3. Validate that the imported address is watch-only on node1 assert self.nodes[1].getaddressinfo(address_to_import)["iswatchonly"] # 4. Check that the unspents after import are not spendable assert_array_result(self.nodes[1].listunspent(), {"address": address_to_import}, {"spendable": False}) # 5. Import private key of the previously imported address on node1 priv_key = self.nodes[2].dumpprivkey(address_to_import) self.nodes[1].importprivkey(priv_key) # 6. 
Check that the unspents are now spendable on node1 assert_array_result(self.nodes[1].listunspent(), {"address": address_to_import}, {"spendable": True}) # Mine a block from node0 to an address from node1 coinbase_addr = self.nodes[1].getnewaddress() block_hash = self.nodes[0].generatetoaddress(1, coinbase_addr)[0] coinbase_txid = self.nodes[0].getblock(block_hash)['tx'][0] self.sync_all([self.nodes[0:3]]) # Check that the txid and balance is found by node1 self.nodes[1].gettransaction(coinbase_txid) # check if wallet or blockchain maintenance changes the balance self.sync_all([self.nodes[0:3]]) blocks = self.nodes[0].generate(2) self.sync_all([self.nodes[0:3]]) balance_nodes = [self.nodes[i].getbalance() for i in range(3)] block_count = self.nodes[0].getblockcount() # Check modes: # - True: unicode escaped as \u.... # - False: unicode directly as UTF-8 for mode in [True, False]: self.nodes[0].rpc.ensure_ascii = mode # unicode check: Basic Multilingual Plane, Supplementary Plane respectively for label in [u'ббаБаА', u'№ Ё']: addr = self.nodes[0].getnewaddress() self.nodes[0].setlabel(addr, label) assert_equal(self.nodes[0].getaddressinfo(addr)['label'], label) assert label in self.nodes[0].listlabels() self.nodes[0].rpc.ensure_ascii = True # restore to default # maintenance tests maintenance = [ '-rescan', '-reindex', '-zapwallettxes=1', '-zapwallettxes=2', # disabled until issue is fixed: https://github.com/bitcoin/bitcoin/issues/7463 # '-salvagewallet', ] chainlimit = 6 for m in maintenance: self.log.info("check " + m) self.stop_nodes() # set lower ancestor limit for later self.start_node(0, [m, "-limitancestorcount=" + str(chainlimit)]) self.start_node(1, [m, "-limitancestorcount=" + str(chainlimit)]) self.start_node(2, [m, "-limitancestorcount=" + str(chainlimit)]) if m == '-reindex': # reindex will leave rpc warm up "early"; Wait for it to finish wait_until(lambda: [block_count] * 3 == [self.nodes[i].getblockcount() for i in range(3)]) 
assert_equal(balance_nodes, [self.nodes[i].getbalance() for i in range(3)]) # Exercise listsinceblock with the last two blocks coinbase_tx_1 = self.nodes[0].listsinceblock(blocks[0]) assert_equal(coinbase_tx_1["lastblock"], blocks[1]) assert_equal(len(coinbase_tx_1["transactions"]), 1) assert_equal(coinbase_tx_1["transactions"][0]["blockhash"], blocks[1]) assert_equal(len(self.nodes[0].listsinceblock(blocks[1])["transactions"]), 0) # ==Check that wallet prefers to use coins that don't exceed mempool limits ===== # Get all non-zero utxos together chain_addrs = [self.nodes[0].getnewaddress(), self.nodes[0].getnewaddress()] singletxid = self.nodes[0].sendtoaddress(chain_addrs[0], self.nodes[0].getbalance(), "", "", True) self.nodes[0].generate(1) node0_balance = self.nodes[0].getbalance() # Split into two chains rawtx = self.nodes[0].createrawtransaction([{"txid": singletxid, "vout": 0}], {chain_addrs[0]: node0_balance / 2 - Decimal('0.01'), chain_addrs[1]: node0_balance / 2 - Decimal('0.01')}) signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx) singletxid = self.nodes[0].sendrawtransaction(signedtx["hex"]) self.nodes[0].generate(1) # Make a long chain of unconfirmed payments without hitting mempool limit # Each tx we make leaves only one output of change on a chain 1 longer # Since the amount to send is always much less than the outputs, we only ever need one output # So we should be able to generate exactly chainlimit txs for each original output sending_addr = self.nodes[1].getnewaddress() txid_list = [] for i in range(chainlimit * 2): txid_list.append(self.nodes[0].sendtoaddress(sending_addr, Decimal('0.0001'))) assert_equal(self.nodes[0].getmempoolinfo()['size'], chainlimit * 2) assert_equal(len(txid_list), chainlimit * 2) # Without walletrejectlongchains, we will still generate a txid # The tx will be stored in the wallet but not accepted to the mempool extra_txid = self.nodes[0].sendtoaddress(sending_addr, Decimal('0.0001')) assert extra_txid not in 
self.nodes[0].getrawmempool() assert extra_txid in [tx["txid"] for tx in self.nodes[0].listtransactions()] self.nodes[0].abandontransaction(extra_txid) total_txs = len(self.nodes[0].listtransactions("*", 99999)) # Try with walletrejectlongchains # Double chain limit but require combining inputs, so we pass SelectCoinsMinConf self.stop_node(0) self.start_node(0, extra_args=["-walletrejectlongchains", "-limitancestorcount=" + str(2 * chainlimit)]) # wait for loadmempool timeout = 10 while (timeout > 0 and len(self.nodes[0].getrawmempool()) < chainlimit * 2): time.sleep(0.5) timeout -= 0.5 assert_equal(len(self.nodes[0].getrawmempool()), chainlimit * 2) node0_balance = self.nodes[0].getbalance() # With walletrejectlongchains we will not create the tx and store it in our wallet. assert_raises_rpc_error(-4, "Transaction has too long of a mempool chain", self.nodes[0].sendtoaddress, sending_addr, node0_balance - Decimal('0.01')) # Verify nothing new in wallet assert_equal(total_txs, len(self.nodes[0].listtransactions("*", 99999))) # Test getaddressinfo on external address. Note that these addresses are taken from disablewallet.py assert_raises_rpc_error(-5, "Invalid address", self.nodes[0].getaddressinfo, "3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy") address_info = self.nodes[0].getaddressinfo("mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ") assert_equal(address_info['address'], "mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ") assert_equal(address_info["scriptPubKey"], "76a9144e3854046c7bd1594ac904e4793b6a45b36dea0988ac") assert not address_info["ismine"] assert not address_info["iswatchonly"] assert not address_info["isscript"] assert not address_info["ischange"] # Test getaddressinfo 'ischange' field on change address. 
self.nodes[0].generate(1) destination = self.nodes[1].getnewaddress() txid = self.nodes[0].sendtoaddress(destination, 0.123) tx = self.nodes[0].decoderawtransaction(self.nodes[0].gettransaction(txid)['hex']) output_addresses = [vout['scriptPubKey']['addresses'][0] for vout in tx["vout"]] assert len(output_addresses) > 1 for address in output_addresses: ischange = self.nodes[0].getaddressinfo(address)['ischange'] assert_equal(ischange, address != destination) if ischange: change = address self.nodes[0].setlabel(change, 'foobar') assert_equal(self.nodes[0].getaddressinfo(change)['ischange'], False)
def sync_all(self):
    """Synchronize all nodes: wait for block sync first, then mempools.

    Blocks are synced before mempools because mempool contents depend on
    which blocks each node has connected.
    """
    sync_blocks(self.nodes)
    sync_mempools(self.nodes)
def run_test(self):
    """Exercise abandontransaction behavior.

    Builds a chain of dependent wallet transactions (AB1 -> ABC2 -> child),
    then verifies that:
    - abandoning a parent releases its inputs and abandons descendants,
    - an abandoned tx is not re-accepted from the wallet on restart,
    - re-sending a tx un-abandons it,
    - a confirmed conflict (double spend of AB1) frees the other inputs.
    """

    def total_fees(*txids):
        """Return the summed fee of *txids*, rounded to satoshis."""
        # NOTE(fix): dropped an unused `ctx = FromHex(CTransaction(), ...)`
        # local that also made a wasted getrawtransaction RPC per txid.
        total = 0
        for txid in txids:
            total += self.nodes[0].calculate_fee_from_txid(txid)
        return satoshi_round(total)

    # Mature some coins and send three 10-coin payments to self.
    self.nodes[1].generate(100)
    sync_blocks(self.nodes)
    balance = self.nodes[0].getbalance()
    txA = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10"))
    txB = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10"))
    txC = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10"))
    sync_mempools(self.nodes)
    self.nodes[1].generate(1)
    sync_blocks(self.nodes)
    newbalance = self.nodes[0].getbalance()
    # no more than fees lost
    assert (balance - newbalance <= total_fees(txA, txB, txC))
    balance = newbalance
    # Disconnect nodes so node0's transactions don't get into node1's mempool
    disconnect_nodes(self.nodes[0], self.nodes[1])
    # Identify the 10btc outputs
    nA = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(
        txA, 1)["vout"]) if vout["value"] == Decimal("10"))
    nB = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(
        txB, 1)["vout"]) if vout["value"] == Decimal("10"))
    nC = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(
        txC, 1)["vout"]) if vout["value"] == Decimal("10"))
    inputs = []
    # spend 10btc outputs from txA and txB
    inputs.append({"txid": txA, "vout": nA})
    inputs.append({"txid": txB, "vout": nB})
    outputs = {}
    outputs[self.nodes[0].getnewaddress()] = Decimal("14.99998")
    outputs[self.nodes[1].getnewaddress()] = Decimal("5")
    signed = self.nodes[0].signrawtransactionwithwallet(
        self.nodes[0].createrawtransaction(inputs, outputs))
    txAB1 = self.nodes[0].sendrawtransaction(signed["hex"])
    # Identify the 14.99998btc output
    nAB = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(
        txAB1, 1)["vout"]) if vout["value"] == Decimal("14.99998"))
    # Create a child tx spending AB1 and C
    inputs = []
    # Amount 14.99998 BCH
    inputs.append({"txid": txAB1, "vout": nAB})
    # Amount 10 BCH
    inputs.append({"txid": txC, "vout": nC})
    outputs = {}
    outputs[self.nodes[0].getnewaddress()] = Decimal("24.9996")
    signed2 = self.nodes[0].signrawtransactionwithwallet(
        self.nodes[0].createrawtransaction(inputs, outputs))
    txABC2 = self.nodes[0].sendrawtransaction(signed2["hex"])
    # Create a child tx spending ABC2
    signed3_change = Decimal("24.999")
    inputs = [{"txid": txABC2, "vout": 0}]
    outputs = {self.nodes[0].getnewaddress(): signed3_change}
    signed3 = self.nodes[0].signrawtransactionwithwallet(
        self.nodes[0].createrawtransaction(inputs, outputs))
    # note tx is never directly referenced, only abandoned as a child of the above
    self.nodes[0].sendrawtransaction(signed3["hex"])
    # In mempool txs from self should increase balance from change
    newbalance = self.nodes[0].getbalance()
    assert_equal(newbalance, balance - Decimal("30") + signed3_change)
    balance = newbalance
    # Restart the node with a higher min relay fee so the parent tx is no longer in mempool
    # TODO: redo with eviction
    self.stop_node(0)
    self.start_node(0, extra_args=["-minrelaytxfee=0.0001"])
    # Verify txs no longer in either node's mempool
    assert_equal(len(self.nodes[0].getrawmempool()), 0)
    assert_equal(len(self.nodes[1].getrawmempool()), 0)
    # Transactions which are not in the mempool should only reduce wallet balance.
    # Transaction inputs should still be spent, but the change not yet received.
    newbalance = self.nodes[0].getbalance()
    assert_equal(newbalance, balance - signed3_change)
    # Unconfirmed received funds that are not in mempool also shouldn't show
    # up in unconfirmed balance. Note that the transactions stored in the wallet
    # are not necessarily in the node's mempool.
    unconfbalance = self.nodes[0].getunconfirmedbalance(
    ) + self.nodes[0].getbalance()
    assert_equal(unconfbalance, newbalance)
    # Unconfirmed transactions which are not in the mempool should also
    # not be in listunspent
    assert (not txABC2 in [utxo["txid"]
                           for utxo in self.nodes[0].listunspent(0)])
    balance = newbalance
    # Abandon original transaction and verify inputs are available again
    # including that the child tx was also abandoned
    self.nodes[0].abandontransaction(txAB1)
    newbalance = self.nodes[0].getbalance()
    assert_equal(newbalance, balance + Decimal("30"))
    balance = newbalance
    # Verify that even with a low min relay fee, the tx is not re-accepted
    # from wallet on startup once abandoned.
    self.stop_node(0)
    self.start_node(0, extra_args=["-minrelaytxfee=0.00001"])
    assert_equal(len(self.nodes[0].getrawmempool()), 0)
    assert_equal(self.nodes[0].getbalance(), balance)
    # If the transaction is re-sent the wallet also unabandons it. The
    # change should be available, and it's child transaction should remain
    # abandoned.
    # NOTE: Abandoned transactions are internal to the wallet, and tracked
    # separately from other indices.
    self.nodes[0].sendrawtransaction(signed["hex"])
    newbalance = self.nodes[0].getbalance()
    assert_equal(newbalance, balance - Decimal("20") + Decimal("14.99998"))
    balance = newbalance
    # Send child tx again so it is not longer abandoned.
    self.nodes[0].sendrawtransaction(signed2["hex"])
    newbalance = self.nodes[0].getbalance()
    assert_equal(
        newbalance,
        balance - Decimal("10") - Decimal("14.99998") + Decimal("24.9996"))
    balance = newbalance
    # Reset to a higher relay fee so that we abandon a transaction
    self.stop_node(0)
    self.start_node(0, extra_args=["-minrelaytxfee=0.0001"])
    assert_equal(len(self.nodes[0].getrawmempool()), 0)
    newbalance = self.nodes[0].getbalance()
    assert_equal(newbalance, balance - Decimal("24.9996"))
    balance = newbalance
    # Create a double spend of AB1. Spend it again from only A's 10 output.
    # Mine double spend from node 1.
    inputs = []
    inputs.append({"txid": txA, "vout": nA})
    outputs = {}
    outputs[self.nodes[1].getnewaddress()] = Decimal("9.9999")
    tx = self.nodes[0].createrawtransaction(inputs, outputs)
    signed = self.nodes[0].signrawtransactionwithwallet(tx)
    self.nodes[1].sendrawtransaction(signed["hex"])
    self.nodes[1].generate(1)
    connect_nodes(self.nodes[0], self.nodes[1])
    sync_blocks(self.nodes)
    # Verify that B and C's 10 BCH outputs are available for spending again
    # because AB1 is now conflicted
    newbalance = self.nodes[0].getbalance()
    assert_equal(newbalance, balance + Decimal("20"))
    balance = newbalance
    # There is currently a minor bug around this and so this test doesn't work. See Issue #7315
    # Invalidate the block with the double spend and B's 10 BCH output should no longer be available
    # Don't think C's should either
    self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
    newbalance = self.nodes[0].getbalance()
    #assert_equal(newbalance, balance - Decimal("10"))
    self.log.info(
        "If balance has not declined after invalidateblock then out of mempool wallet tx which is no longer"
    )
    self.log.info(
        "conflicted has not resumed causing its inputs to be seen as spent. See Issue #7315"
    )
    self.log.info(str(balance) + " -> " + str(newbalance) + " ?")
def run_rbf_opt_in_test(self):
    """Verify the wallet's "bip125-replaceable" reporting.

    Chains four transactions: tx1 and tx2 do not signal opt-in RBF, tx3
    signals it (nSequence 0), and tx4 depends on tx3. Checks that
    listtransactions/gettransaction report "no", "yes", and — after tx3
    is replaced by a higher-fee tx3_b — "unknown" for the orphaned tx4,
    and that a mined replacement is no longer replaceable.
    """

    # Check whether a transaction signals opt-in RBF itself
    def is_opt_in(node, txid):
        rawtx = node.getrawtransaction(txid, 1)
        for x in rawtx["vin"]:
            # BIP 125: any input sequence below 0xfffffffe signals opt-in
            if x["sequence"] < 0xfffffffe:
                return True
        return False

    # Find an unconfirmed output matching a certain txid
    def get_unconfirmed_utxo_entry(node, txid_to_match):
        utxo = node.listunspent(0, 0)
        for i in utxo:
            if i["txid"] == txid_to_match:
                return i
        return None

    # 1. Chain a few transactions that don't opt-in.
    txid_1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1)
    assert (not is_opt_in(self.nodes[0], txid_1))
    assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_1},
                        {"bip125-replaceable": "no"})
    sync_mempools(self.nodes)
    assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_1},
                        {"bip125-replaceable": "no"})
    # Tx2 will build off txid_1, still not opting in to RBF.
    # The change output of txid_1 is "safe" on the sender (node0) but not
    # on the receiver (node1), which only sees an unconfirmed payment.
    utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[0], txid_1)
    assert_equal(utxo_to_use["safe"], True)
    # NOTE(fix): removed a duplicated, back-to-back identical call to
    # get_unconfirmed_utxo_entry(self.nodes[1], txid_1) here.
    utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_1)
    assert_equal(utxo_to_use["safe"], False)
    # Create tx2 using createrawtransaction
    inputs = [{"txid": utxo_to_use["txid"], "vout": utxo_to_use["vout"]}]
    outputs = {self.nodes[0].getnewaddress(): 0.999}
    tx2 = self.nodes[1].createrawtransaction(inputs, outputs)
    tx2_signed = self.nodes[1].signrawtransactionwithwallet(tx2)["hex"]
    txid_2 = self.nodes[1].sendrawtransaction(tx2_signed)
    # ...and check the result
    assert (not is_opt_in(self.nodes[1], txid_2))
    assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_2},
                        {"bip125-replaceable": "no"})
    sync_mempools(self.nodes)
    assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_2},
                        {"bip125-replaceable": "no"})
    # Tx3 will opt-in to RBF
    utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[0], txid_2)
    inputs = [{"txid": txid_2, "vout": utxo_to_use["vout"]}]
    outputs = {self.nodes[1].getnewaddress(): 0.998}
    tx3 = self.nodes[0].createrawtransaction(inputs, outputs)
    tx3_modified = tx_from_hex(tx3)
    tx3_modified.vin[0].nSequence = 0
    tx3 = bytes_to_hex_str(tx3_modified.serialize())
    tx3_signed = self.nodes[0].signrawtransactionwithwallet(tx3)['hex']
    txid_3 = self.nodes[0].sendrawtransaction(tx3_signed)
    assert (is_opt_in(self.nodes[0], txid_3))
    assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_3},
                        {"bip125-replaceable": "yes"})
    sync_mempools(self.nodes)
    assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_3},
                        {"bip125-replaceable": "yes"})
    # Tx4 will chain off tx3. Doesn't signal itself, but depends on one
    # that does.
    utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_3)
    inputs = [{"txid": txid_3, "vout": utxo_to_use["vout"]}]
    outputs = {self.nodes[0].getnewaddress(): 0.997}
    tx4 = self.nodes[1].createrawtransaction(inputs, outputs)
    tx4_signed = self.nodes[1].signrawtransactionwithwallet(tx4)["hex"]
    txid_4 = self.nodes[1].sendrawtransaction(tx4_signed)
    assert (not is_opt_in(self.nodes[1], txid_4))
    assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_4},
                        {"bip125-replaceable": "yes"})
    sync_mempools(self.nodes)
    assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_4},
                        {"bip125-replaceable": "yes"})
    # Replace tx3, and check that tx4 becomes unknown
    tx3_b = tx3_modified
    tx3_b.vout[0].nValue -= int(Decimal("0.004") * COIN)  # bump the fee
    tx3_b = bytes_to_hex_str(tx3_b.serialize())
    tx3_b_signed = self.nodes[0].signrawtransactionwithwallet(tx3_b)['hex']
    txid_3b = self.nodes[0].sendrawtransaction(tx3_b_signed, True)
    assert (is_opt_in(self.nodes[0], txid_3b))
    assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_4},
                        {"bip125-replaceable": "unknown"})
    sync_mempools(self.nodes)
    assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_4},
                        {"bip125-replaceable": "unknown"})
    # Check gettransaction as well:
    for n in self.nodes[0:2]:
        assert_equal(n.gettransaction(txid_1)["bip125-replaceable"], "no")
        assert_equal(n.gettransaction(txid_2)["bip125-replaceable"], "no")
        assert_equal(n.gettransaction(txid_3)["bip125-replaceable"], "yes")
        assert_equal(
            n.gettransaction(txid_3b)["bip125-replaceable"], "yes")
        assert_equal(
            n.gettransaction(txid_4)["bip125-replaceable"], "unknown")
    # After mining a transaction, it's no longer BIP125-replaceable
    self.nodes[0].generate(1)
    assert (txid_3b not in self.nodes[0].getrawmempool())
    assert_equal(
        self.nodes[0].gettransaction(txid_3b)["bip125-replaceable"], "no")
    assert_equal(
        self.nodes[0].gettransaction(txid_4)["bip125-replaceable"],
        "unknown")
def run_test(self):
    """Test wallet backwards compatibility across node versions.

    Creates wallets (regular, watch-only, blank) on the newest node and
    on v0.19/v0.18 nodes, copies the wallet directories between version
    datadirs, and verifies each older/newer node can (or correctly
    refuses to) load them; finally checks the v0.17 -> master upgrade
    path.  The exact order of create/unload/copy/load calls matters.
    """
    self.nodes[0].generatetoaddress(101, self.nodes[0].getnewaddress())
    sync_blocks(self.nodes)
    # Sanity check the test framework:
    res = self.nodes[self.num_nodes - 1].getblockchaininfo()
    assert_equal(res['blocks'], 101)
    # Node roles by position: newest first, oldest (v0.17) last.
    node_master = self.nodes[self.num_nodes - 4]
    node_v19 = self.nodes[self.num_nodes - 3]
    node_v18 = self.nodes[self.num_nodes - 2]
    node_v17 = self.nodes[self.num_nodes - 1]
    self.log.info("Test wallet backwards compatibility...")
    # Create a number of wallets and open them in older versions:
    # w1: regular wallet, created on master: update this test when default
    # wallets can no longer be opened by older versions.
    node_master.createwallet(wallet_name="w1")
    wallet = node_master.get_wallet_rpc("w1")
    info = wallet.getwalletinfo()
    assert info['private_keys_enabled']
    assert info['keypoolsize'] > 0
    # Create a confirmed transaction, receiving coins
    address = wallet.getnewaddress()
    self.nodes[0].sendtoaddress(address, 10)
    sync_mempools(self.nodes)
    self.nodes[0].generate(1)
    sync_blocks(self.nodes)
    # Create a conflicting transaction using RBF
    return_address = self.nodes[0].getnewaddress()
    tx1_id = self.nodes[1].sendtoaddress(return_address, 1)
    tx2_id = self.nodes[1].bumpfee(tx1_id)["txid"]
    # Confirm the transaction
    sync_mempools(self.nodes)
    self.nodes[0].generate(1)
    sync_blocks(self.nodes)
    # Create another conflicting transaction using RBF
    tx3_id = self.nodes[1].sendtoaddress(return_address, 1)
    tx4_id = self.nodes[1].bumpfee(tx3_id)["txid"]
    # Abandon transaction, but don't confirm
    self.nodes[1].abandontransaction(tx3_id)
    # w1_v19: regular wallet, created with v0.19
    node_v19.createwallet(wallet_name="w1_v19")
    wallet = node_v19.get_wallet_rpc("w1_v19")
    info = wallet.getwalletinfo()
    assert info['private_keys_enabled']
    assert info['keypoolsize'] > 0
    # Use addmultisigaddress (see #18075)
    address_18075 = wallet.addmultisigaddress(1, [
        "0296b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52",
        "037211a824f55b505228e4c3d5194c1fcfaa15a456abdf37f9b9d97a4040afc073"
    ], "", "legacy")["address"]
    assert wallet.getaddressinfo(address_18075)["solvable"]
    # w1_v18: regular wallet, created with v0.18
    node_v18.createwallet(wallet_name="w1_v18")
    wallet = node_v18.get_wallet_rpc("w1_v18")
    info = wallet.getwalletinfo()
    assert info['private_keys_enabled']
    assert info['keypoolsize'] > 0
    # w2: wallet with private keys disabled, created on master: update this
    # test when default wallets private keys disabled can no longer be
    # opened by older versions.
    node_master.createwallet(wallet_name="w2", disable_private_keys=True)
    wallet = node_master.get_wallet_rpc("w2")
    info = wallet.getwalletinfo()
    assert info['private_keys_enabled'] == False
    assert info['keypoolsize'] == 0
    # w2_v19: wallet with private keys disabled, created with v0.19
    node_v19.createwallet(wallet_name="w2_v19", disable_private_keys=True)
    wallet = node_v19.get_wallet_rpc("w2_v19")
    info = wallet.getwalletinfo()
    assert info['private_keys_enabled'] == False
    assert info['keypoolsize'] == 0
    # w2_v18: wallet with private keys disabled, created with v0.18
    node_v18.createwallet(wallet_name="w2_v18", disable_private_keys=True)
    wallet = node_v18.get_wallet_rpc("w2_v18")
    info = wallet.getwalletinfo()
    assert info['private_keys_enabled'] == False
    assert info['keypoolsize'] == 0
    # w3: blank wallet, created on master: update this
    # test when default blank wallets can no longer be opened by older versions.
    node_master.createwallet(wallet_name="w3", blank=True)
    wallet = node_master.get_wallet_rpc("w3")
    info = wallet.getwalletinfo()
    assert info['private_keys_enabled']
    assert info['keypoolsize'] == 0
    # w3_v19: blank wallet, created with v0.19
    node_v19.createwallet(wallet_name="w3_v19", blank=True)
    wallet = node_v19.get_wallet_rpc("w3_v19")
    info = wallet.getwalletinfo()
    assert info['private_keys_enabled']
    assert info['keypoolsize'] == 0
    # w3_v18: blank wallet, created with v0.18
    node_v18.createwallet(wallet_name="w3_v18", blank=True)
    wallet = node_v18.get_wallet_rpc("w3_v18")
    info = wallet.getwalletinfo()
    assert info['private_keys_enabled']
    assert info['keypoolsize'] == 0
    # Copy the wallets to older nodes:
    node_master_wallets_dir = os.path.join(node_master.datadir,
                                           "regtest/wallets")
    node_v19_wallets_dir = os.path.join(node_v19.datadir, "regtest/wallets")
    node_v18_wallets_dir = os.path.join(node_v18.datadir, "regtest/wallets")
    node_v17_wallets_dir = os.path.join(node_v17.datadir, "regtest/wallets")
    # Unload before copying so the on-disk files are consistent.
    node_master.unloadwallet("w1")
    node_master.unloadwallet("w2")
    node_v19.unloadwallet("w1_v19")
    node_v19.unloadwallet("w2_v19")
    node_v18.unloadwallet("w1_v18")
    node_v18.unloadwallet("w2_v18")
    # Copy wallets to v0.17
    for wallet in os.listdir(node_master_wallets_dir):
        shutil.copytree(os.path.join(node_master_wallets_dir, wallet),
                        os.path.join(node_v17_wallets_dir, wallet))
    for wallet in os.listdir(node_v18_wallets_dir):
        shutil.copytree(os.path.join(node_v18_wallets_dir, wallet),
                        os.path.join(node_v17_wallets_dir, wallet))
    # Copy wallets to v0.18
    for wallet in os.listdir(node_master_wallets_dir):
        shutil.copytree(os.path.join(node_master_wallets_dir, wallet),
                        os.path.join(node_v18_wallets_dir, wallet))
    # Copy wallets to v0.19
    for wallet in os.listdir(node_master_wallets_dir):
        shutil.copytree(os.path.join(node_master_wallets_dir, wallet),
                        os.path.join(node_v19_wallets_dir, wallet))
    # Open the wallets in v0.19
    node_v19.loadwallet("w1")
    wallet = node_v19.get_wallet_rpc("w1")
    info = wallet.getwalletinfo()
    assert info['private_keys_enabled']
    assert info['keypoolsize'] > 0
    # Check the RBF/conflict/abandon history created above survives the
    # version round-trip.
    txs = wallet.listtransactions()
    assert_equal(len(txs), 5)
    assert_equal(txs[1]["txid"], tx1_id)
    assert_equal(txs[2]["walletconflicts"], [tx1_id])
    assert_equal(txs[1]["replaced_by_txid"], tx2_id)
    assert not (txs[1]["abandoned"])
    assert_equal(txs[1]["confirmations"], -1)
    assert_equal(txs[2]["blockindex"], 1)
    assert txs[3]["abandoned"]
    assert_equal(txs[4]["walletconflicts"], [tx3_id])
    assert_equal(txs[3]["replaced_by_txid"], tx4_id)
    assert not (hasattr(txs[3], "blockindex"))
    node_v19.loadwallet("w2")
    wallet = node_v19.get_wallet_rpc("w2")
    info = wallet.getwalletinfo()
    assert info['private_keys_enabled'] == False
    assert info['keypoolsize'] == 0
    node_v19.loadwallet("w3")
    wallet = node_v19.get_wallet_rpc("w3")
    info = wallet.getwalletinfo()
    assert info['private_keys_enabled']
    assert info['keypoolsize'] == 0
    # Open the wallets in v0.18
    node_v18.loadwallet("w1")
    wallet = node_v18.get_wallet_rpc("w1")
    info = wallet.getwalletinfo()
    assert info['private_keys_enabled']
    assert info['keypoolsize'] > 0
    txs = wallet.listtransactions()
    assert_equal(len(txs), 5)
    assert_equal(txs[1]["txid"], tx1_id)
    assert_equal(txs[2]["walletconflicts"], [tx1_id])
    assert_equal(txs[1]["replaced_by_txid"], tx2_id)
    assert not (txs[1]["abandoned"])
    assert_equal(txs[1]["confirmations"], -1)
    assert_equal(txs[2]["blockindex"], 1)
    assert txs[3]["abandoned"]
    assert_equal(txs[4]["walletconflicts"], [tx3_id])
    assert_equal(txs[3]["replaced_by_txid"], tx4_id)
    assert not (hasattr(txs[3], "blockindex"))
    node_v18.loadwallet("w2")
    wallet = node_v18.get_wallet_rpc("w2")
    info = wallet.getwalletinfo()
    assert info['private_keys_enabled'] == False
    assert info['keypoolsize'] == 0
    node_v18.loadwallet("w3")
    wallet = node_v18.get_wallet_rpc("w3")
    info = wallet.getwalletinfo()
    assert info['private_keys_enabled']
    assert info['keypoolsize'] == 0
    # Open the wallets in v0.17
    node_v17.loadwallet("w1_v18")
    wallet = node_v17.get_wallet_rpc("w1_v18")
    info = wallet.getwalletinfo()
    assert info['private_keys_enabled']
    assert info['keypoolsize'] > 0
    node_v17.loadwallet("w1")
    wallet = node_v17.get_wallet_rpc("w1")
    info = wallet.getwalletinfo()
    assert info['private_keys_enabled']
    assert info['keypoolsize'] > 0
    node_v17.loadwallet("w2_v18")
    wallet = node_v17.get_wallet_rpc("w2_v18")
    info = wallet.getwalletinfo()
    assert info['private_keys_enabled'] == False
    assert info['keypoolsize'] == 0
    node_v17.loadwallet("w2")
    wallet = node_v17.get_wallet_rpc("w2")
    info = wallet.getwalletinfo()
    assert info['private_keys_enabled'] == False
    assert info['keypoolsize'] == 0
    # RPC loadwallet failure causes shirecoind to exit, in addition to the RPC
    # call failure, so the following test won't work:
    # assert_raises_rpc_error(-4, "Wallet loading failed.", node_v17.loadwallet, 'w3_v18')
    # Instead, we stop node and try to launch it with the wallet:
    self.stop_node(self.num_nodes - 1)
    node_v17.assert_start_raises_init_error([
        "-wallet=w3_v18"
    ], "Error: Error loading w3_v18: Wallet requires newer version of Shirecoin Core"
                                            )
    node_v17.assert_start_raises_init_error([
        "-wallet=w3"
    ], "Error: Error loading w3: Wallet requires newer version of Shirecoin Core"
                                            )
    self.start_node(self.num_nodes - 1)
    self.log.info("Test wallet upgrade path...")
    # u1: regular wallet, created with v0.17
    node_v17.createwallet(wallet_name="u1_v17")
    wallet = node_v17.get_wallet_rpc("u1_v17")
    address = wallet.getnewaddress("bech32")
    info = wallet.getaddressinfo(address)
    hdkeypath = info["hdkeypath"]
    pubkey = info["pubkey"]
    # Copy the 0.17 wallet to the last Shirecoin Core version and open it:
    node_v17.unloadwallet("u1_v17")
    shutil.copytree(os.path.join(node_v17_wallets_dir, "u1_v17"),
                    os.path.join(node_master_wallets_dir, "u1_v17"))
    node_master.loadwallet("u1_v17")
    wallet = node_master.get_wallet_rpc("u1_v17")
    info = wallet.getaddressinfo(address)
    # The master node should report the same key as a descriptor built from
    # the v0.17 wallet's fingerprint, keypath, and pubkey.
    descriptor = "wpkh([" + info["hdmasterfingerprint"] + hdkeypath[
        1:] + "]" + pubkey + ")"
    assert_equal(info["desc"], descsum_create(descriptor))
    # Copy the 0.19 wallet to the last Shirecoin Core version and open it:
    shutil.copytree(os.path.join(node_v19_wallets_dir, "w1_v19"),
                    os.path.join(node_master_wallets_dir, "w1_v19"))
    node_master.loadwallet("w1_v19")
    wallet = node_master.get_wallet_rpc("w1_v19")
    assert wallet.getaddressinfo(address_18075)["solvable"]
def run_test(self):
    """Exercise basic wallet RPC behavior across three connected nodes.

    Scenario (order matters — each stage's balance assertions depend on the
    previous stage): mine initial coins, move funds between nodes with
    sendtoaddress / raw transactions / sendmany (with and without
    subtract-fee-from-amount), test resendwallettransactions via a fourth
    node, list a zero-value output, test -walletbroadcast=0 behavior, and
    finally check that an oversized z_sendmany is rejected.

    NOTE(review): this block is Python 2 code (print statement, xrange,
    `except X, e` syntax) — presumably from a Zcash/Zero-derived test suite.
    """
    print "Mining blocks..."
    self.nodes[0].generate(4)
    walletinfo = self.nodes[0].getwalletinfo()
    # 4 blocks x 10 per block are still immature (coinbase maturity)
    assert_equal(walletinfo['immature_balance'], 40)
    assert_equal(walletinfo['balance'], 0)
    self.sync_all()
    # node1 mines enough blocks to mature node0's coinbase outputs
    self.nodes[1].generate(721)
    self.sync_all()
    assert_equal(self.nodes[0].getbalance(), 40)
    assert_equal(self.nodes[1].getbalance(), 10)
    assert_equal(self.nodes[2].getbalance(), 0)
    # "*" queries the balance across all accounts; must agree with default
    assert_equal(self.nodes[0].getbalance("*"), 40)
    assert_equal(self.nodes[1].getbalance("*"), 10)
    assert_equal(self.nodes[2].getbalance("*"), 0)
    # Send 21 ZERO from 0 to 2 using sendtoaddress call.
    # Second transaction will be child of first, and will require a fee
    self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
    self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
    walletinfo = self.nodes[0].getwalletinfo()
    assert_equal(walletinfo['immature_balance'], 0)
    # Have node0 mine a block, thus it will collect its own fee.
    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()
    # Have node1 generate 100 blocks (so node0 can recover the fee)
    self.nodes[1].generate(720)
    self.sync_all()
    # node0 should end up with 50 btc in block rewards plus fees, but
    # minus the 21 plus fees sent to node2
    assert_equal(self.nodes[0].getbalance(), 50 - 21)
    assert_equal(self.nodes[2].getbalance(), 21)
    assert_equal(self.nodes[0].getbalance("*"), 50 - 21)
    assert_equal(self.nodes[2].getbalance("*"), 21)
    # Node0 should have three unspent outputs.
    # Create a couple of transactions to send them to node2, submit them through
    # node1, and make sure both node0 and node2 pick them up properly:
    node0utxos = self.nodes[0].listunspent(1)
    assert_equal(len(node0utxos), 3)
    # Check 'generated' field of listunspent
    # Node 0: has one coinbase utxo and two regular utxos
    assert_equal(
        sum(int(uxto["generated"] is True) for uxto in node0utxos), 1)
    # Node 1: has 721 coinbase utxos and no regular utxos
    node1utxos = self.nodes[1].listunspent(1)
    assert_equal(len(node1utxos), 721)
    assert_equal(
        sum(int(uxto["generated"] is True) for uxto in node1utxos), 721)
    # Node 2: has no coinbase utxos and two regular utxos
    node2utxos = self.nodes[2].listunspent(1)
    assert_equal(len(node2utxos), 2)
    assert_equal(
        sum(int(uxto["generated"] is True) for uxto in node2utxos), 0)
    # create both transactions
    txns_to_send = []
    for utxo in node0utxos:
        inputs = []
        outputs = {}
        inputs.append({"txid": utxo["txid"], "vout": utxo["vout"]})
        # spend the full UTXO amount; the miner takes no explicit fee here
        outputs[self.nodes[2].getnewaddress("")] = utxo["amount"]
        raw_tx = self.nodes[0].createrawtransaction(inputs, outputs)
        txns_to_send.append(self.nodes[0].signrawtransaction(raw_tx))
    # Have node 1 (miner) send the transactions
    self.nodes[1].sendrawtransaction(txns_to_send[0]["hex"], True)
    self.nodes[1].sendrawtransaction(txns_to_send[1]["hex"], True)
    self.nodes[1].sendrawtransaction(txns_to_send[2]["hex"], True)
    # Have node1 mine a block to confirm transactions:
    self.sync_all()
    self.nodes[1].generate(1)
    self.sync_all()
    # node0 emptied its wallet into node2
    assert_equal(self.nodes[0].getbalance(), 0)
    assert_equal(self.nodes[2].getbalance(), 50)
    assert_equal(self.nodes[0].getbalance("*"), 0)
    assert_equal(self.nodes[2].getbalance("*"), 50)
    # Send 10 ZERO normal
    address = self.nodes[0].getnewaddress("")
    # fixed fee so the exact post-send balances below are predictable
    self.nodes[2].settxfee(Decimal('0.001'))
    self.nodes[2].sendtoaddress(address, 10, "", "", False)
    self.sync_all()
    self.nodes[2].generate(1)
    self.sync_all()
    assert_equal(self.nodes[2].getbalance(), Decimal('39.99900000'))
    assert_equal(self.nodes[0].getbalance(), Decimal('10.00000000'))
    assert_equal(self.nodes[2].getbalance("*"), Decimal('39.99900000'))
    assert_equal(self.nodes[0].getbalance("*"), Decimal('10.00000000'))
    # Send 10 BTC with subtract fee from amount
    self.nodes[2].sendtoaddress(address, 10, "", "", True)
    self.sync_all()
    self.nodes[2].generate(1)
    self.sync_all()
    # fee comes out of the recipient's amount this time
    assert_equal(self.nodes[2].getbalance(), Decimal('29.99900000'))
    assert_equal(self.nodes[0].getbalance(), Decimal('19.99900000'))
    assert_equal(self.nodes[2].getbalance("*"), Decimal('29.99900000'))
    assert_equal(self.nodes[0].getbalance("*"), Decimal('19.99900000'))
    # Sendmany 10 BTC
    self.nodes[2].sendmany("", {address: 10}, 0, "", [])
    self.sync_all()
    self.nodes[2].generate(1)
    self.sync_all()
    assert_equal(self.nodes[2].getbalance(), Decimal('19.99800000'))
    assert_equal(self.nodes[0].getbalance(), Decimal('29.99900000'))
    assert_equal(self.nodes[2].getbalance("*"), Decimal('19.99800000'))
    assert_equal(self.nodes[0].getbalance("*"), Decimal('29.99900000'))
    # Sendmany 10 BTC with subtract fee from amount
    self.nodes[2].sendmany("", {address: 10}, 0, "", [address])
    self.sync_all()
    self.nodes[2].generate(1)
    self.sync_all()
    assert_equal(self.nodes[2].getbalance(), Decimal('9.99800000'))
    assert_equal(self.nodes[0].getbalance(), Decimal('39.99800000'))
    assert_equal(self.nodes[2].getbalance("*"), Decimal('9.99800000'))
    assert_equal(self.nodes[0].getbalance("*"), Decimal('39.99800000'))
    # Test ResendWalletTransactions:
    # Create a couple of transactions, then start up a fourth
    # node (nodes[3]) and ask nodes[0] to rebroadcast.
    # EXPECT: nodes[3] should have those transactions in its mempool.
    txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1)
    txid2 = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
    sync_mempools(self.nodes)
    self.nodes.append(start_node(3, self.options.tmpdir))
    connect_nodes_bi(self.nodes, 0, 3)
    sync_blocks(self.nodes)
    relayed = self.nodes[0].resendwallettransactions()
    assert_equal(set(relayed), set([txid1, txid2]))
    sync_mempools(self.nodes)
    assert (txid1 in self.nodes[3].getrawmempool())
    #check if we can list zero value tx as available coins
    #1. create rawtx
    #2. hex-changed one output to 0.0
    #3. sign and send
    #4. check if recipient (node0) can list the zero value tx
    usp = self.nodes[1].listunspent()
    inputs = [{"txid": usp[0]['txid'], "vout": usp[0]['vout']}]
    outputs = {
        self.nodes[1].getnewaddress(): 9.998,
        self.nodes[0].getnewaddress(): 11.11
    }
    # NOTE(review): the hex edit assumes 11.11 serializes to the byte
    # sequence "c0833842" in the raw tx — brittle if amounts change.
    rawTx = self.nodes[1].createrawtransaction(inputs, outputs).replace(
        "c0833842", "00000000")  #replace 11.11 with 0.0 (int32)
    decRawTx = self.nodes[1].decoderawtransaction(rawTx)
    signedRawTx = self.nodes[1].signrawtransaction(rawTx)
    decRawTx = self.nodes[1].decoderawtransaction(signedRawTx['hex'])
    zeroValueTxid = decRawTx['txid']
    self.nodes[1].sendrawtransaction(signedRawTx['hex'])
    self.sync_all()
    self.nodes[1].generate(1)  #mine a block
    self.sync_all()
    unspentTxs = self.nodes[0].listunspent(
    )  #zero value tx must be in listunspents output
    found = False
    for uTx in unspentTxs:
        if uTx['txid'] == zeroValueTxid:
            found = True
            assert_equal(uTx['amount'], Decimal('0.00000000'))
    assert (found)
    #do some -walletbroadcast tests
    stop_nodes(self.nodes)
    wait_bitcoinds()
    self.nodes = start_nodes(
        3, self.options.tmpdir,
        [["-walletbroadcast=0"], ["-walletbroadcast=0"], ["-walletbroadcast=0"]])
    connect_nodes_bi(self.nodes, 0, 1)
    connect_nodes_bi(self.nodes, 1, 2)
    connect_nodes_bi(self.nodes, 0, 2)
    self.sync_all()
    # with -walletbroadcast=0 this tx stays local to node0's wallet
    txIdNotBroadcasted = self.nodes[0].sendtoaddress(
        self.nodes[2].getnewaddress(), 2)
    txObjNotBroadcasted = self.nodes[0].gettransaction(txIdNotBroadcasted)
    self.sync_all()
    self.nodes[1].generate(1)  #mine a block, tx should not be in there
    self.sync_all()
    assert_equal(self.nodes[2].getbalance(), Decimal('9.99800000'))  #should not be changed because tx was not broadcasted
    assert_equal(self.nodes[2].getbalance("*"), Decimal('9.99800000'))  #should not be changed because tx was not broadcasted
    #now broadcast from another node, mine a block, sync, and check the balance
    self.nodes[1].sendrawtransaction(txObjNotBroadcasted['hex'])
    self.sync_all()
    self.nodes[1].generate(1)
    self.sync_all()
    txObjNotBroadcasted = self.nodes[0].gettransaction(txIdNotBroadcasted)
    assert_equal(self.nodes[2].getbalance(), Decimal('11.99800000'))  #should not be
    assert_equal(self.nodes[2].getbalance("*"), Decimal('11.99800000'))  #should not be
    #create another tx
    txIdNotBroadcasted = self.nodes[0].sendtoaddress(
        self.nodes[2].getnewaddress(), 2)
    #restart the nodes with -walletbroadcast=1
    stop_nodes(self.nodes)
    wait_bitcoinds()
    self.nodes = start_nodes(3, self.options.tmpdir)
    connect_nodes_bi(self.nodes, 0, 1)
    connect_nodes_bi(self.nodes, 1, 2)
    connect_nodes_bi(self.nodes, 0, 2)
    sync_blocks(self.nodes)
    self.nodes[0].generate(1)
    sync_blocks(self.nodes)
    #tx should be added to balance because after restarting the nodes tx should be broadcastet
    assert_equal(self.nodes[2].getbalance(), Decimal('13.99800000'))  #should not be
    assert_equal(self.nodes[2].getbalance("*"), Decimal('13.99800000'))  #should not be
    # send from node 0 to node 2 taddr
    mytaddr = self.nodes[2].getnewaddress()
    mytxid = self.nodes[0].sendtoaddress(mytaddr, 10.0)
    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()
    mybalance = self.nodes[2].z_getbalance(mytaddr)
    assert_equal(mybalance, Decimal('10.0'))
    # a plain transparent send must contain no joinsplits
    mytxdetails = self.nodes[2].gettransaction(mytxid)
    myvjoinsplits = mytxdetails["vjoinsplit"]
    assert_equal(0, len(myvjoinsplits))
    # z_sendmany is expected to fail if tx size breaks limit
    myzaddr = self.nodes[0].z_getnewaddress()
    recipients = []
    num_t_recipients = 3000
    amount_per_recipient = Decimal('0.00000001')
    errorString = ''
    for i in xrange(0, num_t_recipients):
        newtaddr = self.nodes[2].getnewaddress()
        recipients.append({
            "address": newtaddr,
            "amount": amount_per_recipient
        })
    # Issue #2759 Workaround START
    # HTTP connection to node 0 may fall into a state, during the few minutes it takes to process
    # loop above to create new addresses, that when z_sendmany is called with a large amount of
    # rpc data in recipients, the connection fails with a 'broken pipe' error.  Making a RPC call
    # to node 0 before calling z_sendmany appears to fix this issue, perhaps putting the HTTP
    # connection into a good state to handle a large amount of data in recipients.
    self.nodes[0].getinfo()
    # Issue #2759 Workaround END
    try:
        self.nodes[0].z_sendmany(myzaddr, recipients)
    except JSONRPCException, e:
        errorString = e.error['message']
def run_test(self):
    """End-to-end wallet RPC regression test over a 3-node (plus late 4th) network.

    Covers, in strict order (later stages reuse balances computed earlier):
    gettxout with/without mempool, getmemoryinfo locked-memory growth,
    lockunspent/listlockunspent, plain/subtract-fee sends, sendmany,
    resendwallettransactions via node 3, a hand-edited zero-value output,
    -walletbroadcast=0, string/scientific-notation amounts, watch-only
    import vs. importprivkey spendability, unicode labels, maintenance
    flags (-rescan/-reindex/-zapwallettxes), listsinceblock, and the
    -walletrejectlongchains mempool-chain limit.
    """
    # Check that there's no UTXO on none of the nodes
    assert_equal(len(self.nodes[0].listunspent()), 0)
    assert_equal(len(self.nodes[1].listunspent()), 0)
    assert_equal(len(self.nodes[2].listunspent()), 0)
    self.log.info("Mining blocks...")
    self.nodes[0].generate(1)
    walletinfo = self.nodes[0].getwalletinfo()
    # freshly mined coinbase is immature
    assert_equal(walletinfo['immature_balance'], 50)
    assert_equal(walletinfo['balance'], 0)
    self.sync_all([self.nodes[0:3]])
    # 101 more blocks mature node0's coinbase (and node1's first one)
    self.nodes[1].generate(101)
    self.sync_all([self.nodes[0:3]])
    assert_equal(self.nodes[0].getbalance(), 50)
    assert_equal(self.nodes[1].getbalance(), 50)
    assert_equal(self.nodes[2].getbalance(), 0)
    # Check that only first and second nodes have UTXOs
    utxos = self.nodes[0].listunspent()
    assert_equal(len(utxos), 1)
    assert_equal(len(self.nodes[1].listunspent()), 1)
    assert_equal(len(self.nodes[2].listunspent()), 0)
    self.log.info("test gettxout")
    confirmed_txid, confirmed_index = utxos[0]["txid"], utxos[0]["vout"]
    # First, outputs that are unspent both in the chain and in the
    # mempool should appear with or without include_mempool
    txout = self.nodes[0].gettxout(txid=confirmed_txid, n=confirmed_index, include_mempool=False)
    assert_equal(txout['value'], 50)
    txout = self.nodes[0].gettxout(txid=confirmed_txid, n=confirmed_index, include_mempool=True)
    assert_equal(txout['value'], 50)
    # Send 21 DGB from 0 to 2 using sendtoaddress call.
    # Locked memory should use at least 32 bytes to sign each transaction
    self.log.info("test getmemoryinfo")
    memory_before = self.nodes[0].getmemoryinfo()
    self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
    mempool_txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
    memory_after = self.nodes[0].getmemoryinfo()
    # two signatures -> at least 2 * 32 bytes of additional locked memory
    assert(memory_before['locked']['used'] + 64 <= memory_after['locked']['used'])
    self.log.info("test gettxout (second part)")
    # utxo spent in mempool should be visible if you exclude mempool
    # but invisible if you include mempool
    txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, False)
    assert_equal(txout['value'], 50)
    txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, True)
    assert txout is None
    # new utxo from mempool should be invisible if you exclude mempool
    # but visible if you include mempool
    txout = self.nodes[0].gettxout(mempool_txid, 0, False)
    assert txout is None
    txout1 = self.nodes[0].gettxout(mempool_txid, 0, True)
    txout2 = self.nodes[0].gettxout(mempool_txid, 1, True)
    # note the mempool tx will have randomly assigned indices
    # but 10 will go to node2 and the rest will go to node0
    balance = self.nodes[0].getbalance()
    assert_equal(set([txout1['value'], txout2['value']]), set([10, balance]))
    walletinfo = self.nodes[0].getwalletinfo()
    assert_equal(walletinfo['immature_balance'], 0)
    # Have node0 mine a block, thus it will collect its own fee.
    self.nodes[0].generate(1)
    self.sync_all([self.nodes[0:3]])
    # Exercise locking of unspent outputs
    unspent_0 = self.nodes[2].listunspent()[0]
    unspent_0 = {"txid": unspent_0["txid"], "vout": unspent_0["vout"]}
    # unlocking an output that was never locked must fail
    assert_raises_rpc_error(-8, "Invalid parameter, expected locked output", self.nodes[2].lockunspent, True, [unspent_0])
    self.nodes[2].lockunspent(False, [unspent_0])
    assert_raises_rpc_error(-8, "Invalid parameter, output already locked", self.nodes[2].lockunspent, False, [unspent_0])
    # locked output is excluded from coin selection -> insufficient funds
    assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].sendtoaddress, self.nodes[2].getnewaddress(), 20)
    assert_equal([unspent_0], self.nodes[2].listlockunspent())
    self.nodes[2].lockunspent(True, [unspent_0])
    assert_equal(len(self.nodes[2].listlockunspent()), 0)
    assert_raises_rpc_error(-8, "Invalid parameter, unknown transaction", self.nodes[2].lockunspent, False, [{"txid": "0000000000000000000000000000000000", "vout": 0}])
    assert_raises_rpc_error(-8, "Invalid parameter, vout index out of bounds", self.nodes[2].lockunspent, False, [{"txid": unspent_0["txid"], "vout": 999}])
    # An output should be unlocked when spent
    unspent_0 = self.nodes[1].listunspent()[0]
    self.nodes[1].lockunspent(False, [unspent_0])
    tx = self.nodes[1].createrawtransaction([unspent_0], { self.nodes[1].getnewaddress() : 1 })
    tx = self.nodes[1].fundrawtransaction(tx)['hex']
    tx = self.nodes[1].signrawtransactionwithwallet(tx)["hex"]
    self.nodes[1].sendrawtransaction(tx)
    assert_equal(len(self.nodes[1].listlockunspent()), 0)
    # Have node1 generate 100 blocks (so node0 can recover the fee)
    self.nodes[1].generate(100)
    self.sync_all([self.nodes[0:3]])
    # node0 should end up with 100 dgb in block rewards plus fees, but
    # minus the 21 plus fees sent to node2
    assert_equal(self.nodes[0].getbalance(), 100 - 21)
    assert_equal(self.nodes[2].getbalance(), 21)
    # Node0 should have two unspent outputs.
    # Create a couple of transactions to send them to node2, submit them through
    # node1, and make sure both node0 and node2 pick them up properly:
    node0utxos = self.nodes[0].listunspent(1)
    assert_equal(len(node0utxos), 2)
    # create both transactions
    txns_to_send = []
    for utxo in node0utxos:
        inputs = []
        outputs = {}
        inputs.append({"txid": utxo["txid"], "vout": utxo["vout"]})
        # leave 3 per tx as miner fee
        outputs[self.nodes[2].getnewaddress()] = utxo["amount"] - 3
        raw_tx = self.nodes[0].createrawtransaction(inputs, outputs)
        txns_to_send.append(self.nodes[0].signrawtransactionwithwallet(raw_tx))
    # Have node 1 (miner) send the transactions
    self.nodes[1].sendrawtransaction(txns_to_send[0]["hex"], True)
    self.nodes[1].sendrawtransaction(txns_to_send[1]["hex"], True)
    # Have node1 mine a block to confirm transactions:
    self.nodes[1].generate(1)
    self.sync_all([self.nodes[0:3]])
    assert_equal(self.nodes[0].getbalance(), 0)
    # 21 + (79 - 2*3 fee) = 94
    assert_equal(self.nodes[2].getbalance(), 94)
    # Verify that a spent output cannot be locked anymore
    spent_0 = {"txid": node0utxos[0]["txid"], "vout": node0utxos[0]["vout"]}
    assert_raises_rpc_error(-8, "Invalid parameter, expected unspent output", self.nodes[0].lockunspent, False, [spent_0])
    # Send 10 DGB normal
    address = self.nodes[0].getnewaddress("test")
    fee_per_byte = Decimal('0.001') / 1000
    self.nodes[2].settxfee(fee_per_byte * 1000)
    txid = self.nodes[2].sendtoaddress(address, 10, "", "", False)
    self.nodes[2].generate(1)
    self.sync_all([self.nodes[0:3]])
    # check_fee_amount verifies balance within expected fee tolerance
    node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), Decimal('84'), fee_per_byte, self.get_vsize(self.nodes[2].getrawtransaction(txid)))
    assert_equal(self.nodes[0].getbalance(), Decimal('10'))
    # Send 10 DGB with subtract fee from amount
    txid = self.nodes[2].sendtoaddress(address, 10, "", "", True)
    self.nodes[2].generate(1)
    self.sync_all([self.nodes[0:3]])
    node_2_bal -= Decimal('10')
    assert_equal(self.nodes[2].getbalance(), node_2_bal)
    node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), Decimal('20'), fee_per_byte, self.get_vsize(self.nodes[2].getrawtransaction(txid)))
    # Sendmany 10 DGB
    txid = self.nodes[2].sendmany('', {address: 10}, 0, "", [])
    self.nodes[2].generate(1)
    self.sync_all([self.nodes[0:3]])
    node_0_bal += Decimal('10')
    node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), node_2_bal - Decimal('10'), fee_per_byte, self.get_vsize(self.nodes[2].getrawtransaction(txid)))
    assert_equal(self.nodes[0].getbalance(), node_0_bal)
    # Sendmany 10 DGB with subtract fee from amount
    txid = self.nodes[2].sendmany('', {address: 10}, 0, "", [address])
    self.nodes[2].generate(1)
    self.sync_all([self.nodes[0:3]])
    node_2_bal -= Decimal('10')
    assert_equal(self.nodes[2].getbalance(), node_2_bal)
    node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), node_0_bal + Decimal('10'), fee_per_byte, self.get_vsize(self.nodes[2].getrawtransaction(txid)))
    # Test ResendWalletTransactions:
    # Create a couple of transactions, then start up a fourth
    # node (nodes[3]) and ask nodes[0] to rebroadcast.
    # EXPECT: nodes[3] should have those transactions in its mempool.
    txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1)
    txid2 = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
    sync_mempools(self.nodes[0:2])
    self.start_node(3)
    connect_nodes_bi(self.nodes, 0, 3)
    sync_blocks(self.nodes)
    relayed = self.nodes[0].resendwallettransactions()
    assert_equal(set(relayed), {txid1, txid2})
    sync_mempools(self.nodes)
    assert(txid1 in self.nodes[3].getrawmempool())
    # check if we can list zero value tx as available coins
    # 1. create raw_tx
    # 2. hex-changed one output to 0.0
    # 3. sign and send
    # 4. check if recipient (node0) can list the zero value tx
    usp = self.nodes[1].listunspent(query_options={'minimumAmount': '49.998'})[0]
    inputs = [{"txid": usp['txid'], "vout": usp['vout']}]
    outputs = {self.nodes[1].getnewaddress(): 49.998, self.nodes[0].getnewaddress(): 11.11}
    # NOTE(review): assumes 11.11 serializes as bytes "c0833842" in the raw
    # tx hex — brittle if the amount or serialization changes.
    raw_tx = self.nodes[1].createrawtransaction(inputs, outputs).replace("c0833842", "00000000")  # replace 11.11 with 0.0 (int32)
    signed_raw_tx = self.nodes[1].signrawtransactionwithwallet(raw_tx)
    decoded_raw_tx = self.nodes[1].decoderawtransaction(signed_raw_tx['hex'])
    zero_value_txid = decoded_raw_tx['txid']
    self.nodes[1].sendrawtransaction(signed_raw_tx['hex'])
    self.sync_all()
    self.nodes[1].generate(1)  # mine a block
    self.sync_all()
    unspent_txs = self.nodes[0].listunspent()  # zero value tx must be in listunspents output
    found = False
    for uTx in unspent_txs:
        if uTx['txid'] == zero_value_txid:
            found = True
            assert_equal(uTx['amount'], Decimal('0'))
    assert(found)
    # do some -walletbroadcast tests
    self.stop_nodes()
    self.start_node(0, ["-walletbroadcast=0"])
    self.start_node(1, ["-walletbroadcast=0"])
    self.start_node(2, ["-walletbroadcast=0"])
    connect_nodes_bi(self.nodes, 0, 1)
    connect_nodes_bi(self.nodes, 1, 2)
    connect_nodes_bi(self.nodes, 0, 2)
    self.sync_all([self.nodes[0:3]])
    txid_not_broadcast = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2)
    tx_obj_not_broadcast = self.nodes[0].gettransaction(txid_not_broadcast)
    self.nodes[1].generate(1)  # mine a block, tx should not be in there
    self.sync_all([self.nodes[0:3]])
    assert_equal(self.nodes[2].getbalance(), node_2_bal)  # should not be changed because tx was not broadcasted
    # now broadcast from another node, mine a block, sync, and check the balance
    self.nodes[1].sendrawtransaction(tx_obj_not_broadcast['hex'])
    self.nodes[1].generate(1)
    self.sync_all([self.nodes[0:3]])
    node_2_bal += 2
    tx_obj_not_broadcast = self.nodes[0].gettransaction(txid_not_broadcast)
    assert_equal(self.nodes[2].getbalance(), node_2_bal)
    # create another tx
    self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2)
    # restart the nodes with -walletbroadcast=1
    self.stop_nodes()
    self.start_node(0)
    self.start_node(1)
    self.start_node(2)
    connect_nodes_bi(self.nodes, 0, 1)
    connect_nodes_bi(self.nodes, 1, 2)
    connect_nodes_bi(self.nodes, 0, 2)
    sync_blocks(self.nodes[0:3])
    self.nodes[0].generate(1)
    sync_blocks(self.nodes[0:3])
    node_2_bal += 2
    # tx should be added to balance because after restarting the nodes tx should be broadcast
    assert_equal(self.nodes[2].getbalance(), node_2_bal)
    # send a tx with value in a string (PR#6380 +)
    txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "2")
    tx_obj = self.nodes[0].gettransaction(txid)
    assert_equal(tx_obj['amount'], Decimal('-2'))
    txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "0.0001")
    tx_obj = self.nodes[0].gettransaction(txid)
    assert_equal(tx_obj['amount'], Decimal('-0.0001'))
    # check if JSON parser can handle scientific notation in strings
    txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "1e-4")
    tx_obj = self.nodes[0].gettransaction(txid)
    assert_equal(tx_obj['amount'], Decimal('-0.0001'))
    # This will raise an exception because the amount type is wrong
    assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].sendtoaddress, self.nodes[2].getnewaddress(), "1f-4")
    # This will raise an exception since generate does not accept a string
    assert_raises_rpc_error(-1, "not an integer", self.nodes[0].generate, "2")
    # Import address and private key to check correct behavior of spendable unspents
    # 1. Send some coins to generate new UTXO
    address_to_import = self.nodes[2].getnewaddress()
    txid = self.nodes[0].sendtoaddress(address_to_import, 1)
    self.nodes[0].generate(1)
    self.sync_all([self.nodes[0:3]])
    # 2. Import address from node2 to node1
    self.nodes[1].importaddress(address_to_import)
    # 3. Validate that the imported address is watch-only on node1
    assert(self.nodes[1].getaddressinfo(address_to_import)["iswatchonly"])
    # 4. Check that the unspents after import are not spendable
    assert_array_result(self.nodes[1].listunspent(),
                        {"address": address_to_import},
                        {"spendable": False})
    # 5. Import private key of the previously imported address on node1
    priv_key = self.nodes[2].dumpprivkey(address_to_import)
    self.nodes[1].importprivkey(priv_key)
    # 6. Check that the unspents are now spendable on node1
    assert_array_result(self.nodes[1].listunspent(),
                        {"address": address_to_import},
                        {"spendable": True})
    # Mine a block from node0 to an address from node1
    coinbase_addr = self.nodes[1].getnewaddress()
    block_hash = self.nodes[0].generatetoaddress(1, coinbase_addr)[0]
    coinbase_txid = self.nodes[0].getblock(block_hash)['tx'][0]
    self.sync_all([self.nodes[0:3]])
    # Check that the txid and balance is found by node1
    self.nodes[1].gettransaction(coinbase_txid)
    # check if wallet or blockchain maintenance changes the balance
    self.sync_all([self.nodes[0:3]])
    blocks = self.nodes[0].generate(2)
    self.sync_all([self.nodes[0:3]])
    balance_nodes = [self.nodes[i].getbalance() for i in range(3)]
    block_count = self.nodes[0].getblockcount()
    # Check modes:
    # - True: unicode escaped as \u....
    # - False: unicode directly as UTF-8
    for mode in [True, False]:
        self.nodes[0].rpc.ensure_ascii = mode
        # unicode check: Basic Multilingual Plane, Supplementary Plane respectively
        for label in [u'ббаБаА', u'№ Ё']:
            addr = self.nodes[0].getnewaddress()
            self.nodes[0].setlabel(addr, label)
            assert_equal(self.nodes[0].getaddressinfo(addr)['label'], label)
            assert(label in self.nodes[0].listlabels())
    self.nodes[0].rpc.ensure_ascii = True  # restore to default
    # maintenance tests
    maintenance = [
        '-rescan',
        '-reindex',
        '-zapwallettxes=1',
        '-zapwallettxes=2',
        # disabled until issue is fixed: https://github.com/digibyte/digibyte/issues/7463
        # '-salvagewallet',
    ]
    chainlimit = 6
    for m in maintenance:
        self.log.info("check " + m)
        self.stop_nodes()
        # set lower ancestor limit for later
        self.start_node(0, [m, "-limitancestorcount=" + str(chainlimit)])
        self.start_node(1, [m, "-limitancestorcount=" + str(chainlimit)])
        self.start_node(2, [m, "-limitancestorcount=" + str(chainlimit)])
        if m == '-reindex':
            # reindex will leave rpc warm up "early"; Wait for it to finish
            wait_until(lambda: [block_count] * 3 == [self.nodes[i].getblockcount() for i in range(3)])
        assert_equal(balance_nodes, [self.nodes[i].getbalance() for i in range(3)])
    # Exercise listsinceblock with the last two blocks
    coinbase_tx_1 = self.nodes[0].listsinceblock(blocks[0])
    assert_equal(coinbase_tx_1["lastblock"], blocks[1])
    assert_equal(len(coinbase_tx_1["transactions"]), 1)
    assert_equal(coinbase_tx_1["transactions"][0]["blockhash"], blocks[1])
    assert_equal(len(self.nodes[0].listsinceblock(blocks[1])["transactions"]), 0)
    # ==Check that wallet prefers to use coins that don't exceed mempool limits =====
    # Get all non-zero utxos together
    chain_addrs = [self.nodes[0].getnewaddress(), self.nodes[0].getnewaddress()]
    singletxid = self.nodes[0].sendtoaddress(chain_addrs[0], self.nodes[0].getbalance(), "", "", True)
    self.nodes[0].generate(1)
    node0_balance = self.nodes[0].getbalance()
    # Split into two chains
    rawtx = self.nodes[0].createrawtransaction([{"txid": singletxid, "vout": 0}], {chain_addrs[0]: node0_balance / 2 - Decimal('0.01'), chain_addrs[1]: node0_balance / 2 - Decimal('0.01')})
    signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx)
    singletxid = self.nodes[0].sendrawtransaction(signedtx["hex"])
    self.nodes[0].generate(1)
    # Make a long chain of unconfirmed payments without hitting mempool limit
    # Each tx we make leaves only one output of change on a chain 1 longer
    # Since the amount to send is always much less than the outputs, we only ever need one output
    # So we should be able to generate exactly chainlimit txs for each original output
    sending_addr = self.nodes[1].getnewaddress()
    txid_list = []
    for i in range(chainlimit * 2):
        txid_list.append(self.nodes[0].sendtoaddress(sending_addr, Decimal('0.0001')))
    assert_equal(self.nodes[0].getmempoolinfo()['size'], chainlimit * 2)
    assert_equal(len(txid_list), chainlimit * 2)
    # Without walletrejectlongchains, we will still generate a txid
    # The tx will be stored in the wallet but not accepted to the mempool
    extra_txid = self.nodes[0].sendtoaddress(sending_addr, Decimal('0.0001'))
    assert(extra_txid not in self.nodes[0].getrawmempool())
    assert(extra_txid in [tx["txid"] for tx in self.nodes[0].listtransactions()])
    self.nodes[0].abandontransaction(extra_txid)
    total_txs = len(self.nodes[0].listtransactions("*", 99999))
    # Try with walletrejectlongchains
    # Double chain limit but require combining inputs, so we pass SelectCoinsMinConf
    self.stop_node(0)
    self.start_node(0, extra_args=["-walletrejectlongchains", "-limitancestorcount=" + str(2 * chainlimit)])
    # wait for loadmempool
    timeout = 10
    while (timeout > 0 and len(self.nodes[0].getrawmempool()) < chainlimit * 2):
        time.sleep(0.5)
        timeout -= 0.5
    assert_equal(len(self.nodes[0].getrawmempool()), chainlimit * 2)
    node0_balance = self.nodes[0].getbalance()
    # With walletrejectlongchains we will not create the tx and store it in our wallet.
    assert_raises_rpc_error(-4, "Transaction has too long of a mempool chain", self.nodes[0].sendtoaddress, sending_addr, node0_balance - Decimal('0.01'))
    # Verify nothing new in wallet
    assert_equal(total_txs, len(self.nodes[0].listtransactions("*", 99999)))
    # Test getaddressinfo. Note that these addresses are taken from disablewallet.py
    assert_raises_rpc_error(-5, "Invalid address", self.nodes[0].getaddressinfo, "3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy")
    address_info = self.nodes[0].getaddressinfo("mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ")
    assert_equal(address_info['address'], "mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ")
    assert_equal(address_info["scriptPubKey"], "76a9144e3854046c7bd1594ac904e4793b6a45b36dea0988ac")
    assert not address_info["ismine"]
    assert not address_info["iswatchonly"]
    assert not address_info["isscript"]
def run_test(self):
    """Verify turnstile enforcement for a shielded value pool.

    Node 0 is restarted with TURNSTILE_ARGS, which (per the comments below)
    overrides its in-memory pool size to zero, so any unshielding looks like
    a turnstile violation to it. The test checks that: (1) node 0's miner
    refuses to include the unshielding tx in a block (confirmed by scraping
    debug.log), (2) node 0 rejects a block mined by node 1 that contains the
    tx (again confirmed via debug.log), and (3) after relaunching without
    the override node 0 re-syncs to the network tip.
    """
    # Sanity-check the test harness
    self.nodes[0].generate(101)
    assert_equal(self.nodes[0].getblockcount(), 101)
    self.sync_all()
    # Node 0 shields some funds
    dest_addr = self.nodes[0].z_getnewaddress(POOL_NAME.lower())
    taddr0 = get_coinbase_address(self.nodes[0])
    recipients = []
    recipients.append({"address": dest_addr, "amount": Decimal('10')})
    # minconf=1, fee=0 so the pool balances below are exact
    myopid = self.nodes[0].z_sendmany(taddr0, recipients, 1, 0)
    wait_and_assert_operationid_status(self.nodes[0], myopid)
    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()
    assert_equal(self.nodes[0].z_getbalance(dest_addr), Decimal('10'))
    # Verify size of shielded pool
    self.assert_pool_balance(self.nodes[0], POOL_NAME.lower(), Decimal('10'))
    self.assert_pool_balance(self.nodes[1], POOL_NAME.lower(), Decimal('10'))
    self.assert_pool_balance(self.nodes[2], POOL_NAME.lower(), Decimal('10'))
    # Relaunch node 0 with in-memory size of value pools set to zero.
    self.restart_and_sync_node(0, TURNSTILE_ARGS)
    # Verify size of shielded pool
    self.assert_pool_balance(self.nodes[0], POOL_NAME.lower(), Decimal('0'))
    self.assert_pool_balance(self.nodes[1], POOL_NAME.lower(), Decimal('10'))
    self.assert_pool_balance(self.nodes[2], POOL_NAME.lower(), Decimal('10'))
    # Node 0 creates an unshielding transaction
    recipients = []
    recipients.append({"address": taddr0, "amount": Decimal('1')})
    myopid = self.nodes[0].z_sendmany(dest_addr, recipients, 1, 0)
    mytxid = wait_and_assert_operationid_status(self.nodes[0], myopid)
    # Verify transaction appears in mempool of nodes
    self.sync_all()
    assert(mytxid in self.nodes[0].getrawmempool())
    assert(mytxid in self.nodes[1].getrawmempool())
    assert(mytxid in self.nodes[2].getrawmempool())
    # Node 0 mines a block
    count = self.nodes[0].getblockcount()
    self.nodes[0].generate(1)
    self.sync_all()
    # Verify the mined block does not contain the unshielding transaction
    # (only the coinbase — node 0 sees the withdrawal as a turnstile breach)
    block = self.nodes[0].getblock(self.nodes[0].getbestblockhash())
    assert_equal(len(block["tx"]), 1)
    assert_equal(block["height"], count + 1)
    # Stop node 0 and check logs to verify the miner excluded the transaction from the block
    self.nodes[0].stop()
    bitcoind_processes[0].wait()
    logpath = self.options.tmpdir + "/node0/regtest/debug.log"
    foundErrorMsg = False
    with open(logpath, "r") as myfile:
        logdata = myfile.readlines()
    for logline in logdata:
        if "CreateNewBlock(): tx " + mytxid + " appears to violate " + POOL_NAME.capitalize() + " turnstile" in logline:
            foundErrorMsg = True
            break
    assert(foundErrorMsg)
    # Launch node 0 with in-memory size of value pools set to zero.
    self.start_and_sync_node(0, TURNSTILE_ARGS)
    # Node 1 mines a block
    oldhash = self.nodes[0].getbestblockhash()
    self.nodes[1].generate(1)
    newhash = self.nodes[1].getbestblockhash()
    # Verify block contains the unshielding transaction
    assert(mytxid in self.nodes[1].getblock(newhash)["tx"])
    # Verify nodes 1 and 2 have accepted the block as valid
    sync_blocks(self.nodes[1:3])
    sync_mempools(self.nodes[1:3])
    assert_equal(len(self.nodes[1].getrawmempool()), 0)
    assert_equal(len(self.nodes[2].getrawmempool()), 0)
    # Verify node 0 has not accepted the block
    assert_equal(oldhash, self.nodes[0].getbestblockhash())
    assert(mytxid in self.nodes[0].getrawmempool())
    self.assert_pool_balance(self.nodes[0], POOL_NAME.lower(), Decimal('0'))
    # Verify size of shielded pool
    # (nodes 1 and 2 applied the 1-coin unshielding: 10 - 1 = 9)
    self.assert_pool_balance(self.nodes[0], POOL_NAME.lower(), Decimal('0'))
    self.assert_pool_balance(self.nodes[1], POOL_NAME.lower(), Decimal('9'))
    self.assert_pool_balance(self.nodes[2], POOL_NAME.lower(), Decimal('9'))
    # Stop node 0 and check logs to verify the block was rejected as a turnstile violation
    self.nodes[0].stop()
    bitcoind_processes[0].wait()
    logpath = self.options.tmpdir + "/node0/regtest/debug.log"
    foundConnectBlockErrorMsg = False
    foundInvalidBlockErrorMsg = False
    foundConnectTipErrorMsg = False
    with open(logpath, "r") as myfile:
        logdata = myfile.readlines()
    for logline in logdata:
        if "ConnectBlock(): turnstile violation in " + POOL_NAME.capitalize() + " shielded value pool" in logline:
            foundConnectBlockErrorMsg = True
        elif "InvalidChainFound: invalid block=" + newhash in logline:
            foundInvalidBlockErrorMsg = True
        elif "ConnectTip(): ConnectBlock " + newhash + " failed" in logline:
            foundConnectTipErrorMsg = True
    assert(foundConnectBlockErrorMsg and foundInvalidBlockErrorMsg and foundConnectTipErrorMsg)
    # Launch node 0 without overriding the pool size, so the node can sync with rest of network.
    self.start_and_sync_node(0)
    assert_equal(newhash, self.nodes[0].getbestblockhash())
def run_test(self):
    """Exercise mempool ancestor/descendant package limits and reorg handling.

    Started from PoW cache.
    """
    # Seed from a confirmed UTXO (>= 10 confirmations) on node 0.
    utxo = self.nodes[0].listunspent(10)
    txid = utxo[0]['txid']
    vout = utxo[0]['vout']
    value = utxo[0]['amount']

    fee = Decimal("0.0001")
    # MAX_ANCESTORS transactions off a confirmed tx should be fine
    chain = []
    for i in range(MAX_ANCESTORS):
        (txid, sent_value) = self.chain_transaction(self.nodes[0], txid, 0, value, fee, 1)
        value = sent_value
        chain.append(txid)

    # Check mempool has MAX_ANCESTORS transactions in it, and descendant
    # count and fees should look correct
    mempool = self.nodes[0].getrawmempool(True)
    assert_equal(len(mempool), MAX_ANCESTORS)
    descendant_count = 1
    descendant_fees = 0
    descendant_size = 0
    # getrawmempool reports 'descendantfees' in satoshis while 'fee' is in coins.
    SATOSHIS = 100000000
    # Walk the chain tip-first: each earlier tx accumulates one more descendant,
    # so the running totals must match the node's reported package statistics.
    for x in reversed(chain):
        assert_equal(mempool[x]['descendantcount'], descendant_count)
        descendant_fees += mempool[x]['fee']
        assert_equal(mempool[x]['descendantfees'], SATOSHIS * descendant_fees)
        descendant_size += mempool[x]['size']
        assert_equal(mempool[x]['descendantsize'], descendant_size)
        descendant_count += 1

    # Adding one more transaction on to the chain should fail.
    try:
        self.chain_transaction(self.nodes[0], txid, vout, value, fee, 1)
    except JSONRPCException as e:
        self.log.info("too-long-ancestor-chain successfully rejected")

    # TODO: check that node1's mempool is as expected

    # TODO: test ancestor size limits

    # Now test descendant chain limits
    txid = utxo[1]['txid']
    value = utxo[1]['amount']
    vout = utxo[1]['vout']

    transaction_package = []
    # First create one parent tx with 10 children
    (txid, sent_value) = self.chain_transaction(self.nodes[0], txid, vout, value, fee, 10)
    parent_transaction = txid
    for i in range(10):
        transaction_package.append({
            'txid': txid,
            'vout': i,
            'amount': sent_value
        })

    # Breadth-first spend of queued outputs; the final iteration is expected
    # to exceed the descendant limit and raise, which the except branch checks.
    for i in range(MAX_DESCENDANTS):
        utxo = transaction_package.pop(0)
        try:
            (txid, sent_value) = self.chain_transaction(
                self.nodes[0], utxo['txid'], utxo['vout'], utxo['amount'], fee, 10)
            for j in range(10):
                transaction_package.append({
                    'txid': txid,
                    'vout': j,
                    'amount': sent_value
                })
            if i == MAX_DESCENDANTS - 2:
                # One tx before the limit: the parent's descendant count should
                # already be at the maximum.
                mempool = self.nodes[0].getrawmempool(True)
                assert_equal(
                    mempool[parent_transaction]['descendantcount'],
                    MAX_DESCENDANTS)
        except JSONRPCException as e:
            self.log.info(e.error['message'])
            # The rejection must happen exactly on the last iteration.
            assert_equal(i, MAX_DESCENDANTS - 1)
            self.log.info(
                "tx that would create too large descendant package successfully rejected"
            )

    # TODO: check that node1's mempool is as expected

    # TODO: test descendant size limits

    # Test reorg handling
    # First, the basics:
    self.nodes[0].generate(1)
    sync_blocks(self.nodes)
    self.nodes[1].invalidateblock(self.nodes[0].getbestblockhash())
    self.nodes[1].reconsiderblock(self.nodes[0].getbestblockhash())

    # Now test the case where node1 has a transaction T in its mempool that
    # depends on transactions A and B which are in a mined block, and the
    # block containing A and B is disconnected, AND B is not accepted back
    # into node1's mempool because its ancestor count is too high.

    # Create 8 transactions, like so:
    # Tx0 -> Tx1 (vout0)
    #   \--> Tx2 (vout1) -> Tx3 -> Tx4 -> Tx5 -> Tx6 -> Tx7
    #
    # Mine them in the next block, then generate a new tx8 that spends
    # Tx1 and Tx7, and add to node1's mempool, then disconnect the
    # last block.

    # Create tx0 with 2 outputs
    utxo = self.nodes[0].listunspent()
    txid = utxo[0]['txid']
    value = utxo[0]['amount']
    vout = utxo[0]['vout']

    send_value = satoshi_round((value - fee) / 2)
    inputs = [{'txid': txid, 'vout': vout}]
    outputs = {}
    for i in range(2):
        outputs[self.nodes[0].getnewaddress()] = float(send_value)
    rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
    signedtx = self.nodes[0].signrawtransaction(rawtx)
    txid = self.nodes[0].sendrawtransaction(signedtx['hex'])
    tx0_id = txid
    value = send_value

    # Create tx1
    (tx1_id, tx1_value) = self.chain_transaction(self.nodes[0], tx0_id, 0, value, fee, 1)

    # Create tx2-7
    vout = 1
    txid = tx0_id
    for i in range(6):
        (txid, sent_value) = self.chain_transaction(self.nodes[0], txid, vout, value, fee, 1)
        vout = 0
        value = sent_value

    # Mine these in a block
    self.nodes[0].generate(1)
    self.sync_all()

    # Now generate tx8, with a big fee
    inputs = [{'txid': tx1_id, 'vout': 0}, {'txid': txid, 'vout': 0}]
    outputs = {self.nodes[0].getnewaddress(): send_value + value - 4 * fee}
    rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
    signedtx = self.nodes[0].signrawtransaction(rawtx)
    txid = self.nodes[0].sendrawtransaction(signedtx['hex'])
    sync_mempools(self.nodes)

    # Now try to disconnect the tip on each node...
    self.nodes[1].invalidateblock(self.nodes[1].getbestblockhash())
    self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
    sync_blocks(self.nodes)
wait_and_assert_operationid_status(self.nodes[0], opid2) if self.addr_type == 'sprout': # Shielding the 800 utxos will occur over two transactions, since max tx size is 100,000 bytes. # We don't verify shieldingValue as utxos are not selected in any specific order, so value can change on each test run. # We set an unrealistically high limit parameter of 99999, to verify that max tx size will constrain the number of utxos. verify_locking('662', '138', 99999) else: # Shield the 800 utxos over two transactions verify_locking('500', '300', 500) # sync_all() invokes sync_mempool() but node 2's mempool limit will cause tx1 and tx2 to be rejected. # So instead, we sync on blocks and mempool for node 0 and node 1, and after a new block is generated # which mines tx1 and tx2, all nodes will have an empty mempool which can then be synced. sync_blocks(self.nodes[:2]) sync_mempools(self.nodes[:2]) self.nodes[1].generate(1) self.sync_all() if self.addr_type == 'sprout': # Verify maximum number of utxos which node 2 can shield is limited by option -mempooltxinputlimit # This option is used when the limit parameter is set to 0. mytaddr = get_coinbase_address(self.nodes[2], 20) result = self.nodes[2].z_shieldcoinbase(mytaddr, myzaddr, Decimal('0.0001'), 0) assert_equal(result["shieldingUTXOs"], Decimal('7')) assert_equal(result["remainingUTXOs"], Decimal('13')) wait_and_assert_operationid_status(self.nodes[2], result['opid']) self.sync_all() self.nodes[1].generate(1) self.sync_all()
def run_test(self, test):
    """Drive the shared z_mergetoaddress test scenario against *test*'s nodes.

    Exercises argument validation, UTXO/note merging across transparent and
    shielded addresses, locking of in-flight inputs, and the limit parameters.
    Address pool selectors (self.any_zaddr, self.any_zaddr_or_utxo) and
    expectations (self.utxos_to_generate, self.utxos_in_tx1/2,
    self.test_mempooltxinputlimit) are supplied by the configuring subclass.
    """
    print "Mining blocks..."

    # Set up initial balances: 50 on node 0, 10 on node 1, 30 on node 2
    # after maturity (101 confirmations from node 1's generate below).
    test.nodes[0].generate(1)
    do_not_shield_taddr = test.nodes[0].getnewaddress()

    test.nodes[0].generate(4)
    walletinfo = test.nodes[0].getwalletinfo()
    assert_equal(walletinfo['immature_balance'], 50)
    assert_equal(walletinfo['balance'], 0)
    test.sync_all()
    test.nodes[2].generate(1)
    test.nodes[2].getnewaddress()
    test.nodes[2].generate(1)
    test.nodes[2].getnewaddress()
    test.nodes[2].generate(1)
    test.sync_all()
    test.nodes[1].generate(101)
    test.sync_all()
    assert_equal(test.nodes[0].getbalance(), 50)
    assert_equal(test.nodes[1].getbalance(), 10)
    assert_equal(test.nodes[2].getbalance(), 30)

    # Shield the coinbase
    myzaddr = test.nodes[0].z_getnewaddress(self.addr_type)
    result = test.nodes[0].z_shieldcoinbase("*", myzaddr, 0)
    wait_and_assert_operationid_status(test.nodes[0], result['opid'])
    test.sync_all()
    test.nodes[1].generate(1)
    test.sync_all()

    # Prepare some UTXOs and notes for merging
    mytaddr = test.nodes[0].getnewaddress()
    mytaddr2 = test.nodes[0].getnewaddress()
    mytaddr3 = test.nodes[0].getnewaddress()
    result = test.nodes[0].z_sendmany(myzaddr, [
        {'address': do_not_shield_taddr, 'amount': 10},
        {'address': mytaddr, 'amount': 10},
        {'address': mytaddr2, 'amount': 10},
        {'address': mytaddr3, 'amount': 10},
    ], 1, 0)
    wait_and_assert_operationid_status(test.nodes[0], result)
    test.sync_all()
    test.nodes[1].generate(1)
    test.sync_all()

    # Merging will fail because from arguments need to be in an array
    assert_mergetoaddress_exception(
        "JSON value is not an array as expected",
        lambda: test.nodes[0].z_mergetoaddress("notanarray", myzaddr))

    # Merging will fail when trying to spend from watch-only address
    test.nodes[2].importaddress(mytaddr)
    assert_mergetoaddress_exception(
        "Could not find any funds to merge.",
        lambda: test.nodes[2].z_mergetoaddress([mytaddr], myzaddr))

    # Merging will fail because fee is negative
    assert_mergetoaddress_exception(
        "Amount out of range",
        lambda: test.nodes[0].z_mergetoaddress(self.any_zaddr_or_utxo, myzaddr,
                                               -1))

    # Merging will fail because fee is larger than MAX_MONEY
    assert_mergetoaddress_exception(
        "Amount out of range",
        lambda: test.nodes[0].z_mergetoaddress(self.any_zaddr_or_utxo, myzaddr,
                                               Decimal('21000000.00000001')))

    # Merging will fail because fee is larger than sum of UTXOs
    assert_mergetoaddress_exception(
        "Insufficient funds, have 50.00, which is less than miners fee 999.00",
        lambda: test.nodes[0].z_mergetoaddress(self.any_zaddr_or_utxo, myzaddr,
                                               999))

    # Merging will fail because transparent limit parameter must be at least 0
    assert_mergetoaddress_exception(
        "Limit on maximum number of UTXOs cannot be negative",
        lambda: test.nodes[0].z_mergetoaddress(self.any_zaddr_or_utxo, myzaddr,
                                               Decimal('0.001'), -1))

    # Merging will fail because transparent limit parameter is absurdly large
    assert_mergetoaddress_exception(
        "JSON integer out of range",
        lambda: test.nodes[0].z_mergetoaddress(self.any_zaddr_or_utxo, myzaddr,
                                               Decimal('0.001'),
                                               99999999999999))

    # Merging will fail because shielded limit parameter must be at least 0
    assert_mergetoaddress_exception(
        "Limit on maximum number of notes cannot be negative",
        lambda: test.nodes[0].z_mergetoaddress(self.any_zaddr_or_utxo, myzaddr,
                                               Decimal('0.001'), 50, -1))

    # Merging will fail because shielded limit parameter is absurdly large
    assert_mergetoaddress_exception(
        "JSON integer out of range",
        lambda: test.nodes[0].z_mergetoaddress(self.any_zaddr_or_utxo, myzaddr,
                                               Decimal('0.001'), 50,
                                               99999999999999))

    # Merging will fail for this specific case where it would spend a fee and do nothing
    assert_mergetoaddress_exception(
        "Destination address is also the only source address, and all its funds are already merged.",
        lambda: test.nodes[0].z_mergetoaddress([mytaddr], mytaddr))

    # Merge UTXOs from node 0 of value 30, standard fee of 0.00010000
    result = test.nodes[0].z_mergetoaddress([mytaddr, mytaddr2, mytaddr3],
                                            myzaddr)
    wait_and_assert_operationid_status(test.nodes[0], result['opid'])
    test.sync_all()
    test.nodes[1].generate(1)
    test.sync_all()

    # Confirm balances and that do_not_shield_taddr containing funds of 10 was left alone
    assert_equal(test.nodes[0].getbalance(), 10)
    assert_equal(test.nodes[0].z_getbalance(do_not_shield_taddr),
                 Decimal('10.0'))
    assert_equal(test.nodes[0].z_getbalance(myzaddr), Decimal('39.99990000'))
    assert_equal(test.nodes[1].getbalance(), 40)
    assert_equal(test.nodes[2].getbalance(), 30)

    # Shield all notes to another z-addr
    myzaddr2 = test.nodes[0].z_getnewaddress(self.addr_type)
    result = test.nodes[0].z_mergetoaddress(self.any_zaddr, myzaddr2, 0)
    assert_equal(result["mergingUTXOs"], Decimal('0'))
    assert_equal(result["remainingUTXOs"], Decimal('0'))
    assert_equal(result["mergingNotes"], Decimal('2'))
    assert_equal(result["remainingNotes"], Decimal('0'))
    wait_and_assert_operationid_status(test.nodes[0], result['opid'])
    test.sync_all()
    blockhash = test.nodes[1].generate(1)
    test.sync_all()
    # Block should contain only the coinbase plus the merge transaction.
    assert_equal(len(test.nodes[0].getblock(blockhash[0])['tx']), 2)
    assert_equal(test.nodes[0].z_getbalance(myzaddr), 0)
    assert_equal(test.nodes[0].z_getbalance(myzaddr2), Decimal('39.99990000'))

    # Shield coinbase UTXOs from any node 2 taddr, and set fee to 0
    result = test.nodes[2].z_shieldcoinbase("*", myzaddr, 0)
    wait_and_assert_operationid_status(test.nodes[2], result['opid'])
    test.sync_all()
    test.nodes[1].generate(1)
    test.sync_all()

    assert_equal(test.nodes[0].getbalance(), 10)
    assert_equal(test.nodes[0].z_getbalance(myzaddr), Decimal('30'))
    assert_equal(test.nodes[0].z_getbalance(myzaddr2), Decimal('39.99990000'))
    assert_equal(test.nodes[1].getbalance(), 60)
    assert_equal(test.nodes[2].getbalance(), 0)

    # Merge all notes from node 0 into a node 0 taddr, and set fee to 0
    result = test.nodes[0].z_mergetoaddress(self.any_zaddr, mytaddr, 0)
    wait_and_assert_operationid_status(test.nodes[0], result['opid'])
    test.sync_all()
    test.nodes[1].generate(1)
    test.sync_all()

    assert_equal(test.nodes[0].getbalance(), Decimal('79.99990000'))
    assert_equal(test.nodes[0].z_getbalance(do_not_shield_taddr),
                 Decimal('10.0'))
    assert_equal(test.nodes[0].z_getbalance(mytaddr), Decimal('69.99990000'))
    assert_equal(test.nodes[0].z_getbalance(myzaddr), 0)
    assert_equal(test.nodes[0].z_getbalance(myzaddr2), 0)
    assert_equal(test.nodes[1].getbalance(), 70)
    assert_equal(test.nodes[2].getbalance(), 0)

    # Merge all node 0 UTXOs together into a node 1 taddr, and set fee to 0
    test.nodes[1].getnewaddress()  # Ensure we have an empty address
    n1taddr = test.nodes[1].getnewaddress()
    result = test.nodes[0].z_mergetoaddress(["ANY_TADDR"], n1taddr, 0)
    wait_and_assert_operationid_status(test.nodes[0], result['opid'])
    test.sync_all()
    test.nodes[1].generate(1)
    test.sync_all()

    assert_equal(test.nodes[0].getbalance(), 0)
    assert_equal(test.nodes[0].z_getbalance(do_not_shield_taddr), 0)
    assert_equal(test.nodes[0].z_getbalance(mytaddr), 0)
    assert_equal(test.nodes[0].z_getbalance(myzaddr), 0)
    assert_equal(test.nodes[1].getbalance(), Decimal('159.99990000'))
    assert_equal(test.nodes[1].z_getbalance(n1taddr), Decimal('79.99990000'))
    assert_equal(test.nodes[2].getbalance(), 0)

    # Generate self.utxos_to_generate regular UTXOs on node 0, and 20 regular UTXOs on node 2
    mytaddr = test.nodes[0].getnewaddress()
    n2taddr = test.nodes[2].getnewaddress()
    test.nodes[1].generate(1000)
    test.sync_all()
    for i in range(self.utxos_to_generate):
        test.nodes[1].sendtoaddress(mytaddr, 1)
    for i in range(20):
        test.nodes[1].sendtoaddress(n2taddr, 1)
    test.nodes[1].generate(1)
    test.sync_all()

    # Merging the UTXOs will conditionally occur over two transactions, since max tx size is 100,000 bytes before Sapling and 2,000,000 after.
    # We don't verify mergingTransparentValue as UTXOs are not selected in any specific order, so value can change on each test run.
    # We set an unrealistically high limit parameter of 99999, to verify that max tx size will constrain the number of UTXOs.
    result = test.nodes[0].z_mergetoaddress([mytaddr], myzaddr, 0, 99999)
    assert_equal(result["mergingUTXOs"], self.utxos_in_tx1)
    assert_equal(result["remainingUTXOs"], self.utxos_in_tx2)
    assert_equal(result["mergingNotes"], Decimal('0'))
    assert_equal(result["mergingShieldedValue"], Decimal('0'))
    assert_equal(result["remainingNotes"], Decimal('0'))
    assert_equal(result["remainingShieldedValue"], Decimal('0'))
    remainingTransparentValue = result["remainingTransparentValue"]
    wait_and_assert_operationid_status(test.nodes[0], result['opid'])

    # For sapling we do not check that this occurs over two transactions because of the time that it would take
    if self.utxos_in_tx2 > 0:
        # Verify that UTXOs are locked (not available for selection) by queuing up another merging operation
        result = test.nodes[0].z_mergetoaddress([mytaddr], myzaddr, 0, 0)
        assert_equal(result["mergingUTXOs"], self.utxos_in_tx2)
        assert_equal(result["mergingTransparentValue"],
                     Decimal(remainingTransparentValue))
        assert_equal(result["remainingUTXOs"], Decimal('0'))
        assert_equal(result["remainingTransparentValue"], Decimal('0'))
        assert_equal(result["mergingNotes"], Decimal('0'))
        assert_equal(result["mergingShieldedValue"], Decimal('0'))
        assert_equal(result["remainingNotes"], Decimal('0'))
        assert_equal(result["remainingShieldedValue"], Decimal('0'))
        wait_and_assert_operationid_status(test.nodes[0], result['opid'])

    # sync_all() invokes sync_mempool() but node 2's mempool limit will cause tx1 and tx2 to be rejected.
    # So instead, we sync on blocks and mempool for node 0 and node 1, and after a new block is generated
    # which mines tx1 and tx2, all nodes will have an empty mempool which can then be synced.
    sync_blocks(test.nodes[:2])
    sync_mempools(test.nodes[:2])
    # Generate enough blocks to ensure all transactions are mined
    while test.nodes[1].getmempoolinfo()['size'] > 0:
        test.nodes[1].generate(1)
    test.sync_all()

    # Verify maximum number of UTXOs which node 2 can shield is limited by option -mempooltxinputlimit
    # This option is used when the limit parameter is set to 0.
    # -mempooltxinputlimit is not used after overwinter activation
    if self.test_mempooltxinputlimit:
        expected_to_merge = 7
        expected_remaining = 13
    else:
        expected_to_merge = 20
        expected_remaining = 0
    result = test.nodes[2].z_mergetoaddress([n2taddr], myzaddr,
                                            Decimal('0.0001'), 0)
    assert_equal(result["mergingUTXOs"], expected_to_merge)
    assert_equal(result["remainingUTXOs"], expected_remaining)
    assert_equal(result["mergingNotes"], Decimal('0'))
    assert_equal(result["remainingNotes"], Decimal('0'))
    wait_and_assert_operationid_status(test.nodes[2], result['opid'])
    test.sync_all()
    test.nodes[1].generate(1)
    test.sync_all()

    # Verify maximum number of UTXOs which node 0 can shield is set by default limit parameter of 50
    mytaddr = test.nodes[0].getnewaddress()
    for i in range(100):
        test.nodes[1].sendtoaddress(mytaddr, 1)
    test.nodes[1].generate(1)
    test.sync_all()
    result = test.nodes[0].z_mergetoaddress([mytaddr], myzaddr,
                                            Decimal('0.0001'))
    assert_equal(result["mergingUTXOs"], Decimal('50'))
    assert_equal(result["remainingUTXOs"], Decimal('50'))
    assert_equal(result["mergingNotes"], Decimal('0'))
    # Remaining notes are only counted if we are trying to merge any notes
    assert_equal(result["remainingNotes"], Decimal('0'))
    wait_and_assert_operationid_status(test.nodes[0], result['opid'])

    # Verify maximum number of UTXOs which node 0 can shield can be set by the limit parameter
    result = test.nodes[0].z_mergetoaddress([mytaddr], myzaddr,
                                            Decimal('0.0001'), 33)
    assert_equal(result["mergingUTXOs"], Decimal('33'))
    assert_equal(result["remainingUTXOs"], Decimal('17'))
    assert_equal(result["mergingNotes"], Decimal('0'))
    # Remaining notes are only counted if we are trying to merge any notes
    assert_equal(result["remainingNotes"], Decimal('0'))
    wait_and_assert_operationid_status(test.nodes[0], result['opid'])
    # Don't sync node 2 which rejects the tx due to its mempooltxinputlimit
    sync_blocks(test.nodes[:2])
    sync_mempools(test.nodes[:2])
    test.nodes[1].generate(1)
    test.sync_all()

    # Verify maximum number of notes which node 0 can shield can be set by the limit parameter
    # Also check that we can set off a second merge before the first one is complete
    # myzaddr will have 5 notes if testing before to Sapling activation and 4 otherwise
    num_notes = len(test.nodes[0].z_listunspent(0))
    result1 = test.nodes[0].z_mergetoaddress([myzaddr], myzaddr, 0.0001, 50, 2)
    result2 = test.nodes[0].z_mergetoaddress([myzaddr], myzaddr, 0.0001, 50, 2)

    # First merge should select from all notes
    assert_equal(result1["mergingUTXOs"], Decimal('0'))
    # Remaining UTXOs are only counted if we are trying to merge any UTXOs
    assert_equal(result1["remainingUTXOs"], Decimal('0'))
    assert_equal(result1["mergingNotes"], Decimal('2'))
    assert_equal(result1["remainingNotes"], num_notes - 2)

    # Second merge should ignore locked notes
    assert_equal(result2["mergingUTXOs"], Decimal('0'))
    assert_equal(result2["remainingUTXOs"], Decimal('0'))
    assert_equal(result2["mergingNotes"], Decimal('2'))
    assert_equal(result2["remainingNotes"], num_notes - 4)
    wait_and_assert_operationid_status(test.nodes[0], result1['opid'])
    wait_and_assert_operationid_status(test.nodes[0], result2['opid'])

    test.sync_all()
    test.nodes[1].generate(1)
    test.sync_all()

    # Shield both UTXOs and notes to a z-addr
    result = test.nodes[0].z_mergetoaddress(self.any_zaddr_or_utxo, myzaddr,
                                            0, 10, 2)
    assert_equal(result["mergingUTXOs"], Decimal('10'))
    assert_equal(result["remainingUTXOs"], Decimal('7'))
    assert_equal(result["mergingNotes"], Decimal('2'))
    assert_equal(result["remainingNotes"], num_notes - 4)
    wait_and_assert_operationid_status(test.nodes[0], result['opid'])
    # Don't sync node 2 which rejects the tx due to its mempooltxinputlimit
    sync_blocks(test.nodes[:2])
    sync_mempools(test.nodes[:2])
    test.nodes[1].generate(1)
    test.sync_all()
def run_test(self):
    """Verify address-type selection for receive, multisig, and change addresses.

    Nodes 0-3 are configured with different -addresstype/-changetype options
    (see self.extra_args); node 4 tests -changetype and node 5 only mines.
    """
    # Mine 101 blocks on node5 to bring nodes out of IBD and make sure that
    # no coinbases are maturing for the nodes-under-test during the test
    self.nodes[5].generate(101)
    sync_blocks(self.nodes)

    uncompressed_1 = "0496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52da7589379515d4e0a604f8141781e62294721166bf621e73a82cbf2342c858ee"
    uncompressed_2 = "047211a824f55b505228e4c3d5194c1fcfaa15a456abdf37f9b9d97a4040afc073dee6c89064984f03385237d92167c13e236446b417ab79a0fcae412ae3316b77"
    compressed_1 = "0296b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52"
    compressed_2 = "037211a824f55b505228e4c3d5194c1fcfaa15a456abdf37f9b9d97a4040afc073"

    # addmultisigaddress with at least 1 uncompressed key should return a legacy address.
    for node in range(4):
        self.test_address(node, self.nodes[node].addmultisigaddress(2, [uncompressed_1, uncompressed_2])['address'], True, 'legacy')
        self.test_address(node, self.nodes[node].addmultisigaddress(2, [compressed_1, uncompressed_2])['address'], True, 'legacy')
        self.test_address(node, self.nodes[node].addmultisigaddress(2, [uncompressed_1, compressed_2])['address'], True, 'legacy')
    # addmultisigaddress with all compressed keys should return the appropriate address type (even when the keys are not ours).
    self.test_address(0, self.nodes[0].addmultisigaddress(2, [compressed_1, compressed_2])['address'], True, 'legacy')
    self.test_address(1, self.nodes[1].addmultisigaddress(2, [compressed_1, compressed_2])['address'], True, 'p2sh-segwit')
    self.test_address(2, self.nodes[2].addmultisigaddress(2, [compressed_1, compressed_2])['address'], True, 'p2sh-segwit')
    self.test_address(3, self.nodes[3].addmultisigaddress(2, [compressed_1, compressed_2])['address'], True, 'bech32')

    # Exercise every combination of explicit/default address type, single-sig
    # vs multisig, and sending node.
    for explicit_type, multisig, from_node in itertools.product([False, True], [False, True], range(4)):
        address_type = None
        if explicit_type and not multisig:
            # Deliberately pick an explicit type that differs from the node's
            # own default so the override path is exercised.
            if from_node == 1:
                address_type = 'bech32'
            elif from_node == 0 or from_node == 3:
                address_type = 'p2sh-segwit'
            else:
                address_type = 'legacy'
        self.log.info("Sending from node {} ({}) with{} multisig using {}".format(from_node, self.extra_args[from_node], "" if multisig else "out", "default" if address_type is None else address_type))
        old_balances = self.get_balances()
        self.log.debug("Old balances are {}".format(old_balances))
        to_send = (old_balances[from_node] / 101).quantize(Decimal("0.00000001"))
        sends = {}

        self.log.debug("Prepare sends")
        for n, to_node in enumerate(range(from_node, from_node + 4)):
            to_node %= 4
            change = False
            if not multisig:
                if from_node == to_node:
                    # When sending non-multisig to self, use getrawchangeaddress
                    address = self.nodes[to_node].getrawchangeaddress(address_type=address_type)
                    change = True
                else:
                    address = self.nodes[to_node].getnewaddress(address_type=address_type)
            else:
                addr1 = self.nodes[to_node].getnewaddress()
                addr2 = self.nodes[to_node].getnewaddress()
                address = self.nodes[to_node].addmultisigaddress(2, [addr1, addr2])['address']

            # Do some sanity checking on the created address
            if address_type is not None:
                typ = address_type
            elif to_node == 0:
                typ = 'legacy'
            elif to_node == 1 or (to_node == 2 and not change):
                typ = 'p2sh-segwit'
            else:
                typ = 'bech32'
            self.test_address(to_node, address, multisig, typ)

            # Output entry
            sends[address] = to_send * 10 * (1 + n)

        self.log.debug("Sending: {}".format(sends))
        self.nodes[from_node].sendmany("", sends)
        sync_mempools(self.nodes)

        unconf_balances = self.get_balances(False)
        self.log.debug("Check unconfirmed balances: {}".format(unconf_balances))
        assert_equal(unconf_balances[from_node], 0)
        for n, to_node in enumerate(range(from_node + 1, from_node + 4)):
            to_node %= 4
            assert_equal(unconf_balances[to_node], to_send * 10 * (2 + n))

        # node5 collects fee and block subsidy to keep accounting simple
        self.nodes[5].generate(1)
        sync_blocks(self.nodes)

        new_balances = self.get_balances()
        self.log.debug("Check new balances: {}".format(new_balances))
        # We don't know what fee was set, so we can only check bounds on the balance of the sending node
        assert_greater_than(new_balances[from_node], to_send * 10)
        assert_greater_than(to_send * 11, new_balances[from_node])
        for n, to_node in enumerate(range(from_node + 1, from_node + 4)):
            to_node %= 4
            assert_equal(new_balances[to_node], old_balances[to_node] + to_send * 10 * (2 + n))

    # Get one p2sh/segwit address from node2 and two bech32 addresses from node3:
    to_address_p2sh = self.nodes[2].getnewaddress()
    to_address_bech32_1 = self.nodes[3].getnewaddress()
    to_address_bech32_2 = self.nodes[3].getnewaddress()

    # Fund node 4:
    self.nodes[5].sendtoaddress(self.nodes[4].getnewaddress(), Decimal("1"))
    self.nodes[5].generate(1)
    sync_blocks(self.nodes)
    assert_equal(self.nodes[4].getbalance(), 1)

    self.log.info("Nodes with addresstype=legacy never use a P2WPKH change output")
    self.test_change_output_type(0, [to_address_bech32_1], 'legacy')

    self.log.info("Nodes with addresstype=p2sh-segwit only use a P2WPKH change output if any destination address is bech32:")
    self.test_change_output_type(1, [to_address_p2sh], 'p2sh-segwit')
    self.test_change_output_type(1, [to_address_bech32_1], 'bech32')
    self.test_change_output_type(1, [to_address_p2sh, to_address_bech32_1], 'bech32')
    self.test_change_output_type(1, [to_address_bech32_1, to_address_bech32_2], 'bech32')

    self.log.info("Nodes with change_type=bech32 always use a P2WPKH change output:")
    self.test_change_output_type(2, [to_address_bech32_1], 'bech32')
    self.test_change_output_type(2, [to_address_p2sh], 'bech32')

    self.log.info("Nodes with addresstype=bech32 always use a P2WPKH change output (unless changetype is set otherwise):")
    self.test_change_output_type(3, [to_address_bech32_1], 'bech32')
    self.test_change_output_type(3, [to_address_p2sh], 'bech32')

    self.log.info('getrawchangeaddress defaults to addresstype if -changetype is not set and argument is absent')
    self.test_address(3, self.nodes[3].getrawchangeaddress(), multisig=False, typ='bech32')

    self.log.info('test invalid address type arguments')
    assert_raises_rpc_error(-5, "Unknown address type ''", self.nodes[3].addmultisigaddress, 2, [compressed_1, compressed_2], None, '')
    assert_raises_rpc_error(-5, "Unknown address type ''", self.nodes[3].getnewaddress, None, '')
    assert_raises_rpc_error(-5, "Unknown address type ''", self.nodes[3].getrawchangeaddress, '')
    assert_raises_rpc_error(-5, "Unknown address type 'bech23'", self.nodes[3].getrawchangeaddress, 'bech23')

    self.log.info("Nodes with changetype=p2sh-segwit never use a P2WPKH change output")
    self.test_change_output_type(4, [to_address_bech32_1], 'p2sh-segwit')
    self.test_address(4, self.nodes[4].getrawchangeaddress(), multisig=False, typ='p2sh-segwit')
    self.log.info("Except for getrawchangeaddress if specified:")
    self.test_address(4, self.nodes[4].getrawchangeaddress(), multisig=False, typ='p2sh-segwit')
    self.test_address(4, self.nodes[4].getrawchangeaddress('bech32'), multisig=False, typ='bech32')
def run_test(self):
    """Verify turnstile enforcement when a shielded value pool is forced to zero.

    Node 0 is restarted with TURNSTILE_ARGS so that its in-memory pool size is
    zero; it must then refuse to mine (and refuse to accept a block containing)
    an unshielding transaction, while nodes 1 and 2 accept it normally.
    Enforcement is confirmed by grepping node 0's debug.log after each stop.
    """
    # Sanity-check the test harness
    self.nodes[0].generate(101)
    assert_equal(self.nodes[0].getblockcount(), 101)
    self.sync_all()

    # Node 0 shields some funds
    dest_addr = self.nodes[0].z_getnewaddress(POOL_NAME.lower())
    taddr0 = get_coinbase_address(self.nodes[0])
    recipients = []
    recipients.append({"address": dest_addr, "amount": Decimal('3920000')})
    myopid = self.nodes[0].z_sendmany(taddr0, recipients, 1, 0)
    wait_and_assert_operationid_status(self.nodes[0], myopid)

    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()

    assert_equal(self.nodes[0].z_getbalance(dest_addr), Decimal('3920000'))

    # Verify size of shielded pool
    self.assert_pool_balance(self.nodes[0], POOL_NAME.lower(), Decimal('3920000'))
    self.assert_pool_balance(self.nodes[1], POOL_NAME.lower(), Decimal('3920000'))
    self.assert_pool_balance(self.nodes[2], POOL_NAME.lower(), Decimal('3920000'))

    # Relaunch node 0 with in-memory size of value pools set to zero.
    self.restart_and_sync_node(0, TURNSTILE_ARGS)

    # Verify size of shielded pool
    self.assert_pool_balance(self.nodes[0], POOL_NAME.lower(), Decimal('0'))
    self.assert_pool_balance(self.nodes[1], POOL_NAME.lower(), Decimal('3920000'))
    self.assert_pool_balance(self.nodes[2], POOL_NAME.lower(), Decimal('3920000'))

    # Node 0 creates an unshielding transaction
    recipients = []
    recipients.append({"address": taddr0, "amount": Decimal('1')})
    myopid = self.nodes[0].z_sendmany(dest_addr, recipients, 1, 0)
    mytxid = wait_and_assert_operationid_status(self.nodes[0], myopid)

    # Verify transaction appears in mempool of nodes
    self.sync_all()
    assert (mytxid in self.nodes[0].getrawmempool())
    assert (mytxid in self.nodes[1].getrawmempool())
    assert (mytxid in self.nodes[2].getrawmempool())

    # Node 0 mines a block
    count = self.nodes[0].getblockcount()
    self.nodes[0].generate(1)
    self.sync_all()

    # Verify the mined block does not contain the unshielding transaction
    block = self.nodes[0].getblock(self.nodes[0].getbestblockhash())
    assert_equal(len(block["tx"]), 1)
    assert_equal(block["height"], count + 1)

    # Stop node 0 and check logs to verify the miner excluded the transaction from the block
    self.nodes[0].stop()
    bitcoind_processes[0].wait()
    logpath = self.options.tmpdir + "/node0/regtest/debug.log"
    foundErrorMsg = False
    with open(logpath, "r") as myfile:
        logdata = myfile.readlines()
    for logline in logdata:
        if "CreateNewBlock(): tx " + mytxid + " appears to violate " + POOL_NAME.capitalize() + " turnstile" in logline:
            foundErrorMsg = True
            break
    assert (foundErrorMsg)

    # Launch node 0 with in-memory size of value pools set to zero.
    self.start_and_sync_node(0, TURNSTILE_ARGS)

    # Node 1 mines a block
    oldhash = self.nodes[0].getbestblockhash()
    self.nodes[1].generate(1)
    newhash = self.nodes[1].getbestblockhash()

    # Verify block contains the unshielding transaction
    assert (mytxid in self.nodes[1].getblock(newhash)["tx"])

    # Verify nodes 1 and 2 have accepted the block as valid
    sync_blocks(self.nodes[1:3])
    sync_mempools(self.nodes[1:3])
    assert_equal(len(self.nodes[1].getrawmempool()), 0)
    assert_equal(len(self.nodes[2].getrawmempool()), 0)

    # Verify node 0 has not accepted the block
    assert_equal(oldhash, self.nodes[0].getbestblockhash())
    assert (mytxid in self.nodes[0].getrawmempool())
    self.assert_pool_balance(self.nodes[0], POOL_NAME.lower(), Decimal('0'))

    # Verify size of shielded pool
    self.assert_pool_balance(self.nodes[0], POOL_NAME.lower(), Decimal('0'))
    self.assert_pool_balance(self.nodes[1], POOL_NAME.lower(), Decimal('3919999'))
    self.assert_pool_balance(self.nodes[2], POOL_NAME.lower(), Decimal('3919999'))

    # Stop node 0 and check logs to verify the block was rejected as a turnstile violation
    self.nodes[0].stop()
    bitcoind_processes[0].wait()
    logpath = self.options.tmpdir + "/node0/regtest/debug.log"
    foundConnectBlockErrorMsg = False
    foundInvalidBlockErrorMsg = False
    foundConnectTipErrorMsg = False
    with open(logpath, "r") as myfile:
        logdata = myfile.readlines()
    for logline in logdata:
        if "ConnectBlock(): turnstile violation in " + POOL_NAME.capitalize() + " shielded value pool" in logline:
            foundConnectBlockErrorMsg = True
        elif "InvalidChainFound: invalid block=" + newhash in logline:
            foundInvalidBlockErrorMsg = True
        elif "ConnectTip(): ConnectBlock " + newhash + " failed" in logline:
            foundConnectTipErrorMsg = True
    assert (foundConnectBlockErrorMsg and foundInvalidBlockErrorMsg and foundConnectTipErrorMsg)

    # Launch node 0 without overriding the pool size, so the node can sync with rest of network.
    self.start_and_sync_node(0)
    assert_equal(newhash, self.nodes[0].getbestblockhash())
def run_rbf_opt_in_test(self):
    """Exercise BIP 125 opt-in replace-by-fee signalling and wallet reporting.

    Builds a chain of transactions tx1 -> tx2 -> tx3 -> tx4 where tx3 is the
    first to signal RBF (nSequence = 0), then replaces tx3 with a higher-fee
    tx3_b and checks that listtransactions/gettransaction report the expected
    "bip125-replaceable" status ("no" / "yes" / "unknown") on both nodes,
    before and after the replacement and after mining.

    Fix vs. original: an exact duplicate of the
    `get_unconfirmed_utxo_entry(self.nodes[1], txid_1)` assignment has been
    removed (the second call was a dead re-assignment of the same value).
    """
    # Check whether a transaction signals opt-in RBF itself
    # (any input with nSequence below 0xfffffffe signals per BIP 125)
    def is_opt_in(node, txid):
        rawtx = node.getrawtransaction(txid, 1)
        for x in rawtx["vin"]:
            if x["sequence"] < 0xfffffffe:
                return True
        return False

    # Find an unconfirmed output matching a certain txid
    def get_unconfirmed_utxo_entry(node, txid_to_match):
        utxo = node.listunspent(0, 0)
        for i in utxo:
            if i["txid"] == txid_to_match:
                return i
        return None

    # 1. Chain a few transactions that don't opt-in.
    txid_1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1)
    assert not is_opt_in(self.nodes[0], txid_1)
    assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_1}, {"bip125-replaceable": "no"})
    sync_mempools(self.nodes)
    assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_1}, {"bip125-replaceable": "no"})

    # Tx2 will build off txid_1, still not opting in to RBF.
    utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[0], txid_1)
    # The output is "safe" on the sending node (change from own tx)...
    assert_equal(utxo_to_use["safe"], True)
    # ...but not on the receiving node, where the unconfirmed parent is untrusted.
    utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_1)
    assert_equal(utxo_to_use["safe"], False)

    # Create tx2 using createrawtransaction
    inputs = [{"txid": utxo_to_use["txid"], "vout": utxo_to_use["vout"]}]
    outputs = {self.nodes[0].getnewaddress(): 0.999}
    tx2 = self.nodes[1].createrawtransaction(inputs, outputs)
    tx2_signed = self.nodes[1].signrawtransactionwithwallet(tx2)["hex"]
    txid_2 = self.nodes[1].sendrawtransaction(tx2_signed)

    # ...and check the result
    assert not is_opt_in(self.nodes[1], txid_2)
    assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_2}, {"bip125-replaceable": "no"})
    sync_mempools(self.nodes)
    assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_2}, {"bip125-replaceable": "no"})

    # Tx3 will opt-in to RBF
    utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[0], txid_2)
    inputs = [{"txid": txid_2, "vout": utxo_to_use["vout"]}]
    outputs = {self.nodes[1].getnewaddress(): 0.998}
    tx3 = self.nodes[0].createrawtransaction(inputs, outputs)
    tx3_modified = tx_from_hex(tx3)
    tx3_modified.vin[0].nSequence = 0  # signal replaceability
    tx3 = tx3_modified.serialize().hex()
    tx3_signed = self.nodes[0].signrawtransactionwithwallet(tx3)['hex']
    txid_3 = self.nodes[0].sendrawtransaction(tx3_signed)

    assert is_opt_in(self.nodes[0], txid_3)
    assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_3}, {"bip125-replaceable": "yes"})
    sync_mempools(self.nodes)
    assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_3}, {"bip125-replaceable": "yes"})

    # Tx4 will chain off tx3. Doesn't signal itself, but depends on one
    # that does, so it is reported replaceable as well.
    utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_3)
    inputs = [{"txid": txid_3, "vout": utxo_to_use["vout"]}]
    outputs = {self.nodes[0].getnewaddress(): 0.997}
    tx4 = self.nodes[1].createrawtransaction(inputs, outputs)
    tx4_signed = self.nodes[1].signrawtransactionwithwallet(tx4)["hex"]
    txid_4 = self.nodes[1].sendrawtransaction(tx4_signed)

    assert not is_opt_in(self.nodes[1], txid_4)
    assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_4}, {"bip125-replaceable": "yes"})
    sync_mempools(self.nodes)
    assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_4}, {"bip125-replaceable": "yes"})

    # Replace tx3, and check that tx4 becomes unknown
    # (tx3_b aliases tx3_modified, so it shares inputs/sequence with tx3)
    tx3_b = tx3_modified
    tx3_b.vout[0].nValue -= int(Decimal("0.004") * COIN)  # bump the fee
    tx3_b = tx3_b.serialize().hex()
    tx3_b_signed = self.nodes[0].signrawtransactionwithwallet(tx3_b)['hex']
    txid_3b = self.nodes[0].sendrawtransaction(tx3_b_signed, True)
    assert is_opt_in(self.nodes[0], txid_3b)

    assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_4}, {"bip125-replaceable": "unknown"})
    sync_mempools(self.nodes)
    assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_4}, {"bip125-replaceable": "unknown"})

    # Check gettransaction as well:
    for n in self.nodes[0:2]:
        assert_equal(n.gettransaction(txid_1)["bip125-replaceable"], "no")
        assert_equal(n.gettransaction(txid_2)["bip125-replaceable"], "no")
        assert_equal(n.gettransaction(txid_3)["bip125-replaceable"], "yes")
        assert_equal(n.gettransaction(txid_3b)["bip125-replaceable"], "yes")
        assert_equal(n.gettransaction(txid_4)["bip125-replaceable"], "unknown")

    # After mining a transaction, it's no longer BIP125-replaceable
    self.nodes[0].generate(1)
    assert txid_3b not in self.nodes[0].getrawmempool()
    assert_equal(self.nodes[0].gettransaction(txid_3b)["bip125-replaceable"], "no")
    assert_equal(self.nodes[0].gettransaction(txid_4)["bip125-replaceable"], "unknown")
def run_test(self):
    """Test wallet abandontransaction behaviour around unconfirmed tx chains.

    Builds txA/txB/txC (confirmed), then an unconfirmed chain
    txAB1 -> txABC2 -> child, evicts the chain from the mempool via a
    -minrelaytxfee restart, abandons txAB1, and checks at each step that
    getbalance / getunconfirmedbalance / listunspent account for the inputs
    and change correctly. Finally conflicts txAB1 with a mined double spend.
    All balance deltas asserted below follow from the fixed output amounts
    (10 per output; 14.99998 / 5 / 24.9996 / 24.999 in the chain).
    """
    self.nodes[1].generate(100)
    sync_blocks(self.nodes)
    balance = self.nodes[0].getbalance()
    # Three self-sends creating the 10-value outputs used as chain inputs.
    txA = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10"))
    txB = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10"))
    txC = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10"))
    sync_mempools(self.nodes)
    self.nodes[1].generate(1)

    # Can not abandon non-wallet transaction
    assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', lambda: self.nodes[0].abandontransaction(txid='ff' * 32))
    # Can not abandon confirmed transaction
    assert_raises_rpc_error(-5, 'Transaction not eligible for abandonment', lambda: self.nodes[0].abandontransaction(txid=txA))

    sync_blocks(self.nodes)
    newbalance = self.nodes[0].getbalance()
    assert (balance - newbalance < Decimal("0.001"))  # no more than fees lost
    balance = newbalance

    # Disconnect nodes so node0's transactions don't get into node1's mempool
    disconnect_nodes(self.nodes[0], 1)

    # Identify the 10vektor outputs
    nA = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txA, 1)["vout"]) if vout["value"] == Decimal("10"))
    nB = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txB, 1)["vout"]) if vout["value"] == Decimal("10"))
    nC = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txC, 1)["vout"]) if vout["value"] == Decimal("10"))

    inputs = []
    # spend 10vektor outputs from txA and txB
    inputs.append({"txid": txA, "vout": nA})
    inputs.append({"txid": txB, "vout": nB})
    outputs = {}
    outputs[self.nodes[0].getnewaddress()] = Decimal("14.99998")
    outputs[self.nodes[1].getnewaddress()] = Decimal("5")
    signed = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs, outputs))
    txAB1 = self.nodes[0].sendrawtransaction(signed["hex"])

    # Identify the 14.99998vektor output
    nAB = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txAB1, 1)["vout"]) if vout["value"] == Decimal("14.99998"))

    # Create a child tx spending AB1 and C
    inputs = []
    inputs.append({"txid": txAB1, "vout": nAB})
    inputs.append({"txid": txC, "vout": nC})
    outputs = {}
    outputs[self.nodes[0].getnewaddress()] = Decimal("24.9996")
    signed2 = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs, outputs))
    txABC2 = self.nodes[0].sendrawtransaction(signed2["hex"])

    # Create a child tx spending ABC2
    signed3_change = Decimal("24.999")
    inputs = [{"txid": txABC2, "vout": 0}]
    outputs = {self.nodes[0].getnewaddress(): signed3_change}
    signed3 = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs, outputs))
    # note tx is never directly referenced, only abandoned as a child of the above
    self.nodes[0].sendrawtransaction(signed3["hex"])

    # In mempool txs from self should increase balance from change
    newbalance = self.nodes[0].getbalance()
    assert_equal(newbalance, balance - Decimal("30") + signed3_change)
    balance = newbalance

    # Restart the node with a higher min relay fee so the parent tx is no longer in mempool
    # TODO: redo with eviction
    self.stop_node(0)
    self.start_node(0, extra_args=["-minrelaytxfee=0.0001"])

    # Verify txs no longer in either node's mempool
    assert_equal(len(self.nodes[0].getrawmempool()), 0)
    assert_equal(len(self.nodes[1].getrawmempool()), 0)

    # Not in mempool txs from self should only reduce balance
    # inputs are still spent, but change not received
    newbalance = self.nodes[0].getbalance()
    assert_equal(newbalance, balance - signed3_change)

    # Unconfirmed received funds that are not in mempool, also shouldn't show
    # up in unconfirmed balance
    unconfbalance = self.nodes[0].getunconfirmedbalance() + self.nodes[0].getbalance()
    assert_equal(unconfbalance, newbalance)

    # Also shouldn't show up in listunspent
    assert (not txABC2 in [utxo["txid"] for utxo in self.nodes[0].listunspent(0)])
    balance = newbalance

    # Abandon original transaction and verify inputs are available again
    # including that the child tx was also abandoned
    self.nodes[0].abandontransaction(txAB1)
    newbalance = self.nodes[0].getbalance()
    assert_equal(newbalance, balance + Decimal("30"))
    balance = newbalance

    # Verify that even with a low min relay fee, the tx is not reaccepted from wallet on startup once abandoned
    self.stop_node(0)
    self.start_node(0, extra_args=["-minrelaytxfee=0.00001"])
    assert_equal(len(self.nodes[0].getrawmempool()), 0)
    assert_equal(self.nodes[0].getbalance(), balance)

    # But if it is received again then it is unabandoned
    # And since now in mempool, the change is available
    # But its child tx remains abandoned
    self.nodes[0].sendrawtransaction(signed["hex"])
    newbalance = self.nodes[0].getbalance()
    assert_equal(newbalance, balance - Decimal("20") + Decimal("14.99998"))
    balance = newbalance

    # Send child tx again so it is unabandoned
    self.nodes[0].sendrawtransaction(signed2["hex"])
    newbalance = self.nodes[0].getbalance()
    assert_equal(newbalance, balance - Decimal("10") - Decimal("14.99998") + Decimal("24.9996"))
    balance = newbalance

    # Remove using high relay fee again
    self.stop_node(0)
    self.start_node(0, extra_args=["-minrelaytxfee=0.0001"])
    assert_equal(len(self.nodes[0].getrawmempool()), 0)
    newbalance = self.nodes[0].getbalance()
    assert_equal(newbalance, balance - Decimal("24.9996"))
    balance = newbalance

    # Create a double spend of AB1 by spending again from only A's 10 output
    # Mine double spend from node 1
    inputs = []
    inputs.append({"txid": txA, "vout": nA})
    outputs = {}
    outputs[self.nodes[1].getnewaddress()] = Decimal("9.9999")
    tx = self.nodes[0].createrawtransaction(inputs, outputs)
    signed = self.nodes[0].signrawtransactionwithwallet(tx)
    self.nodes[1].sendrawtransaction(signed["hex"])
    self.nodes[1].generate(1)

    connect_nodes(self.nodes[0], 1)
    sync_blocks(self.nodes)

    # Verify that B and C's 10 VEKTOR outputs are available for spending again because AB1 is now conflicted
    newbalance = self.nodes[0].getbalance()
    assert_equal(newbalance, balance + Decimal("20"))
    balance = newbalance

    # There is currently a minor bug around this and so this test doesn't work. See Issue #7315
    # Invalidate the block with the double spend and B's 10 VEKTOR output should no longer be available
    # Don't think C's should either
    self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
    newbalance = self.nodes[0].getbalance()
    #assert_equal(newbalance, balance - Decimal("10"))
    self.log.info("If balance has not declined after invalidateblock then out of mempool wallet tx which is no longer")
    self.log.info("conflicted has not resumed causing its inputs to be seen as spent. See Issue #7315")
    self.log.info(str(balance) + " -> " + str(newbalance) + " ?")
def run_test(self):
    """Test address-type selection across wallet RPCs and change-output rules.

    Nodes 0-4 are configured with different -addresstype/-changetype options
    (visible in self.extra_args); node 5 acts as miner/funder only. Checks:
    multisig address types, sends between every pair of nodes with and
    without explicit address_type, the descriptor of each received UTXO,
    change-output type rules, and invalid address-type error handling.
    """
    # Mine 101 blocks on node5 to bring nodes out of IBD and make sure that
    # no coinbases are maturing for the nodes-under-test during the test
    self.nodes[5].generate(101)
    sync_blocks(self.nodes)

    # Fixed test keys: two uncompressed and their compressed counterparts.
    uncompressed_1 = "0496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52da7589379515d4e0a604f8141781e62294721166bf621e73a82cbf2342c858ee"
    uncompressed_2 = "047211a824f55b505228e4c3d5194c1fcfaa15a456abdf37f9b9d97a4040afc073dee6c89064984f03385237d92167c13e236446b417ab79a0fcae412ae3316b77"
    compressed_1 = "0296b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52"
    compressed_2 = "037211a824f55b505228e4c3d5194c1fcfaa15a456abdf37f9b9d97a4040afc073"

    # addmultisigaddress with at least 1 uncompressed key should return a legacy address.
    for node in range(4):
        self.test_address(node, self.nodes[node].addmultisigaddress(2, [uncompressed_1, uncompressed_2])['address'], True, 'legacy')
        self.test_address(node, self.nodes[node].addmultisigaddress(2, [compressed_1, uncompressed_2])['address'], True, 'legacy')
        self.test_address(node, self.nodes[node].addmultisigaddress(2, [uncompressed_1, compressed_2])['address'], True, 'legacy')
    # addmultisigaddress with all compressed keys should return the appropriate address type (even when the keys are not ours).
    self.test_address(0, self.nodes[0].addmultisigaddress(2, [compressed_1, compressed_2])['address'], True, 'legacy')
    self.test_address(1, self.nodes[1].addmultisigaddress(2, [compressed_1, compressed_2])['address'], True, 'p2sh-segwit')
    self.test_address(2, self.nodes[2].addmultisigaddress(2, [compressed_1, compressed_2])['address'], True, 'p2sh-segwit')
    self.test_address(3, self.nodes[3].addmultisigaddress(2, [compressed_1, compressed_2])['address'], True, 'bech32')

    # Every combination of: explicit address_type argument or not,
    # multisig or single-key, and each of nodes 0-3 as the sender.
    for explicit_type, multisig, from_node in itertools.product([False, True], [False, True], range(4)):
        address_type = None
        if explicit_type and not multisig:
            # Deliberately pick a type that differs from the node's default.
            if from_node == 1:
                address_type = 'bech32'
            elif from_node == 0 or from_node == 3:
                address_type = 'p2sh-segwit'
            else:
                address_type = 'legacy'
        self.log.info("Sending from node {} ({}) with{} multisig using {}".format(from_node, self.extra_args[from_node], "" if multisig else "out", "default" if address_type is None else address_type))
        old_balances = self.get_balances()
        self.log.debug("Old balances are {}".format(old_balances))
        # Unit amount such that outputs of 10x..40x units fit in the balance.
        to_send = (old_balances[from_node] / 101).quantize(Decimal("0.00000001"))
        sends = {}
        addresses = {}

        self.log.debug("Prepare sends")
        for n, to_node in enumerate(range(from_node, from_node + 4)):
            to_node %= 4
            change = False
            if not multisig:
                if from_node == to_node:
                    # When sending non-multisig to self, use getrawchangeaddress
                    address = self.nodes[to_node].getrawchangeaddress(address_type=address_type)
                    change = True
                else:
                    address = self.nodes[to_node].getnewaddress(address_type=address_type)
            else:
                addr1 = self.nodes[to_node].getnewaddress()
                addr2 = self.nodes[to_node].getnewaddress()
                address = self.nodes[to_node].addmultisigaddress(2, [addr1, addr2])['address']

            # Do some sanity checking on the created address
            if address_type is not None:
                typ = address_type
            elif to_node == 0:
                typ = 'legacy'
            elif to_node == 1 or (to_node == 2 and not change):
                typ = 'p2sh-segwit'
            else:
                typ = 'bech32'
            self.test_address(to_node, address, multisig, typ)

            # Output entry
            sends[address] = to_send * 10 * (1 + n)
            addresses[to_node] = (address, typ)

        self.log.debug("Sending: {}".format(sends))
        self.nodes[from_node].sendmany("", sends)
        sync_mempools(self.nodes)

        unconf_balances = self.get_balances(False)
        self.log.debug("Check unconfirmed balances: {}".format(unconf_balances))
        assert_equal(unconf_balances[from_node], 0)
        for n, to_node in enumerate(range(from_node + 1, from_node + 4)):
            to_node %= 4
            assert_equal(unconf_balances[to_node], to_send * 10 * (2 + n))

        # node5 collects fee and block subsidy to keep accounting simple
        self.nodes[5].generate(1)
        sync_blocks(self.nodes)

        # Verify that the receiving wallet contains a UTXO with the expected address, and expected descriptor
        for n, to_node in enumerate(range(from_node, from_node + 4)):
            to_node %= 4
            found = False
            for utxo in self.nodes[to_node].listunspent():
                if utxo['address'] == addresses[to_node][0]:
                    found = True
                    self.test_desc(to_node, addresses[to_node][0], multisig, addresses[to_node][1], utxo)
                    break
            assert found

        new_balances = self.get_balances()
        self.log.debug("Check new balances: {}".format(new_balances))
        # We don't know what fee was set, so we can only check bounds on the balance of the sending node
        assert_greater_than(new_balances[from_node], to_send * 10)
        assert_greater_than(to_send * 11, new_balances[from_node])
        for n, to_node in enumerate(range(from_node + 1, from_node + 4)):
            to_node %= 4
            assert_equal(new_balances[to_node], old_balances[to_node] + to_send * 10 * (2 + n))

    # Get one p2sh/segwit address from node2 and two bech32 addresses from node3:
    to_address_p2sh = self.nodes[2].getnewaddress()
    to_address_bech32_1 = self.nodes[3].getnewaddress()
    to_address_bech32_2 = self.nodes[3].getnewaddress()

    # Fund node 4:
    self.nodes[5].sendtoaddress(self.nodes[4].getnewaddress(), Decimal("1"))
    self.nodes[5].generate(1)
    sync_blocks(self.nodes)
    assert_equal(self.nodes[4].getbalance(), 1)

    self.log.info("Nodes with addresstype=legacy never use a P2WPKH change output")
    self.test_change_output_type(0, [to_address_bech32_1], 'legacy')

    self.log.info("Nodes with addresstype=p2sh-segwit only use a P2WPKH change output if any destination address is bech32:")
    self.test_change_output_type(1, [to_address_p2sh], 'p2sh-segwit')
    self.test_change_output_type(1, [to_address_bech32_1], 'bech32')
    self.test_change_output_type(1, [to_address_p2sh, to_address_bech32_1], 'bech32')
    self.test_change_output_type(1, [to_address_bech32_1, to_address_bech32_2], 'bech32')

    self.log.info("Nodes with change_type=bech32 always use a P2WPKH change output:")
    self.test_change_output_type(2, [to_address_bech32_1], 'bech32')
    self.test_change_output_type(2, [to_address_p2sh], 'bech32')

    self.log.info("Nodes with addresstype=bech32 always use a P2WPKH change output (unless changetype is set otherwise):")
    self.test_change_output_type(3, [to_address_bech32_1], 'bech32')
    self.test_change_output_type(3, [to_address_p2sh], 'bech32')

    self.log.info('getrawchangeaddress defaults to addresstype if -changetype is not set and argument is absent')
    self.test_address(3, self.nodes[3].getrawchangeaddress(), multisig=False, typ='bech32')

    self.log.info('test invalid address type arguments')
    assert_raises_rpc_error(-5, "Unknown address type ''", self.nodes[3].addmultisigaddress, 2, [compressed_1, compressed_2], None, '')
    assert_raises_rpc_error(-5, "Unknown address type ''", self.nodes[3].getnewaddress, None, '')
    assert_raises_rpc_error(-5, "Unknown address type ''", self.nodes[3].getrawchangeaddress, '')
    assert_raises_rpc_error(-5, "Unknown address type 'bech23'", self.nodes[3].getrawchangeaddress, 'bech23')

    self.log.info("Nodes with changetype=p2sh-segwit never use a P2WPKH change output")
    self.test_change_output_type(4, [to_address_bech32_1], 'p2sh-segwit')
    self.test_address(4, self.nodes[4].getrawchangeaddress(), multisig=False, typ='p2sh-segwit')
    self.log.info("Except for getrawchangeaddress if specified:")
    self.test_address(4, self.nodes[4].getrawchangeaddress(), multisig=False, typ='p2sh-segwit')
    self.test_address(4, self.nodes[4].getrawchangeaddress('bech32'), multisig=False, typ='bech32')
def test_chainlock_overrides_islock(self, test_block_conflict):
    """Verify that a ChainLock overrides conflicting InstantSend locks.

    tx1 (broadcast + islocked) conflicts with tx2/tx3; a block containing
    tx2 is chainlocked, which must prune the ISLOCKs for tx1 and its child
    tx4 and make the chainlocked tx2 the surviving transaction.

    :param test_block_conflict: when True, node 0 first submits the
        conflicting block itself and it must be rejected with
        "conflict-tx-lock" before the chainlock arrives.
    """
    # create three raw TXs, they will conflict with each other
    rawtx1 = self.create_raw_tx(self.nodes[0], self.nodes[0], 1, 1, 100)['hex']
    rawtx2 = self.create_raw_tx(self.nodes[0], self.nodes[0], 1, 1, 100)['hex']
    rawtx3 = self.create_raw_tx(self.nodes[0], self.nodes[0], 1, 1, 100)['hex']

    rawtx1_obj = FromHex(CTransaction(), rawtx1)
    rawtx2_obj = FromHex(CTransaction(), rawtx2)
    rawtx3_obj = FromHex(CTransaction(), rawtx3)

    rawtx1_txid = self.nodes[0].sendrawtransaction(rawtx1)
    # tx2/tx3 are never broadcast, so compute their txids locally
    # (double-SHA256 of the serialization, byte-reversed, hex-encoded).
    rawtx2_txid = encode(hash256(hex_str_to_bytes(rawtx2))[::-1], 'hex_codec').decode('ascii')
    rawtx3_txid = encode(hash256(hex_str_to_bytes(rawtx3))[::-1], 'hex_codec').decode('ascii')

    # Create a chained TX on top of tx1
    inputs = []
    n = 0
    for out in rawtx1_obj.vout:
        if out.nValue == 100000000:
            inputs.append({"txid": rawtx1_txid, "vout": n})
        n += 1
    rawtx4 = self.nodes[0].createrawtransaction(inputs, {self.nodes[0].getnewaddress(): 0.999})
    rawtx4 = self.nodes[0].signrawtransaction(rawtx4)['hex']
    rawtx4_txid = self.nodes[0].sendrawtransaction(rawtx4)

    # wait for transactions to propagate
    sync_mempools(self.nodes)
    for node in self.nodes:
        self.wait_for_instantlock(rawtx1_txid, node)
        self.wait_for_instantlock(rawtx4_txid, node)

    block = self.create_block(self.nodes[0], [rawtx2_obj])
    if test_block_conflict:
        # The block shouldn't be accepted/connected but it should be known to node 0 now
        submit_result = self.nodes[0].submitblock(ToHex(block))
        assert (submit_result == "conflict-tx-lock")

    cl = self.create_chainlock(self.nodes[0].getblockcount() + 1, block.sha256)
    self.test_node.send_clsig(cl)
    for node in self.nodes:
        self.wait_for_best_chainlock(node, "%064x" % block.sha256)
    sync_blocks(self.nodes)

    # At this point all nodes should be in sync and have the same "best chainlock"
    submit_result = self.nodes[1].submitblock(ToHex(block))
    if test_block_conflict:
        # Node 1 should receive the block from node 0 and should not accept it again via submitblock
        assert (submit_result == "duplicate")
    else:
        # The block should get accepted now, and at the same time prune the conflicting ISLOCKs
        assert (submit_result is None)

    for node in self.nodes:
        self.wait_for_chainlocked_block(node, "%064x" % block.sha256)

    # Create a chained TX on top of tx2
    inputs = []
    n = 0
    for out in rawtx2_obj.vout:
        if out.nValue == 100000000:
            inputs.append({"txid": rawtx2_txid, "vout": n})
        n += 1
    rawtx5 = self.nodes[0].createrawtransaction(inputs, {self.nodes[0].getnewaddress(): 0.999})
    rawtx5 = self.nodes[0].signrawtransaction(rawtx5)['hex']
    rawtx5_txid = self.nodes[0].sendrawtransaction(rawtx5)

    # wait for the transaction to propagate
    sync_mempools(self.nodes)
    for node in self.nodes:
        self.wait_for_instantlock(rawtx5_txid, node)

    # Lets verify that the ISLOCKs got pruned: tx1 and tx4 are gone,
    # while the chainlocked tx2 is reported locked but not via an ISLOCK.
    for node in self.nodes:
        assert_raises_rpc_error(-5, "No such mempool or blockchain transaction", node.getrawtransaction, rawtx1_txid, True)
        assert_raises_rpc_error(-5, "No such mempool or blockchain transaction", node.getrawtransaction, rawtx4_txid, True)
        rawtx = node.getrawtransaction(rawtx2_txid, True)
        assert (rawtx['chainlock'])
        assert (rawtx['instantlock'])
        assert (not rawtx['instantlock_internal'])
def run_test(self):
    """End-to-end test of the SMSG (secure messaging) RPC interface.

    Covers: enabling local receive keys, paid/free message sending, key
    import/dump, behaviour with an encrypted/locked wallet, sending from
    file and from hex, inbox/outbox filtering, clear+rescan, purging,
    bucket accounting, per-call encoding options, a wallet-less node,
    smsgsetwallet, and funding a paid message from RCT (anon) balance.

    Fix vs. original: the sync-wait loop below used `assert (i < 10)` after
    `for i in range(10)`, which can never fail (i tops out at 9); it now
    asserts the actual condition being waited for.
    """
    tmpdir = self.options.tmpdir
    nodes = self.nodes

    nodes[0].extkeyimportmaster(nodes[0].mnemonic('new')['master'])
    nodes[1].extkeyimportmaster('abandon baby cabbage dad eager fabric gadget habit ice kangaroo lab absorb')

    address0 = nodes[0].getnewaddress()  # Will be different each run
    address1 = nodes[1].getnewaddress()
    # node1 uses a fixed mnemonic, so its first address is deterministic
    assert (address1 == 'pX9N6S76ZtA5BfsiJmqBbjaEgLMHpt58it')

    sx_addr0 = nodes[0].getnewstealthaddress()
    nodes[1].sendtypeto('part', 'part', [{'address': sx_addr0, 'amount': 20}, ])

    ro = nodes[0].smsglocalkeys()
    assert (len(ro['wallet_keys']) == 0)

    ro = nodes[0].smsgaddlocaladdress(address0)
    assert ('Receiving messages enabled for address' in ro['result'])

    ro = nodes[0].smsglocalkeys()
    assert (len(ro['wallet_keys']) == 1)

    ro = nodes[1].smsgaddaddress(address0, ro['wallet_keys'][0]['public_key'])
    assert (ro['result'] == 'Public key added to db.')

    # Paid message: first a fee-estimate dry run, then the real send.
    text_1 = "['data':'test','value':1]"
    ro = nodes[1].smsgsend(address1, address0, text_1, True, 4, True)
    assert (ro['result'] == 'Not Sent.')
    assert (isclose(ro['fee'], 0.00086600))

    ro = nodes[1].smsgsend(address1, address0, text_1, True, 4)
    assert (ro['result'] == 'Sent.')

    self.stakeBlocks(1, nStakeNode=1)

    # Build up anon outputs on node 0 for the RCT-funding test at the end.
    for i in range(20):
        nodes[0].sendtypeto('part', 'anon', [{'address': sx_addr0, 'amount': 0.5}, ])

    self.waitForSmsgExchange(1, 1, 0)

    ro = nodes[0].smsginbox()
    assert (len(ro['messages']) == 1)
    assert (ro['messages'][0]['text'] == text_1)

    self.log.info('Test smsgimportprivkey and smsgdumpprivkey')
    test_privkey = '7pHSJFY1tNwi6d68UttGzB8YnXq2wFWrBVoadLv4Y6ekJD3L1iKs'
    address0_1 = 'pasdoMwEn35xQUXFvsChWAQjuG8rEKJQW9'
    nodes[0].smsgimportprivkey(test_privkey, 'smsg test key')
    assert (nodes[0].smsgdumpprivkey(address0_1) == test_privkey)

    text_2 = "['data':'test','value':2]"
    ro = nodes[0].smsglocalkeys()
    assert (len(ro['smsg_keys']) == 1)
    assert (ro['smsg_keys'][0]['address'] == address0_1)

    ro = nodes[1].smsgaddaddress(address0_1, ro['smsg_keys'][0]['public_key'])
    assert (ro['result'] == 'Public key added to db.')
    ro = nodes[1].smsgsend(address1, address0_1, text_2, True, 4)
    assert (ro['result'] == 'Sent.')

    self.stakeBlocks(1, nStakeNode=1)
    self.waitForSmsgExchange(2, 1, 0)

    ro = nodes[0].smsginbox()
    assert (len(ro['messages']) == 1)
    assert (ro['messages'][0]['text'] == text_2)

    # Local keys must survive wallet encryption.
    nodes[0].encryptwallet('qwerty234')
    time.sleep(2)
    ro = nodes[0].getwalletinfo()
    assert (ro['encryptionstatus'] == 'Locked')

    localkeys0 = nodes[0].smsglocalkeys()
    assert (len(localkeys0['smsg_keys']) == 1)
    assert (len(localkeys0['wallet_keys']) == 1)
    assert (localkeys0['smsg_keys'][0]['address'] == address0_1)
    assert (localkeys0['wallet_keys'][0]['address'] == address0)

    text_3 = "['data':'test','value':3]"
    ro = nodes[0].smsglocalkeys()
    assert (len(ro['smsg_keys']) == 1)
    assert (ro['smsg_keys'][0]['address'] == address0_1)

    ro = nodes[1].smsgsend(address1, address0, 'Non paid msg')
    assert (ro['result'] == 'Sent.')

    ro = nodes[1].smsgsend(address1, address0_1, text_3, True, 4)
    assert (ro['result'] == 'Sent.')
    assert (len(ro['txid']) == 64)

    self.sync_all()
    self.stakeBlocks(1, nStakeNode=1)
    self.waitForSmsgExchange(4, 1, 0)

    msgid = ro['msgid']
    # Poll until the sent message reaches node 1's outbox.
    for i in range(5):
        try:
            ro = nodes[1].smsg(msgid)
            assert (ro['location'] == 'outbox')
            break
        except Exception:
            time.sleep(1)
    assert (ro['text'] == text_3)
    assert (ro['from'] == address1)
    assert (ro['to'] == address0_1)

    ro = nodes[0].walletpassphrase("qwerty234", 300)
    ro = nodes[0].smsginbox()
    assert (len(ro['messages']) == 2)
    flat = self.dumpj(ro)
    assert ('Non paid msg' in flat)
    assert (text_3 in flat)

    # With the wallet locked again, previously-received messages can't be
    # decrypted and should report 'Wallet is locked'.
    ro = nodes[0].walletlock()
    ro = nodes[0].smsginbox("all")
    assert (len(ro['messages']) == 4)
    flat = self.dumpj(ro)
    assert (flat.count('Wallet is locked') == 2)

    ro = nodes[0].smsg(msgid)
    assert (ro['read'] == True)

    ro = nodes[0].smsg(msgid, {'setread': False})
    assert (ro['read'] == False)

    ro = nodes[0].smsg(msgid, {'delete': True})
    assert (ro['operation'] == 'Deleted')
    try:
        ro = nodes[0].smsg(msgid)
        assert (False), 'Read deleted msg.'
    except:
        pass

    ro = nodes[0].smsggetpubkey(address0_1)
    assert (ro['publickey'] == 'h2UfzZxbhxQPcXDfYTBRGSC7GM77qrLjhtqcmfAnAia9')

    # Send a message from a file (binary content incl. an embedded NUL).
    filepath = tmpdir + '/sendfile.txt'
    msg = b"msg in file\0after null sep"
    with open(filepath, 'wb', encoding=None) as fp:
        fp.write(msg)

    sendoptions = {'fromfile': True}
    ro = nodes[1].smsgsend(address1, address0_1, filepath, True, 4, False, sendoptions)
    assert (ro['result'] == 'Sent.')
    msgid = ro['msgid']

    # Same payload sent as a hex string.
    sendoptions = {'decodehex': True}
    ro = nodes[1].smsgsend(address1, address0_1, binascii.hexlify(msg).decode("utf-8"), True, 4, False, sendoptions)
    msgid2 = ro['msgid']

    self.stakeBlocks(1, nStakeNode=1)

    for i in range(5):
        try:
            ro = nodes[1].smsg(msgid, {'encoding': 'hex'})
            assert (ro['location'] == 'outbox')
            break
        except:
            time.sleep(1)
    assert (msg == bytes.fromhex(ro['hex'][:-2]))  # Extra null byte gets tacked on

    for i in range(5):
        try:
            ro = nodes[1].smsg(msgid2, {'encoding': 'hex'})
            assert (ro['location'] == 'outbox')
            break
        except:
            time.sleep(1)
    assert (msg == bytes.fromhex(ro['hex'][:-2]))
    assert (ro['daysretention'] == 4)

    ro = nodes[0].smsgoptions('list', True)
    assert (len(ro['options']) == 3)
    assert (len(ro['options'][0]['description']) > 0)

    ro = nodes[0].smsgoptions('set', 'newAddressAnon', 'false')
    assert ('newAddressAnon = false' in json.dumps(ro))

    addr = nodes[0].getnewaddress('smsg test')
    pubkey = nodes[0].getaddressinfo(addr)['pubkey']
    ro = nodes[1].smsgaddaddress(addr, pubkey)
    assert ('Public key added to db' in json.dumps(ro))

    # Wait for sync. Assert the condition itself rather than the loop
    # counter: the original `assert (i < 10)` after `range(10)` could
    # never fail, silently masking a sync timeout.
    for i in range(10):
        ro = nodes[0].smsginbox('all')
        if len(ro['messages']) >= 5:
            break
        time.sleep(1)
    assert (len(ro['messages']) >= 5)

    self.log.info('Test filtering')
    ro = nodes[0].smsginbox('all', "'vAlue':2")
    assert (len(ro['messages']) == 1)
    ro = nodes[1].smsgoutbox('all', "'vAlue':2")
    assert (len(ro['messages']) == 1)

    self.log.info('Test clear and rescan')
    ro = nodes[0].smsginbox('clear')
    assert ('Deleted 5 messages' in ro['result'])

    ro = nodes[0].walletpassphrase("qwerty234", 300)
    ro = nodes[0].smsgscanbuckets()
    assert ('Scan Buckets Completed' in ro['result'])

    ro = nodes[0].smsginbox('all')
    # Recover 5 + 1 dropped msg
    assert (len(ro['messages']) == 6)

    self.log.info('Test smsglocalkeys')
    addr = nodes[0].getnewaddress()
    ro = nodes[0].smsglocalkeys('recv', '+', addr)
    assert ('Address not found' in ro['result'])
    ro = nodes[0].smsglocalkeys('anon', '+', addr)
    assert ('Address not found' in ro['result'])
    ro = nodes[0].smsgaddlocaladdress(addr)
    assert ('Receiving messages enabled for address' in ro['result'])
    ro = nodes[0].smsglocalkeys('recv', '-', addr)
    assert ('Receive off' in ro['key'])
    assert (addr in ro['key'])
    ro = nodes[0].smsglocalkeys('anon', '-', addr)
    assert ('Anon off' in ro['key'])
    assert (addr in ro['key'])
    ro = nodes[0].smsglocalkeys('all')
    n = getIndexAtProperty(ro['wallet_keys'], 'address', addr)
    assert (ro['wallet_keys'][n]['receive'] == '0')
    assert (ro['wallet_keys'][n]['anon'] == '0')

    self.log.info('Test smsgpurge')
    ro = nodes[0].smsg(msgid, {'encoding': 'hex'})
    assert (ro['msgid'] == msgid)
    nodes[0].smsgpurge(msgid)
    try:
        nodes[0].smsg(msgid, {'encoding': 'hex'})
        assert (False), 'Purged message in inbox'
    except JSONRPCException as e:
        assert ('Unknown message id' in e.error['message'])

    ro = nodes[0].smsgbuckets()
    assert (int(ro['total']['numpurged']) == 1)
    # Sum all buckets: one message was purged, so actives lag totals by 1.
    num_messages = 0
    num_active = 0
    for b in ro['buckets']:
        num_messages += int(b['no. messages'])
        num_active += int(b['active messages'])
    assert (num_messages == num_active + 1)

    self.log.info('Test listunspent include_immature')
    without_immature = nodes[1].listunspent()
    with_immature = nodes[1].listunspent(query_options={'include_immature': True})
    assert (len(with_immature) > len(without_immature))

    self.log.info('Test encoding options')
    options = {'encoding': 'hex'}
    ro = nodes[0].smsginbox('all', '', options)
    assert (len(ro['messages']) == 5)
    for msg in ro['messages']:
        assert ('hex' in msg)

    options = {'encoding': 'text'}
    ro = nodes[0].smsginbox('all', '', options)
    assert (len(ro['messages']) == 5)
    for msg in ro['messages']:
        assert ('text' in msg)

    options = {'encoding': 'none'}
    ro = nodes[0].smsginbox('all', '', options)
    assert (len(ro['messages']) == 5)
    for msg in ro['messages']:
        assert ('text' not in msg)
        assert ('hex' not in msg)

    self.log.info('Test disablewallet')
    # node 2 runs without a wallet but still participates in SMSG relay.
    assert ('SMSG' in self.dumpj(nodes[2].getnetworkinfo()['localservicesnames']))
    assert_raises_rpc_error(-32601, 'Method not found', nodes[2].getwalletinfo)
    for i in range(20):
        if nodes[0].smsgbuckets('total')['total']['messages'] != nodes[2].smsgbuckets('total')['total']['messages']:
            time.sleep(0.5)
            continue
        break
    assert (nodes[0].smsgbuckets('total')['total']['messages'] == nodes[2].smsgbuckets('total')['total']['messages'])

    self.log.info('Test smsggetinfo and smsgsetwallet')
    ro = nodes[0].smsggetinfo()
    assert (ro['enabled'] is True)
    assert (ro['active_wallet'] == '')
    assert_raises_rpc_error(-1, 'Wallet not found: "abc"', nodes[0].smsgsetwallet, 'abc')
    nodes[0].smsgsetwallet()
    ro = nodes[0].smsggetinfo()
    assert (ro['enabled'] is True)
    assert (ro['active_wallet'] == 'Not set.')
    nodes[0].createwallet('new_wallet')
    assert (len(nodes[0].listwallets()) == 2)
    nodes[0].smsgsetwallet('new_wallet')
    ro = nodes[0].smsggetinfo()
    assert (ro['enabled'] is True)
    assert (ro['active_wallet'] == 'new_wallet')

    nodes[0].smsgdisable()
    ro = nodes[0].smsggetinfo()
    assert (ro['enabled'] is False)
    nodes[0].smsgenable()
    ro = nodes[0].smsggetinfo()
    assert (ro['enabled'] is True)

    self.log.info('Test funding from RCT balance')
    nodes[1].smsginbox()  # Clear inbox
    ro = nodes[1].smsgaddlocaladdress(address1)
    assert ('Receiving messages enabled for address' in ro['result'])
    msg = 'Test funding from RCT balance'
    sendoptions = {'fund_from_rct': True, 'rct_ring_size': 6}
    sent_msg = nodes[0].smsgsend(address0, address1, msg, True, 4, False, sendoptions)
    assert (sent_msg['result'] == 'Sent.')
    fund_tx = nodes[0].getrawtransaction(sent_msg['txid'], True)
    assert (fund_tx['vin'][0]['type'] == 'anon')
    ro = nodes[0].smsgoutbox('all', '', {'sending': True})
    assert (ro['messages'][0]['msgid'] == sent_msg['msgid'])

    sync_mempools([nodes[0], nodes[1]])
    self.stakeBlocks(1, nStakeNode=1)

    # Here range(20) with `assert (i < 19)` is a real timeout check:
    # i == 19 only when the message never arrived.
    i = 0
    for i in range(20):
        ro = nodes[1].smsginbox()
        if len(ro['messages']) > 0:
            break
        time.sleep(1)
    assert (i < 19)
    assert (msg == ro['messages'][0]['text'])
    ro = nodes[0].smsgoutbox('all', '', {'sending': True})
    assert (len(ro['messages']) == 0)
def run_test(self):
    """Verify that a coinstake input becomes unspendable while its PoS block is
    on the active chain, and becomes spendable again after a reorg to a longer
    competing chain orphans that block (regression test for PR #1043).

    Flow: mine 250 PoW blocks split between node 0 and node 1, disconnect the
    nodes, stake on both sides independently, then reconnect so node 0 reorgs
    onto node 1's longer chain and its staked blocks are disconnected.
    """
    # NLAST_POW_BLOCK = 250 - so mine 125 blocks each node (25 consecutive blocks for 5 times)
    NMATURITY = 100  # coinbase maturity depth (see immature-balance math below)
    self.log.info("Mining 250 blocks (125 with node 0 and 125 with node 1)...")
    for i in range(5):
        self.generateBatchBlocks(0, 25)
        sync_blocks(self.nodes)
        self.generateBatchBlocks(1, 25)
        sync_blocks(self.nodes)
    sync_mempools(self.nodes)
    # Check balances: each node mined 125 blocks at 250 per block, of which the
    # last 50 (two 25-block bursts per node, interleaved) are still immature.
    balance0 = 250.0 * (125 - 50)
    balance1 = 250.0 * (125 - 50)
    # Last two 25-blocks bursts (for each node) are not mature: NMATURITY = 2 * (2 * 25)
    immature_balance0 = 250.0 * 50
    immature_balance1 = 250.0 * 50
    w_info = self.nodes[0].getwalletinfo()
    assert_equal(w_info["balance"], balance0)
    assert_equal(w_info["immature_balance"], immature_balance0)
    self.log.info("Balance for node 0 checks out: %f [%f]" % (balance0, immature_balance0))
    w_info = self.nodes[1].getwalletinfo()
    assert_equal(w_info["balance"], balance1)
    assert_equal(w_info["immature_balance"], immature_balance1)
    self.log.info("Balance for node 1 checks out: %f [%f]" % (balance1, immature_balance1))
    # Snapshot node-0 state before the fork so post-reorg balances can be derived.
    initial_balance = balance0
    initial_immature_balance = immature_balance0
    initial_unspent = self.nodes[0].listunspent()
    # PoS start reached (block 250) - disconnect nodes so each stakes its own fork
    self.nodes[0].disconnectnode(urllib.parse.urlparse(self.nodes[1].url).hostname + ":" + str(p2p_port(1)))
    self.nodes[1].disconnectnode(urllib.parse.urlparse(self.nodes[0].url).hostname + ":" + str(p2p_port(0)))
    self.log.info("Nodes disconnected")
    # Stake one block with node-0 and save the stake input
    self.log.info("Staking 1 block with node 0...")
    self.nodes[0].generate(1)
    last_block = self.nodes[0].getblock(self.nodes[0].getbestblockhash())
    assert(len(last_block["tx"]) > 1)  # a PoS block has at least two txes (coinbase + coinstake)
    coinstake_txid = last_block["tx"][1]
    coinstake_tx = self.nodes[0].getrawtransaction(coinstake_txid, True)
    assert(coinstake_tx["vout"][0]["scriptPubKey"]["hex"] == "")  # first output of coinstake is empty
    stakeinput = coinstake_tx["vin"][0]
    # The stake input was unspent 1 block ago, now it's not
    res, utxo = self.findUtxoInList(stakeinput["txid"], stakeinput["vout"], initial_unspent)
    assert (res and utxo["spendable"])
    res, utxo = self.findUtxoInList(stakeinput["txid"], stakeinput["vout"], self.nodes[0].listunspent())
    assert (not res or not utxo["spendable"])
    self.log.info("Coinstake input %s...%s-%d is no longer spendable." % (
        stakeinput["txid"][:9], stakeinput["txid"][-4:], stakeinput["vout"]))
    # Stake 10 more blocks with node-0 and check balances
    self.log.info("Staking 10 more blocks with node 0...")
    self.generateBatchBlocks(0, 10)
    balance0 = initial_balance + 0  # mined blocks matured (250*11) - staked blocks inputs (250*11)
    immature_balance0 += 250 * 11  # -mined blocks matured (250*11) + staked blocks (500*11)
    w_info = self.nodes[0].getwalletinfo()
    assert_equal(w_info["balance"], balance0)
    assert_equal(w_info["immature_balance"], immature_balance0)
    self.log.info("Balance for node 0 checks out: %f [%f]" % (balance0, immature_balance0))
    # verify that the stakeinput can't be spent: the raw tx signs fine but
    # relay must reject it since the input is consumed by the coinstake.
    rawtx_unsigned = self.nodes[0].createrawtransaction(
        [{"txid": str(stakeinput["txid"]), "vout": int(stakeinput["vout"])}],
        {"xxncEuJK27ygNh7imNfaX8JV6ZQUnoBqzN": 249.99})
    rawtx = self.nodes[0].signrawtransaction(rawtx_unsigned)
    assert(rawtx["complete"])
    assert_raises_rpc_error(-25, "Missing inputs", self.nodes[0].sendrawtransaction, rawtx["hex"])
    # Stake 12 blocks with node-1 (longer chain than node-0's 11 staked blocks)
    self.log.info("Staking 12 blocks with node 1...")
    self.generateBatchBlocks(1, 12)
    balance1 -= 250 * 12  # 0 - staked blocks inputs (250*12)
    immature_balance1 += 500 * 12  # + staked blocks (500 * 12)
    w_info = self.nodes[1].getwalletinfo()
    assert_equal(w_info["balance"], balance1)
    assert_equal(w_info["immature_balance"], immature_balance1)
    self.log.info("Balance for node 1 checks out: %f [%f]" % (balance1, immature_balance1))
    new_best_hash = self.nodes[1].getbestblockhash()
    # re-connect and sync nodes and check that node-0 gets on the other chain
    self.log.info("Connecting and syncing nodes...")
    connect_nodes_bi(self.nodes, 0, 1)
    sync_blocks(self.nodes)
    assert_equal(self.nodes[0].getbestblockhash(), new_best_hash)
    # check balance of node-0: its staked blocks were orphaned by the reorg
    balance0 = initial_balance + 250 * 12  # + mined blocks matured (250*12)
    immature_balance0 = initial_immature_balance - 250 * 12  # - mined blocks matured (250*12)
    w_info = self.nodes[0].getwalletinfo()
    assert_equal(w_info["balance"], balance0)  # <--- !!! THIS FAILS before PR #1043
    assert_equal(w_info["immature_balance"], immature_balance0)
    self.log.info("Balance for node 0 checks out: %f [%f]" % (balance0, immature_balance0))
    # check that NOW the original stakeinput is present and spendable
    res, utxo = self.findUtxoInList(stakeinput["txid"], stakeinput["vout"], self.nodes[0].listunspent())
    assert (res and utxo["spendable"])  # <--- !!! THIS FAILS before PR #1043
    self.log.info("Coinstake input %s...%s-%d is spendable again." % (
        stakeinput["txid"][:9], stakeinput["txid"][-4:], stakeinput["vout"]))
    # The previously-rejected spend of the stake input must now relay and confirm.
    self.nodes[0].sendrawtransaction(rawtx["hex"])
    self.nodes[1].generate(1)
    sync_blocks(self.nodes)
    # Once confirmed, the input is spent for good.
    res, utxo = self.findUtxoInList(stakeinput["txid"], stakeinput["vout"], self.nodes[0].listunspent())
    assert (not res or not utxo["spendable"])
def run_test(self):
    """Exercise the abandontransaction RPC and its effect on wallet balances.

    Builds a chain of unconfirmed wallet transactions (AB1 -> ABC2 -> child),
    evicts them from the mempool by restarting with a high -minrelaytxfee,
    then checks that abandoning the parent releases its inputs (including
    through descendants), that rebroadcasting un-abandons, and that a mined
    double-spend marks the chain conflicted.
    """
    self.nodes[1].generate(100)
    sync_blocks(self.nodes)
    balance = self.nodes[0].getbalance()
    # Three 10-coin self-sends on node 0; their outputs are spent below.
    txA = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10"))
    txB = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10"))
    txC = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10"))
    sync_mempools(self.nodes)
    self.nodes[1].generate(1)
    # Can not abandon non-wallet transaction
    assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', lambda: self.nodes[0].abandontransaction(txid='ff' * 32))
    # Can not abandon confirmed transaction
    assert_raises_rpc_error(-5, 'Transaction not eligible for abandonment', lambda: self.nodes[0].abandontransaction(txid=txA))
    sync_blocks(self.nodes)
    newbalance = self.nodes[0].getbalance()
    assert(balance - newbalance < Decimal("0.001"))  # no more than fees lost
    balance = newbalance
    # Disconnect nodes so node0's transactions don't get into node1's mempool
    disconnect_nodes(self.nodes[0], 1)
    # Identify the 10btc outputs
    nA = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txA, 1)["vout"]) if vout["value"] == Decimal("10"))
    nB = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txB, 1)["vout"]) if vout["value"] == Decimal("10"))
    nC = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txC, 1)["vout"]) if vout["value"] == Decimal("10"))
    inputs = []
    # spend 10btc outputs from txA and txB
    inputs.append({"txid": txA, "vout": nA})
    inputs.append({"txid": txB, "vout": nB})
    outputs = {}
    outputs[self.nodes[0].getnewaddress()] = Decimal("14.99998")
    outputs[self.nodes[1].getnewaddress()] = Decimal("5")
    signed = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs, outputs))
    txAB1 = self.nodes[0].sendrawtransaction(signed["hex"])
    # Identify the 14.99998btc output
    nAB = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txAB1, 1)["vout"]) if vout["value"] == Decimal("14.99998"))
    # Create a child tx spending AB1 and C
    inputs = []
    inputs.append({"txid": txAB1, "vout": nAB})
    inputs.append({"txid": txC, "vout": nC})
    outputs = {}
    outputs[self.nodes[0].getnewaddress()] = Decimal("24.9996")
    signed2 = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs, outputs))
    txABC2 = self.nodes[0].sendrawtransaction(signed2["hex"])
    # Create a child tx spending ABC2
    signed3_change = Decimal("24.999")
    inputs = [{"txid": txABC2, "vout": 0}]
    outputs = {self.nodes[0].getnewaddress(): signed3_change}
    signed3 = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs, outputs))
    # note tx is never directly referenced, only abandoned as a child of the above
    self.nodes[0].sendrawtransaction(signed3["hex"])
    # In mempool txs from self should increase balance from change
    newbalance = self.nodes[0].getbalance()
    assert_equal(newbalance, balance - Decimal("30") + signed3_change)
    balance = newbalance
    # Restart the node with a higher min relay fee so the parent tx is no longer in mempool
    # TODO: redo with eviction
    self.stop_node(0)
    self.start_node(0, extra_args=["-minrelaytxfee=0.0001"])
    # Verify txs no longer in either node's mempool
    assert_equal(len(self.nodes[0].getrawmempool()), 0)
    assert_equal(len(self.nodes[1].getrawmempool()), 0)
    # Not in mempool txs from self should only reduce balance
    # inputs are still spent, but change not received
    newbalance = self.nodes[0].getbalance()
    assert_equal(newbalance, balance - signed3_change)
    # Unconfirmed received funds that are not in mempool, also shouldn't show
    # up in unconfirmed balance
    unconfbalance = self.nodes[0].getunconfirmedbalance() + self.nodes[0].getbalance()
    assert_equal(unconfbalance, newbalance)
    # Also shouldn't show up in listunspent
    assert(not txABC2 in [utxo["txid"] for utxo in self.nodes[0].listunspent(0)])
    balance = newbalance
    # Abandon original transaction and verify inputs are available again
    # including that the child tx was also abandoned
    self.nodes[0].abandontransaction(txAB1)
    newbalance = self.nodes[0].getbalance()
    assert_equal(newbalance, balance + Decimal("30"))
    balance = newbalance
    # Verify that even with a low min relay fee, the tx is not reaccepted from wallet on startup once abandoned
    self.stop_node(0)
    self.start_node(0, extra_args=["-minrelaytxfee=0.00001"])
    assert_equal(len(self.nodes[0].getrawmempool()), 0)
    assert_equal(self.nodes[0].getbalance(), balance)
    # But if it is received again then it is unabandoned
    # And since now in mempool, the change is available
    # But its child tx remains abandoned
    self.nodes[0].sendrawtransaction(signed["hex"])
    newbalance = self.nodes[0].getbalance()
    assert_equal(newbalance, balance - Decimal("20") + Decimal("14.99998"))
    balance = newbalance
    # Send child tx again so it is unabandoned
    self.nodes[0].sendrawtransaction(signed2["hex"])
    newbalance = self.nodes[0].getbalance()
    assert_equal(newbalance, balance - Decimal("10") - Decimal("14.99998") + Decimal("24.9996"))
    balance = newbalance
    # Remove using high relay fee again
    self.stop_node(0)
    self.start_node(0, extra_args=["-minrelaytxfee=0.0001"])
    assert_equal(len(self.nodes[0].getrawmempool()), 0)
    newbalance = self.nodes[0].getbalance()
    assert_equal(newbalance, balance - Decimal("24.9996"))
    balance = newbalance
    # Create a double spend of AB1 by spending again from only A's 10 output
    # Mine double spend from node 1
    inputs = []
    inputs.append({"txid": txA, "vout": nA})
    outputs = {}
    outputs[self.nodes[1].getnewaddress()] = Decimal("9.9999")
    tx = self.nodes[0].createrawtransaction(inputs, outputs)
    signed = self.nodes[0].signrawtransactionwithwallet(tx)
    self.nodes[1].sendrawtransaction(signed["hex"])
    self.nodes[1].generate(1)
    connect_nodes(self.nodes[0], 1)
    sync_blocks(self.nodes)
    # Verify that B and C's 10 MAC outputs are available for spending again because AB1 is now conflicted
    newbalance = self.nodes[0].getbalance()
    assert_equal(newbalance, balance + Decimal("20"))
    balance = newbalance
    # There is currently a minor bug around this and so this test doesn't work. See Issue #7315
    # Invalidate the block with the double spend and B's 10 MAC output should no longer be available
    # Don't think C's should either
    self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
    newbalance = self.nodes[0].getbalance()
    # Disabled pending the Issue #7315 fix; kept to document the expected behavior:
    #assert_equal(newbalance, balance - Decimal("10"))
    self.log.info("If balance has not declined after invalidateblock then out of mempool wallet tx which is no longer")
    self.log.info("conflicted has not resumed causing its inputs to be seen as spent. See Issue #7315")
    self.log.info(str(balance) + " -> " + str(newbalance) + " ?")
def run_test(self):
    """End-to-end wallet smoke test (Koto / Zcash-derived chain).

    Covers transparent balances and maturity, absurd-fee rejection, raw
    transaction relay via a third node, sendtoaddress/sendmany with and
    without subtract-fee-from-amount, resendwallettransactions, zero-value
    outputs in listunspent, -walletbroadcast behavior across restarts, and
    shielded (sprout) z_sendmany including size limits and fee edge cases.

    Balance assertions use exact hard-coded Decimals, so the RPC call
    sequence and fee settings must not be reordered.
    """
    print("Mining blocks...")
    self.nodes[0].generate(4)
    self.sync_all()
    walletinfo = self.nodes[0].getwalletinfo()
    # Freshly mined coins are immature; spendable balance is still zero.
    assert_equal(walletinfo['immature_balance'], (3920000 + 300) * 0.97)
    assert_equal(walletinfo['balance'], 0)
    self.sync_all()
    self.nodes[1].generate(101)
    self.sync_all()
    # Node 0's coinbases have now matured; node 1's last 100 have not.
    assert_equal(self.nodes[0].getbalance(), (3920000 + 300) * 0.97)
    assert_equal(self.nodes[1].getbalance(), 100 * 0.97)
    assert_equal(self.nodes[2].getbalance(), 0)
    assert_equal(self.nodes[0].getbalance("*"), (3920000 + 300) * 0.97)
    assert_equal(self.nodes[1].getbalance("*"), 100 * 0.97)
    assert_equal(self.nodes[2].getbalance("*"), 0)
    # Send 210 KOTO from 0 to 2 using sendtoaddress call.
    # Second transaction will be child of first, and will require a fee
    self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 210)
    self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 100)
    walletinfo = self.nodes[0].getwalletinfo()
    assert_equal(walletinfo['immature_balance'], 0)
    # Have node0 mine a block, thus it will collect its own fee.
    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()
    # Have node1 generate 100 blocks (so node0 can recover the fee)
    self.nodes[1].generate(100)
    self.sync_all()
    # node0 should end up with 50 btc in block rewards plus fees, but
    # minus the 21 plus fees sent to node2
    assert_equal(self.nodes[0].getbalance(), Decimal('3802477.99999922'))
    assert_equal(self.nodes[2].getbalance(), 310)
    assert_equal(self.nodes[0].getbalance("*"), Decimal('3802477.99999922'))
    assert_equal(self.nodes[2].getbalance("*"), 310)
    # Node0 should have three unspent outputs.
    # Create a couple of transactions to send them to node2, submit them through
    # node1, and make sure both node0 and node2 pick them up properly:
    node0utxos = self.nodes[0].listunspent(1)
    assert_equal(len(node0utxos), 3)
    # Check 'generated' field of listunspent
    # Node 0: has one coinbase utxo and two regular utxos
    assert_equal(sum(int(uxto["generated"] is True) for uxto in node0utxos), 1)
    # Node 1: has 101 coinbase utxos and no regular utxos
    node1utxos = self.nodes[1].listunspent(1)
    assert_equal(len(node1utxos), 101)
    assert_equal(sum(int(uxto["generated"] is True) for uxto in node1utxos), 101)
    # Node 2: has no coinbase utxos and two regular utxos
    node2utxos = self.nodes[2].listunspent(1)
    assert_equal(len(node2utxos), 2)
    assert_equal(sum(int(uxto["generated"] is True) for uxto in node2utxos), 0)
    # Catch an attempt to send a transaction with an absurdly high fee.
    # Send 10.0 from an utxo of value 100.0 but don't specify a change output, so then
    # the change of 90.0 becomes the fee, which is greater than estimated fee of 0.0019.
    inputs = []
    outputs = {}
    for utxo in node2utxos:
        if utxo["amount"] == Decimal("100.0"):
            break
    assert_equal(utxo["amount"], Decimal("100.0"))
    inputs.append({"txid": utxo["txid"], "vout": utxo["vout"]})
    outputs[self.nodes[2].getnewaddress("")] = Decimal("10.0")
    raw_tx = self.nodes[2].createrawtransaction(inputs, outputs)
    signed_tx = self.nodes[2].signrawtransaction(raw_tx)
    try:
        self.nodes[2].sendrawtransaction(signed_tx["hex"])
    except JSONRPCException as e:
        errorString = e.error['message']
    assert ("absurdly high fees" in errorString)
    assert ("9000000000 > 10000000" in errorString)
    # create both transactions
    txns_to_send = []
    for utxo in node0utxos:
        inputs = []
        outputs = {}
        inputs.append({"txid": utxo["txid"], "vout": utxo["vout"]})
        outputs[self.nodes[2].getnewaddress("")] = utxo["amount"]
        raw_tx = self.nodes[0].createrawtransaction(inputs, outputs)
        txns_to_send.append(self.nodes[0].signrawtransaction(raw_tx))
    # Have node 1 (miner) send the transactions
    self.nodes[1].sendrawtransaction(txns_to_send[0]["hex"], True)
    self.nodes[1].sendrawtransaction(txns_to_send[1]["hex"], True)
    self.nodes[1].sendrawtransaction(txns_to_send[2]["hex"], True)
    # Have node1 mine a block to confirm transactions:
    self.sync_all()
    self.nodes[1].generate(1)
    self.sync_all()
    assert_equal(self.nodes[0].getbalance(), 0)
    assert_equal(self.nodes[2].getbalance(), Decimal("3802787.99999922"))
    assert_equal(self.nodes[0].getbalance("*"), 0)
    assert_equal(self.nodes[2].getbalance("*"), Decimal("3802787.99999922"))
    # Send 10 KOTO normal
    address = self.nodes[0].getnewaddress("")
    self.nodes[2].settxfee(Decimal('0.001'))
    self.nodes[2].sendtoaddress(address, 10, "", "", False)
    self.sync_all()
    self.nodes[2].generate(1)
    self.sync_all()
    assert_equal(self.nodes[2].getbalance(), Decimal('3802777.99899922'))
    assert_equal(self.nodes[0].getbalance(), Decimal('10.00000000'))
    assert_equal(self.nodes[2].getbalance("*"), Decimal('3802777.99899922'))
    assert_equal(self.nodes[0].getbalance("*"), Decimal('10.00000000'))
    # Send 10 KOTO with subtract fee from amount
    self.nodes[2].sendtoaddress(address, 10, "", "", True)
    self.sync_all()
    self.nodes[2].generate(1)
    self.sync_all()
    assert_equal(self.nodes[2].getbalance(), Decimal('3802767.99899922'))
    assert_equal(self.nodes[0].getbalance(), Decimal('19.99900000'))
    assert_equal(self.nodes[2].getbalance("*"), Decimal('3802767.99899922'))
    assert_equal(self.nodes[0].getbalance("*"), Decimal('19.99900000'))
    # Sendmany 10 KOTO
    self.nodes[2].sendmany("", {address: 10}, 0, "", [])
    self.sync_all()
    self.nodes[2].generate(1)
    self.sync_all()
    assert_equal(self.nodes[2].getbalance(), Decimal('3802757.99799922'))
    assert_equal(self.nodes[0].getbalance(), Decimal('29.99900000'))
    assert_equal(self.nodes[2].getbalance("*"), Decimal('3802757.99799922'))
    assert_equal(self.nodes[0].getbalance("*"), Decimal('29.99900000'))
    # Sendmany 10 KOTO with subtract fee from amount
    self.nodes[2].sendmany("", {address: 10}, 0, "", [address])
    self.sync_all()
    self.nodes[2].generate(1)
    self.sync_all()
    assert_equal(self.nodes[2].getbalance(), Decimal('3802747.99799922'))
    assert_equal(self.nodes[0].getbalance(), Decimal('39.99800000'))
    assert_equal(self.nodes[2].getbalance("*"), Decimal('3802747.99799922'))
    assert_equal(self.nodes[0].getbalance("*"), Decimal('39.99800000'))
    # Test ResendWalletTransactions:
    # Create a couple of transactions, then start up a fourth
    # node (nodes[3]) and ask nodes[0] to rebroadcast.
    # EXPECT: nodes[3] should have those transactions in its mempool.
    txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1)
    txid2 = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
    sync_mempools(self.nodes)
    self.nodes.append(start_node(3, self.options.tmpdir))
    connect_nodes_bi(self.nodes, 0, 3)
    sync_blocks(self.nodes)
    relayed = self.nodes[0].resendwallettransactions()
    assert_equal(set(relayed), set([txid1, txid2]))
    sync_mempools(self.nodes)
    assert (txid1 in self.nodes[3].getrawmempool())
    # check if we can list zero value tx as available coins
    # 1. create rawtx
    # 2. hex-changed one output to 0.0
    # 3. sign and send
    # 4. check if recipient (node0) can list the zero value tx
    usp = self.nodes[1].listunspent()
    inputs = [{"txid": usp[0]['txid'], "vout": usp[0]['vout']}]
    outputs = {self.nodes[1].getnewaddress(): 96.998, self.nodes[0].getnewaddress(): 11.11}
    # Patch the serialized amount directly in the hex: replace 11.11 with 0.0 (int32)
    rawTx = self.nodes[1].createrawtransaction(inputs, outputs).replace("c0833842", "00000000")
    decRawTx = self.nodes[1].decoderawtransaction(rawTx)
    signedRawTx = self.nodes[1].signrawtransaction(rawTx)
    decRawTx = self.nodes[1].decoderawtransaction(signedRawTx['hex'])
    zeroValueTxid = decRawTx['txid']
    self.nodes[1].sendrawtransaction(signedRawTx['hex'])
    self.sync_all()
    self.nodes[1].generate(1)  # mine a block
    self.sync_all()
    unspentTxs = self.nodes[0].listunspent()  # zero value tx must be in listunspents output
    found = False
    for uTx in unspentTxs:
        if uTx['txid'] == zeroValueTxid:
            found = True
            assert_equal(uTx['amount'], Decimal('0.00000000'))
    assert (found)
    # do some -walletbroadcast tests
    stop_nodes(self.nodes)
    wait_bitcoinds()
    self.nodes = start_nodes(3, self.options.tmpdir, [["-walletbroadcast=0"], ["-walletbroadcast=0"], ["-walletbroadcast=0"]])
    connect_nodes_bi(self.nodes, 0, 1)
    connect_nodes_bi(self.nodes, 1, 2)
    connect_nodes_bi(self.nodes, 0, 2)
    self.sync_all()
    txIdNotBroadcasted = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2)
    txObjNotBroadcasted = self.nodes[0].gettransaction(txIdNotBroadcasted)
    self.sync_all()
    self.nodes[1].generate(1)  # mine a block, tx should not be in there
    self.sync_all()
    assert_equal(self.nodes[2].getbalance(), Decimal('3802747.99799922'))  # should not be changed because tx was not broadcasted
    assert_equal(self.nodes[2].getbalance("*"), Decimal('3802747.99799922'))  # should not be changed because tx was not broadcasted
    # now broadcast from another node, mine a block, sync, and check the balance
    self.nodes[1].sendrawtransaction(txObjNotBroadcasted['hex'])
    self.sync_all()
    self.nodes[1].generate(1)
    self.sync_all()
    txObjNotBroadcasted = self.nodes[0].gettransaction(txIdNotBroadcasted)
    assert_equal(self.nodes[2].getbalance(), Decimal('3802749.99799922'))  # should not be
    assert_equal(self.nodes[2].getbalance("*"), Decimal('3802749.99799922'))  # should not be
    # create another tx
    txIdNotBroadcasted = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2)
    # restart the nodes with -walletbroadcast=1
    stop_nodes(self.nodes)
    wait_bitcoinds()
    self.nodes = start_nodes(3, self.options.tmpdir)
    connect_nodes_bi(self.nodes, 0, 1)
    connect_nodes_bi(self.nodes, 1, 2)
    connect_nodes_bi(self.nodes, 0, 2)
    sync_blocks(self.nodes)
    self.nodes[0].generate(1)
    sync_blocks(self.nodes)
    # tx should be added to balance because after restarting the nodes tx should be broadcastet
    assert_equal(self.nodes[2].getbalance(), Decimal('3802751.99799922'))  # should not be
    assert_equal(self.nodes[2].getbalance("*"), Decimal('3802751.99799922'))  # should not be
    # send from node 0 to node 2 taddr
    mytaddr = self.nodes[2].getnewaddress()
    mytxid = self.nodes[0].sendtoaddress(mytaddr, 10.0)
    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()
    mybalance = self.nodes[2].z_getbalance(mytaddr)
    assert_equal(mybalance, Decimal('10.0'))
    # A transparent-only send must not contain any joinsplits.
    mytxdetails = self.nodes[2].gettransaction(mytxid)
    myvjoinsplits = mytxdetails["vjoinsplit"]
    assert_equal(0, len(myvjoinsplits))
    # z_sendmany is expected to fail if tx size breaks limit
    myzaddr = self.nodes[0].z_getnewaddress('sprout')
    recipients = []
    num_t_recipients = 1000
    num_z_recipients = 2100
    amount_per_recipient = Decimal('0.00000001')
    errorString = ''
    for i in range(0, num_t_recipients):
        newtaddr = self.nodes[2].getnewaddress()
        recipients.append({"address": newtaddr, "amount": amount_per_recipient})
    for i in range(0, num_z_recipients):
        newzaddr = self.nodes[2].z_getnewaddress('sprout')
        recipients.append({"address": newzaddr, "amount": amount_per_recipient})
    # Issue #2759 Workaround START
    # HTTP connection to node 0 may fall into a state, during the few minutes it takes to process
    # loop above to create new addresses, that when z_sendmany is called with a large amount of
    # rpc data in recipients, the connection fails with a 'broken pipe' error. Making a RPC call
    # to node 0 before calling z_sendmany appears to fix this issue, perhaps putting the HTTP
    # connection into a good state to handle a large amount of data in recipients.
    self.nodes[0].getinfo()
    # Issue #2759 Workaround END
    try:
        self.nodes[0].z_sendmany(myzaddr, recipients)
    except JSONRPCException as e:
        errorString = e.error['message']
    assert ("size of raw transaction would be larger than limit" in errorString)
    # add zaddr to node 2
    myzaddr = self.nodes[2].z_getnewaddress('sprout')
    # send node 2 taddr to zaddr
    recipients = []
    recipients.append({"address": myzaddr, "amount": 7})
    mytxid = wait_and_assert_operationid_status(self.nodes[2], self.nodes[2].z_sendmany(mytaddr, recipients))
    self.sync_all()
    self.nodes[2].generate(1)
    self.sync_all()
    # check balances
    zsendmanynotevalue = Decimal('7.0')
    zsendmanyfee = Decimal('0.0001')
    node2utxobalance = Decimal('3802761.99799922') - zsendmanynotevalue - zsendmanyfee
    assert_equal(self.nodes[2].getbalance(), node2utxobalance)
    assert_equal(self.nodes[2].getbalance("*"), node2utxobalance)
    # check zaddr balance
    assert_equal(self.nodes[2].z_getbalance(myzaddr), zsendmanynotevalue)
    # check via z_gettotalbalance
    resp = self.nodes[2].z_gettotalbalance()
    assert_equal(Decimal(resp["transparent"]), node2utxobalance)
    assert_equal(Decimal(resp["private"]), zsendmanynotevalue)
    assert_equal(Decimal(resp["total"]), node2utxobalance + zsendmanynotevalue)
    # there should be at least one joinsplit
    mytxdetails = self.nodes[2].gettransaction(mytxid)
    myvjoinsplits = mytxdetails["vjoinsplit"]
    assert_greater_than(len(myvjoinsplits), 0)
    # the first (probably only) joinsplit should take in all the public value
    myjoinsplit = self.nodes[2].getrawtransaction(mytxid, 1)["vjoinsplit"][0]
    assert_equal(myjoinsplit["vpub_old"], zsendmanynotevalue)
    assert_equal(myjoinsplit["vpub_new"], 0)
    assert ("onetimePubKey" in myjoinsplit.keys())
    assert ("randomSeed" in myjoinsplit.keys())
    assert ("ciphertexts" in myjoinsplit.keys())
    # send from private note to node 0 and node 2
    node0balance = self.nodes[0].getbalance()  # 25.99794745
    node2balance = self.nodes[2].getbalance()  # 16.99790000
    recipients = []
    recipients.append({"address": self.nodes[0].getnewaddress(), "amount": 1})
    recipients.append({"address": self.nodes[2].getnewaddress(), "amount": 1.0})
    wait_and_assert_operationid_status(self.nodes[2], self.nodes[2].z_sendmany(myzaddr, recipients))
    self.sync_all()
    self.nodes[2].generate(1)
    self.sync_all()
    node0balance += Decimal('1.0')
    node2balance += Decimal('1.0')
    assert_equal(Decimal(self.nodes[0].getbalance()), node0balance)
    assert_equal(Decimal(self.nodes[0].getbalance("*")), node0balance)
    assert_equal(Decimal(self.nodes[2].getbalance()), node2balance)
    assert_equal(Decimal(self.nodes[2].getbalance("*")), node2balance)
    # send a tx with value in a string (PR#6380 +)
    txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "2")
    txObj = self.nodes[0].gettransaction(txId)
    assert_equal(txObj['amount'], Decimal('-2.00000000'))
    txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "0.0001")
    txObj = self.nodes[0].gettransaction(txId)
    assert_equal(txObj['amount'], Decimal('-0.00010000'))
    # check if JSON parser can handle scientific notation in strings
    txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "1e-4")
    txObj = self.nodes[0].gettransaction(txId)
    assert_equal(txObj['amount'], Decimal('-0.00010000'))
    # this should fail
    errorString = ""
    try:
        txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "1f-4")
    except JSONRPCException as e:
        errorString = e.error['message']
    assert_equal("Invalid amount" in errorString, True)
    errorString = ""
    try:
        self.nodes[0].generate("2")  # use a string to as block amount parameter must fail because it's not interpreted as amount
    except JSONRPCException as e:
        errorString = e.error['message']
    assert_equal("not an integer" in errorString, True)
    myzaddr = self.nodes[0].z_getnewaddress('sprout')
    recipients = [{"address": myzaddr, "amount": Decimal('0.0')}]
    errorString = ''
    # Make sure that amount=0 transactions can use the default fee
    # without triggering "absurd fee" errors
    try:
        myopid = self.nodes[0].z_sendmany(myzaddr, recipients)
        assert (myopid)
    except JSONRPCException as e:
        errorString = e.error['message']
        print(errorString)
        assert (False)
    # This fee is larger than the default fee and since amount=0
    # it should trigger error
    fee = Decimal('0.1')
    recipients = [{"address": myzaddr, "amount": Decimal('0.0')}]
    minconf = 1
    errorString = ''
    try:
        myopid = self.nodes[0].z_sendmany(myzaddr, recipients, minconf, fee)
    except JSONRPCException as e:
        errorString = e.error['message']
    assert ('Small transaction amount' in errorString)
    # This fee is less than default and greater than amount, but still valid
    fee = Decimal('0.0000001')
    recipients = [{"address": myzaddr, "amount": Decimal('0.00000001')}]
    minconf = 1
    errorString = ''
    try:
        myopid = self.nodes[0].z_sendmany(myzaddr, recipients, minconf, fee)
        assert (myopid)
    except JSONRPCException as e:
        errorString = e.error['message']
        print(errorString)
        assert (False)
    # Make sure amount=0, fee=0 transaction are valid to add to mempool
    # though miners decide whether to add to a block
    fee = Decimal('0.0')
    minconf = 1
    recipients = [{"address": myzaddr, "amount": Decimal('0.0')}]
    errorString = ''
    try:
        myopid = self.nodes[0].z_sendmany(myzaddr, recipients, minconf, fee)
        assert (myopid)
    except JSONRPCException as e:
        errorString = e.error['message']
        print(errorString)
        assert (False)