Example #1
    def _test_getchaintxstats(self):
        chaintxstats = self.nodes[0].getchaintxstats(1)
        # 200 txs plus genesis tx
        assert_equal(chaintxstats['txcount'], 201)
        # tx rate should be 1 per 10 minutes, or 1/600
        # we have to round because of binary math
        assert_equal(round(chaintxstats['txrate'] * 600, 10), Decimal(1))

        b1 = self.nodes[0].getblock(self.nodes[0].getblockhash(1))
        b200 = self.nodes[0].getblock(self.nodes[0].getblockhash(200))
        time_diff = b200['mediantime'] - b1['mediantime']

        chaintxstats = self.nodes[0].getchaintxstats()
        assert_equal(chaintxstats['time'], b200['time'])
        assert_equal(chaintxstats['txcount'], 201)
        assert_equal(chaintxstats['window_block_count'], 199)
        assert_equal(chaintxstats['window_tx_count'], 199)
        assert_equal(chaintxstats['window_interval'], time_diff)
        assert_equal(round(chaintxstats['txrate'] * time_diff, 10), Decimal(199))

        chaintxstats = self.nodes[0].getchaintxstats(blockhash=b1['hash'])
        assert_equal(chaintxstats['time'], b1['time'])
        assert_equal(chaintxstats['txcount'], 2)
        assert_equal(chaintxstats['window_block_count'], 0)
        assert('window_tx_count' not in chaintxstats)
        assert('window_interval' not in chaintxstats)
        assert('txrate' not in chaintxstats)

        assert_raises_rpc_error(-8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, 201)
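Every example in this collection leans on the test framework's assert_raises_rpc_error helper. For reference, a minimal sketch of how such a helper can be written; the real version lives in the framework's util module and may differ in details, and the JSONRPCException import path shown here is an assumption.

from test_framework.authproxy import JSONRPCException  # assumed import path


def assert_raises_rpc_error(code, message, fun, *args, **kwds):
    """Check that fun(*args, **kwds) raises a JSON-RPC error whose code matches
    `code` and whose message contains `message`; either check is skipped when
    the expected value is None."""
    try:
        fun(*args, **kwds)
    except JSONRPCException as e:
        if code is not None and e.error["code"] != code:
            raise AssertionError("unexpected RPC error code: %r" % e.error["code"])
        if message is not None and message not in e.error["message"]:
            raise AssertionError("unexpected RPC error message: %r" % e.error["message"])
    else:
        raise AssertionError("expected RPC call to fail, but it succeeded")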
Example #2
def test_auxpow(nodes):
    """
    Test behaviour of getauxpow.  Calling getauxpow should reserve
    a key from the pool, but it should be released again if the
    created block is not actually used.  On the other hand, if the
    auxpow is submitted and turned into a block, the keypool should
    be drained.
    """

    nodes[0].walletpassphrase('test', 12000)
    nodes[0].keypoolrefill(1)
    nodes[0].walletlock()
    assert_equal (nodes[0].getwalletinfo()['keypoolsize'], 1)

    nodes[0].getauxblock()
    assert_equal (nodes[0].getwalletinfo()['keypoolsize'], 1)
    nodes[0].generate(1)
    assert_equal (nodes[0].getwalletinfo()['keypoolsize'], 1)
    auxblock = nodes[0].getauxblock()
    assert_equal (nodes[0].getwalletinfo()['keypoolsize'], 1)

    target = reverseHex(auxblock['_target'])
    solved = computeAuxpow(auxblock['hash'], target, True)
    res = nodes[0].getauxblock(auxblock['hash'], solved)
    assert res
    assert_equal(nodes[0].getwalletinfo()['keypoolsize'], 0)

    assert_raises_rpc_error(-12, 'Keypool ran out', nodes[0].getauxblock)
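The auxpow test flips the byte order of the reported _target with a reverseHex helper before handing it to the proof-of-work solver. A plausible implementation, shown only as an illustration:

import binascii


def reverseHex(data):
    """Return the hex string with its byte order reversed, e.g. 'aabbcc' -> 'ccbbaa'."""
    b = bytearray(binascii.unhexlify(data))
    b.reverse()
    return binascii.hexlify(b).decode("ascii")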
Example #3
    def _test_getblockheader(self):
        node = self.nodes[0]

        assert_raises_rpc_error(-5, "Block not found",
                              node.getblockheader, "nonsense")

        besthash = node.getbestblockhash()
        secondbesthash = node.getblockhash(199)
        header = node.getblockheader(besthash)

        assert_equal(header['hash'], besthash)
        assert_equal(header['height'], 200)
        assert_equal(header['confirmations'], 1)
        assert_equal(header['previousblockhash'], secondbesthash)
        assert_is_hex_string(header['chainwork'])
        assert_is_hash_string(header['hash'])
        assert_is_hash_string(header['previousblockhash'])
        assert_is_hash_string(header['merkleroot'])
        assert_is_hash_string(header['bits'], length=None)
        assert isinstance(header['time'], int)
        assert isinstance(header['mediantime'], int)
        assert isinstance(header['nonce'], int)
        assert isinstance(header['version'], int)
        assert isinstance(int(header['versionHex'], 16), int)
        assert isinstance(header['difficulty'], Decimal)
Example #4
    def run_test(self):
        node = self.nodes[0]

        self.log.info("test getmemoryinfo")
        memory = node.getmemoryinfo()['locked']
        assert_greater_than(memory['used'], 0)
        assert_greater_than(memory['free'], 0)
        assert_greater_than(memory['total'], 0)
        # assert_greater_than_or_equal() for locked in case locking pages failed at some point
        assert_greater_than_or_equal(memory['locked'], 0)
        assert_greater_than(memory['chunks_used'], 0)
        assert_greater_than(memory['chunks_free'], 0)
        assert_equal(memory['used'] + memory['free'], memory['total'])

        self.log.info("test mallocinfo")
        try:
            mallocinfo = node.getmemoryinfo(mode="mallocinfo")
            self.log.info('getmemoryinfo(mode="mallocinfo") call succeeded')
            tree = ET.fromstring(mallocinfo)
            assert_equal(tree.tag, 'malloc')
        except JSONRPCException:
            self.log.info('getmemoryinfo(mode="mallocinfo") not available')
            assert_raises_rpc_error(-8, 'mallocinfo is only available when compiled with glibc 2.10+', node.getmemoryinfo, mode="mallocinfo")

        assert_raises_rpc_error(-8, "unknown mode foobar", node.getmemoryinfo, mode="foobar")
Example #5
    def run_test(self):
        chain_height = self.nodes[0].getblockcount()
        assert_equal(chain_height, 200)
        node0_address = self.nodes[0].getnewaddress()

        # Coinbase at height chain_height-100+1 ok in mempool, should
        # get mined. Coinbase at height chain_height-100+2 is
        # too immature to spend.
        b = [self.nodes[0].getblockhash(n) for n in range(101, 103)]
        coinbase_txids = [self.nodes[0].getblock(h)['tx'][0] for h in b]
        spends_raw = [create_raw_transaction(self.nodes[0], txid, node0_address, amount=49.99) for txid in coinbase_txids]

        spend_101_id = self.nodes[0].sendrawtransaction(spends_raw[0])

        # coinbase at height 102 should be too immature to spend
        assert_raises_rpc_error(-26,"bad-txns-premature-spend-of-coinbase", self.nodes[0].sendrawtransaction, spends_raw[1])

        # mempool should have just spend_101:
        assert_equal(self.nodes[0].getrawmempool(), [ spend_101_id ])

        # mine a block, spend_101 should get confirmed
        self.nodes[0].generate(1)
        assert_equal(set(self.nodes[0].getrawmempool()), set())

        # ... and now height 102 can be spent:
        spend_102_id = self.nodes[0].sendrawtransaction(spends_raw[1])
        assert_equal(self.nodes[0].getrawmempool(), [ spend_102_id ])
Example #6
def test_small_output_fails(rbf_node, dest_address):
    # cannot bump fee with a too-small output
    rbfid = spend_one_input(rbf_node, dest_address)
    rbf_node.bumpfee(rbfid, {"totalFee": 50000})

    rbfid = spend_one_input(rbf_node, dest_address)
    assert_raises_rpc_error(-4, "Change output is too small", rbf_node.bumpfee, rbfid, {"totalFee": 50001})
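This example, like #13 and #23 below, relies on a spend_one_input helper that creates a small BIP 125 (opt-in RBF) transaction for bumpfee to work on. A simplified sketch under the assumption that the wallet holds 0.001-coin funding UTXOs; the exact amounts, change size and sequence constant are illustrative:

from decimal import Decimal

BIP125_SEQUENCE_NUMBER = 0xfffffffd  # signals opt-in replaceability


def spend_one_input(node, dest_address, change_size=Decimal("0.00049000")):
    # Pick a single funding UTXO of a known size and signal replaceability.
    tx_input = dict(sequence=BIP125_SEQUENCE_NUMBER,
                    **next(u for u in node.listunspent()
                           if u["amount"] == Decimal("0.00100000")))
    destinations = {dest_address: Decimal("0.00050000"),
                    node.getrawchangeaddress(): change_size}
    rawtx = node.createrawtransaction([tx_input], destinations)
    signedtx = node.signrawtransactionwithwallet(rawtx)
    return node.sendrawtransaction(signedtx["hex"])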
Example #7
    def test_with_lock_outputs(self):
        """Test correct error reporting when trying to sign a locked output"""
        self.nodes[0].encryptwallet("password")

        rawTx = '020000000156b958f78e3f24e0b2f4e4db1255426b0902027cb37e3ddadb52e37c3557dddb0000000000ffffffff01c0a6b929010000001600149a2ee8c77140a053f36018ac8124a6ececc1668a00000000'

        assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].signrawtransactionwithwallet, rawTx)
Example #8
    def run_test (self):
        tmpdir = self.options.tmpdir

        # generate 20 addresses to compare against the dump
        test_addr_count = 20
        addrs = []
        for i in range(0, test_addr_count):
            addr = self.nodes[0].getnewaddress()
            vaddr = self.nodes[0].validateaddress(addr)  # required to get hd keypath
            addrs.append(vaddr)
        # Should be a no-op:
        self.nodes[0].keypoolrefill()

        # Test scripts dump by adding a P2SH witness and a 1-of-1 multisig address
        witness_addr = self.nodes[0].addwitnessaddress(addrs[0]["address"], True)
        multisig_addr = self.nodes[0].addmultisigaddress(1, [addrs[1]["address"]])
        script_addrs = [witness_addr, multisig_addr]

        # dump unencrypted wallet
        result = self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.unencrypted.dump")
        assert_equal(result['filename'], os.path.abspath(tmpdir + "/node0/wallet.unencrypted.dump"))

        found_addr, found_script_addr, found_addr_chg, found_addr_rsv, hd_master_addr_unenc = \
            read_dump(tmpdir + "/node0/wallet.unencrypted.dump", addrs, script_addrs, None)
        assert_equal(found_addr, test_addr_count)  # all keys must be in the dump
        assert_equal(found_script_addr, 2)  # all scripts must be in the dump
        assert_equal(found_addr_chg, 50)  # 50 blocks were mined
        assert_equal(found_addr_rsv, 90*2) # 90 keys plus 100% internal keys

        #encrypt wallet, restart, unlock and dump
        self.nodes[0].node_encrypt_wallet('test')
        self.start_node(0)
        self.nodes[0].walletpassphrase('test', 10)
        # Should be a no-op:
        self.nodes[0].keypoolrefill()
        self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.encrypted.dump")

        found_addr, found_script_addr, found_addr_chg, found_addr_rsv, _ = \
            read_dump(tmpdir + "/node0/wallet.encrypted.dump", addrs, script_addrs, hd_master_addr_unenc)
        assert_equal(found_addr, test_addr_count)
        assert_equal(found_script_addr, 2)
        assert_equal(found_addr_chg, 90*2 + 50)  # old reserve keys are marked as change now
        assert_equal(found_addr_rsv, 90*2) 

        # Overwriting should fail
        assert_raises_rpc_error(-8, "already exists", self.nodes[0].dumpwallet, tmpdir + "/node0/wallet.unencrypted.dump")

        # Restart node with new wallet, and test importwallet
        self.stop_node(0)
        self.start_node(0, ['-wallet=w2'])

        # Make sure the address is not IsMine before import
        result = self.nodes[0].validateaddress(multisig_addr)
        assert(result['ismine'] == False)

        self.nodes[0].importwallet(os.path.abspath(tmpdir + "/node0/wallet.unencrypted.dump"))

        # Now check IsMine is true
        result = self.nodes[0].validateaddress(multisig_addr)
        assert(result['ismine'] == True)
Example #9
    def _test_getnodeaddresses(self):
        self.nodes[0].add_p2p_connection(P2PInterface())

        # send some addresses to the node via the p2p message addr
        msg = msg_addr()
        imported_addrs = []
        for i in range(256):
            a = "123.123.123.{}".format(i)
            imported_addrs.append(a)
            addr = CAddress()
            addr.time = 100000000
            addr.nServices = NODE_NETWORK | NODE_WITNESS
            addr.ip = a
            addr.port = 21102
            msg.addrs.append(addr)
        self.nodes[0].p2p.send_and_ping(msg)

        # obtain addresses via rpc call and check they were ones sent in before
        REQUEST_COUNT = 10
        node_addresses = self.nodes[0].getnodeaddresses(REQUEST_COUNT)
        assert_equal(len(node_addresses), REQUEST_COUNT)
        for a in node_addresses:
            assert_greater_than(a["time"], 1527811200)  # 1st June 2018
            assert_equal(a["services"], NODE_NETWORK | NODE_WITNESS)
            assert a["address"] in imported_addrs
            assert_equal(a["port"], 21102)

        assert_raises_rpc_error(-8, "Address count out of range", self.nodes[0].getnodeaddresses, -1)

        # addrman's size cannot be known reliably after insertion, as hash collisions may occur,
        # so only test that requesting a large number of addresses returns less than that
        LARGE_REQUEST_COUNT = 10000
        node_addresses = self.nodes[0].getnodeaddresses(LARGE_REQUEST_COUNT)
        assert_greater_than(LARGE_REQUEST_COUNT, len(node_addresses))
Example #10
  def test_create_submit_auxblock (self):
    """
    Test the createauxblock / submitauxblock method pair.
    """

    # Check for errors with wrong parameters.
    assert_raises_rpc_error (-1, None, self.nodes[0].createauxblock)
    assert_raises_rpc_error (-5, "Invalid coinbase payout address",
                             self.nodes[0].createauxblock,
                             "this_an_invalid_address")

    # Fix a coinbase address and construct methods for it.
    coinbaseAddr = self.nodes[0].getnewaddress ()
    def create ():
      return self.nodes[0].createauxblock (coinbaseAddr)
    submit = self.nodes[0].submitauxblock

    # Run common tests.
    self.test_common (create, submit)

    # Ensure that the payout address is the one which we specify
    hash1 = mineAuxpowBlockWithMethods (create, submit)
    hash2 = mineAuxpowBlockWithMethods (create, submit)
    self.sync_all ()
    addr1 = getCoinbaseAddr (self.nodes[1], hash1)
    addr2 = getCoinbaseAddr (self.nodes[1], hash2)
    assert_equal (addr1, coinbaseAddr)
    assert_equal (addr2, coinbaseAddr)
Example #11
    def mine_block(self, make_transactions):
        # mine block in round robin sense: depending on the block number, a node
        # is selected to create the block, others sign it and the selected node
        # broadcasts it
        mineridx = self.nodes[0].getblockcount() % self.num_nodes # assuming in sync
        mineridx_next = (self.nodes[0].getblockcount() + 1) % self.num_nodes
        miner = self.nodes[mineridx]
        miner_next = self.nodes[mineridx_next]
        blockcount = miner.getblockcount()

        # Make a few transactions to make non-empty blocks for compact transmission
        if make_transactions:
            print(mineridx)
            for i in range(5):
                miner.sendtoaddress(miner_next.getnewaddress(), int(miner.getbalance()['bitcoin']/10), "", "", True)
        # miner makes a block
        block = miner.getnewblockhex()

        # other signing nodes get fed compact blocks
        for i in range(self.num_keys):
            if i == mineridx:
                continue
            sketch = miner.getcompactsketch(block)
            compact_response = self.nodes[i].consumecompactsketch(sketch)
            if make_transactions:
                block_txn =  self.nodes[i].consumegetblocktxn(block, compact_response["block_tx_req"])
                final_block = self.nodes[i].finalizecompactblock(sketch, block_txn, compact_response["found_transactions"])
            else:
                # If there's only coinbase, it should succeed immediately
                final_block = compact_response["blockhex"]
            # Block should be complete, sans signatures
            self.nodes[i].testproposedblock(final_block)

        # non-signing node can not sign
        assert_raises_rpc_error(-25, "Could not sign the block.", self.nodes[-1].signblock, block)

        # collect num_keys signatures from signers, reduce to required_signers sigs during combine
        sigs = []
        for i in range(self.num_keys):
            result = miner.combineblocksigs(block, sigs)
            sigs = sigs + self.nodes[i].signblock(block)
            assert_equal(result["complete"], i >= self.required_signers)
            # submitting should have no effect pre-threshold
            if i < self.required_signers:
                miner.submitblock(result["hex"])
                self.check_height(blockcount)

        result = miner.combineblocksigs(block, sigs)
        assert_equal(result["complete"], True)

        # All signing nodes must submit... we're not connected!
        self.nodes[0].submitblock(result["hex"])
        early_proposal = self.nodes[0].getnewblockhex() # testproposedblock should reject
        # Submit blocks to all other signing nodes next, as well as too-early block proposal
        for i in range(1, self.num_keys):
            assert_raises_rpc_error(-25, "proposal was not based on our best chain", self.nodes[i].testproposedblock, early_proposal)
            self.nodes[i].submitblock(result["hex"])

        # All nodes should be synced in blocks and transactions(mempool should be empty)
        self.sync_all()
Example #12
 def check_addmultisigaddress_errors(self):
     self.log.info('Check that addmultisigaddress fails when the private keys are missing')
     addresses = [self.nodes[1].getnewaddress(address_type='legacy') for _ in range(2)]
     assert_raises_rpc_error(-5, 'no full public key for address', lambda: self.nodes[0].addmultisigaddress(nrequired=1, keys=addresses))
     for a in addresses:
         # Importing all addresses should not change the result
         self.nodes[0].importaddress(a)
     assert_raises_rpc_error(-5, 'no full public key for address', lambda: self.nodes[0].addmultisigaddress(nrequired=1, keys=addresses))
Example #13
def test_bumpfee_with_descendant_fails(rbf_node, rbf_node_address, dest_address):
    # cannot bump fee if the transaction has a descendant
    # parent is send-to-self, so we don't have to check which output is change when creating the child tx
    parent_id = spend_one_input(rbf_node, rbf_node_address)
    tx = rbf_node.createrawtransaction([{"txid": parent_id, "vout": 0}], {dest_address: 0.00020000})
    tx = rbf_node.signrawtransactionwithwallet(tx)
    rbf_node.sendrawtransaction(tx["hex"])
    assert_raises_rpc_error(-8, "Transaction has descendants in the wallet", rbf_node.bumpfee, parent_id)
Example #14
  def run_test (self):
    self.node = self.nodes[0]

    # Register a test name to a bech32 pure-segwit address.
    addr = self.node.getnewaddress ("test", "bech32")
    name = "d/test"
    value = "{}"
    new = self.node.name_new (name)
    self.node.generate (10)
    self.firstupdateName (0, name, new, value, {"destAddress": addr})
    self.node.generate (5)
    self.checkNameValueAddr (name, value, addr)

    # Before segwit activation, the script should behave as anyone-can-spend.
    # It will still fail due to non-mandatory flag checks when submitted
    # into the mempool.
    assert_greater_than (SEGWIT_ACTIVATION_HEIGHT, self.node.getblockcount ())
    assert_raises_rpc_error (-26, 'Script failed an OP_EQUALVERIFY operation',
                             self.tryUpdateSegwitName,
                             name, "wrong value", addr)
    self.node.generate (1)
    self.checkNameValueAddr (name, value, addr)

    # But directly in a block, the update should work with a dummy witness.
    assert_equal (self.tryUpdateInBlock (name, "stolen", addr,
                                         withWitness=False),
                  None)
    self.checkNameValueAddr (name, "stolen", addr)

    # Activate segwit.  Since this makes the original name expire, we have
    # to re-register it.
    self.node.generate (400)
    new = self.node.name_new (name)
    self.node.generate (10)
    self.firstupdateName (0, name, new, value, {"destAddress": addr})
    self.node.generate (5)
    self.checkNameValueAddr (name, value, addr)

    # Verify that now trying to update the name without a proper signature
    # fails differently.
    assert_greater_than (self.node.getblockcount (), SEGWIT_ACTIVATION_HEIGHT)
    assert_equal (self.tryUpdateInBlock (name, "wrong value", addr,
                                         withWitness=True),
                  'non-mandatory-script-verify-flag'
                    + ' (Script failed an OP_EQUALVERIFY operation)')
    self.checkNameValueAddr (name, value, addr)

    # Updating the name ordinarily (with signature) should work fine even
    # though it is at a segwit address.  Also spending from P2SH-segwit
    # should work fine.
    addrP2SH = self.node.getnewaddress ("test", "p2sh-segwit")
    self.node.name_update (name, "value 2", {"destAddress": addrP2SH})
    self.node.generate (1)
    self.checkNameValueAddr (name, "value 2", addrP2SH)
    self.node.name_update (name, "value 3", {"destAddress": addr})
    self.node.generate (1)
    self.checkNameValueAddr (name, "value 3", addr)
Example #15
    def test_prioritised_transactions(self):
        # Ensure that fee deltas used via prioritisetransaction are
        # correctly used by replacement logic

        # 1. Check that feeperkb uses modified fees
        tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))

        tx1a = CTransaction()
        tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1a.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))]
        tx1a_hex = txToHex(tx1a)
        tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True)

        # Higher fee, but the actual fee per KB is much lower.
        tx1b = CTransaction()
        tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1b.vout = [CTxOut(int(0.001*COIN), CScript([b'a'*740000]))]
        tx1b_hex = txToHex(tx1b)

        # Verify tx1b cannot replace tx1a.
        assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, True)

        # Use prioritisetransaction to set tx1a's fee to 0.
        self.nodes[0].prioritisetransaction(txid=tx1a_txid, fee_delta=int(-0.1*COIN))

        # Now tx1b should be able to replace tx1a
        tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)

        assert(tx1b_txid in self.nodes[0].getrawmempool())

        # 2. Check that absolute fee checks use modified fee.
        tx1_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))

        tx2a = CTransaction()
        tx2a.vin = [CTxIn(tx1_outpoint, nSequence=0)]
        tx2a.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))]
        tx2a_hex = txToHex(tx2a)
        self.nodes[0].sendrawtransaction(tx2a_hex, True)

        # Lower fee, but we'll prioritise it
        tx2b = CTransaction()
        tx2b.vin = [CTxIn(tx1_outpoint, nSequence=0)]
        tx2b.vout = [CTxOut(int(1.01 * COIN), CScript([b'a' * 35]))]
        tx2b.rehash()
        tx2b_hex = txToHex(tx2b)

        # Verify tx2b cannot replace tx2a.
        assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx2b_hex, True)

        # Now prioritise tx2b to have a higher modified fee
        self.nodes[0].prioritisetransaction(txid=tx2b.hash, fee_delta=int(0.1*COIN))

        # tx2b should now be accepted
        tx2b_txid = self.nodes[0].sendrawtransaction(tx2b_hex, True)

        assert(tx2b_txid in self.nodes[0].getrawmempool())
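The replacement tests above and below serialize hand-built CTransaction objects with a txToHex helper before submitting them over RPC. It can be as small as this sketch (the framework may instead route through its own bytes-to-hex wrapper):

def txToHex(tx):
    """Serialize a CTransaction into the hex string expected by sendrawtransaction."""
    return tx.serialize().hex()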
Example #16
 def run_test(self):
     node = self.nodes[0]
     self.log.info("Test disableprivatekeys creation.")
     self.nodes[0].createwallet('w1', True)
     self.nodes[0].createwallet('w2')
     w1 = node.get_wallet_rpc('w1')
     w2 = node.get_wallet_rpc('w2')
     assert_raises_rpc_error(-4,"Error: Private keys are disabled for this wallet", w1.getnewaddress)
     assert_raises_rpc_error(-4,"Error: Private keys are disabled for this wallet", w1.getrawchangeaddress)
     w1.importpubkey(w2.getaddressinfo(w2.getnewaddress())['pubkey'])
Example #17
    def run_test(self):
        self.log.info("estimatefee: Shows deprecated message")
        assert_raises_rpc_error(-32, 'estimatefee is deprecated', self.nodes[0].estimatefee, 1)

        self.log.info("Using -deprecatedrpc=estimatefee bypasses the error")
        self.nodes[1].estimatefee(1)

        self.log.info("Make sure that -deprecatedrpc=createmultisig allows it to take addresses")
        assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, [self.nodes[0].getnewaddress()])
        self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])
Example #18
    def _run_nonloopback_tests(self):
        self.log.info("Using interface %s for testing" % self.non_loopback_ip)

        # check only non-loopback interface
        self.run_bind_test([self.non_loopback_ip], self.non_loopback_ip, [self.non_loopback_ip],
            [(self.non_loopback_ip, self.defaultport)])

        # Check that with invalid rpcallowip, we are denied
        self.run_allowip_test([self.non_loopback_ip], self.non_loopback_ip, self.defaultport)
        assert_raises_rpc_error(-342, "non-JSON HTTP response with '403 Forbidden' from server", self.run_allowip_test, ['1.1.1.1'], self.non_loopback_ip, self.defaultport)
Example #19
 def _test_getaddednodeinfo(self):
     assert_equal(self.nodes[0].getaddednodeinfo(), [])
     # add a node (node2) to node0
     ip_port = "127.0.0.1:{}".format(p2p_port(2))
     self.nodes[0].addnode(ip_port, 'add')
     # check that the node has indeed been added
     added_nodes = self.nodes[0].getaddednodeinfo(ip_port)
     assert_equal(len(added_nodes), 1)
     assert_equal(added_nodes[0]['addednode'], ip_port)
     # check that a non-existent node returns an error
     assert_raises_rpc_error(-24, "Node has not been added", self.nodes[0].getaddednodeinfo, '1.1.1.1')
Example #20
    def run_test (self):
        # Make sure wallet is really disabled
        assert_raises_rpc_error(-32601, 'Method not found', self.nodes[0].getwalletinfo)
        x = self.nodes[0].validateaddress('3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy')
        assert(x['isvalid'] == False)
        x = self.nodes[0].validateaddress('mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ')
        assert(x['isvalid'] == True)

        # Checking mining to an address without a wallet. Generating to a valid address should succeed
        # but generating to an invalid address will fail.
        self.nodes[0].generatetoaddress(1, 'mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ')
        assert_raises_rpc_error(-5, "Invalid address", self.nodes[0].generatetoaddress, 1, '3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy')
Example #21
    def run_test(self):
        # Should raise RPC_WALLET_ERROR (-4) if walletbroadcast is disabled.
        assert_raises_rpc_error(-4, "Error: Wallet transaction broadcasting is disabled with -walletbroadcast", self.nodes[0].resendwallettransactions)

        # Should return an empty array if there aren't unconfirmed wallet transactions.
        self.stop_node(0)
        self.start_node(0, extra_args=[])
        assert_equal(self.nodes[0].resendwallettransactions(), [])

        # Should return an array with the unconfirmed wallet transaction.
        txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
        assert_equal(self.nodes[0].resendwallettransactions(), [txid])
Example #22
    def test_too_many_replacements(self):
        """Replacements that evict too many transactions are rejected"""
        # Try directly replacing more than MAX_REPLACEMENT_LIMIT
        # transactions

        # Start by creating a single transaction with many outputs
        initial_nValue = 10*COIN
        utxo = make_utxo(self.nodes[0], initial_nValue)
        fee = int(0.0001*COIN)
        split_value = int((initial_nValue-fee)/(MAX_REPLACEMENT_LIMIT+1))

        outputs = []
        for i in range(MAX_REPLACEMENT_LIMIT+1):
            outputs.append(CTxOut(split_value, CScript([1])))

        splitting_tx = CTransaction()
        splitting_tx.vin = [CTxIn(utxo, nSequence=0)]
        splitting_tx.vout = outputs
        splitting_tx_hex = txToHex(splitting_tx)

        txid = self.nodes[0].sendrawtransaction(splitting_tx_hex, True)
        txid = int(txid, 16)

        # Now spend each of those outputs individually
        for i in range(MAX_REPLACEMENT_LIMIT+1):
            tx_i = CTransaction()
            tx_i.vin = [CTxIn(COutPoint(txid, i), nSequence=0)]
            tx_i.vout = [CTxOut(split_value - fee, CScript([b'a' * 35]))]
            tx_i_hex = txToHex(tx_i)
            self.nodes[0].sendrawtransaction(tx_i_hex, True)

        # Now create doublespend of the whole lot; should fail.
        # Need a big enough fee to cover all spending transactions and have
        # a higher fee rate
        double_spend_value = (split_value-100*fee)*(MAX_REPLACEMENT_LIMIT+1)
        inputs = []
        for i in range(MAX_REPLACEMENT_LIMIT+1):
            inputs.append(CTxIn(COutPoint(txid, i), nSequence=0))
        double_tx = CTransaction()
        double_tx.vin = inputs
        double_tx.vout = [CTxOut(double_spend_value, CScript([b'a']))]
        double_tx_hex = txToHex(double_tx)

        # This will raise an exception
        assert_raises_rpc_error(-26, "too many potential replacements", self.nodes[0].sendrawtransaction, double_tx_hex, True)

        # If we remove an input, it should pass
        double_tx = CTransaction()
        double_tx.vin = inputs[0:-1]
        double_tx.vout = [CTxOut(double_spend_value, CScript([b'a']))]
        double_tx_hex = txToHex(double_tx)
        self.nodes[0].sendrawtransaction(double_tx_hex, True)
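The fee arithmetic behind the doublespend above can be checked in isolation. The snippet below reproduces it with the constants the test appears to assume (MAX_REPLACEMENT_LIMIT = 100 and COIN = 100,000,000 satoshis are assumptions) and confirms the doublespend pays far more in fees than the transactions it would evict:

COIN = 100000000                 # satoshis per coin
MAX_REPLACEMENT_LIMIT = 100      # assumed framework constant

initial_nValue = 10 * COIN
fee = int(0.0001 * COIN)
split_value = int((initial_nValue - fee) / (MAX_REPLACEMENT_LIMIT + 1))
double_spend_value = (split_value - 100 * fee) * (MAX_REPLACEMENT_LIMIT + 1)

# Fee paid by the doublespend = sum of its inputs minus its single output.
fee_paid = split_value * (MAX_REPLACEMENT_LIMIT + 1) - double_spend_value
# It comfortably exceeds the combined fees of the transactions it replaces.
assert fee_paid > fee * (MAX_REPLACEMENT_LIMIT + 1)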
Example #23
def test_settxfee(rbf_node, dest_address):
    assert_raises_rpc_error(-8, "txfee cannot be less than min relay tx fee", rbf_node.settxfee, Decimal('0.000005'))
    assert_raises_rpc_error(-8, "txfee cannot be less than wallet min fee", rbf_node.settxfee, Decimal('0.000015'))
    # check that bumpfee reacts correctly to the use of settxfee (paytxfee)
    rbfid = spend_one_input(rbf_node, dest_address)
    requested_feerate = Decimal("0.00025000")
    rbf_node.settxfee(requested_feerate)
    bumped_tx = rbf_node.bumpfee(rbfid)
    actual_feerate = bumped_tx["fee"] * 1000 / rbf_node.getrawtransaction(bumped_tx["txid"], True)["vsize"]
    # Assert that the difference between the requested feerate and the actual
    # feerate of the bumped transaction is small.
    assert_greater_than(Decimal("0.00001000"), abs(requested_feerate - actual_feerate))
    rbf_node.settxfee(Decimal("0.00000000"))  # unset paytxfee
Example #24
  def invalidValue (self, value, encoding):
    """
    Runs tests to check that the various RPC methods treat the given value
    as invalid in the encoding.
    """

    self.setEncodings (valueEnc=encoding)

    assert_raises_rpc_error (-1000, "Name/value is invalid",
                             self.node.name_firstupdate,
                             self.name, "00", 32 * "00", value)
    assert_raises_rpc_error (-1000, "Name/value is invalid",
                             self.node.name_update, self.name, value)

    nameFirst = {
      "op": "name_firstupdate",
      "name": self.name,
      "value": value,
      "rand": "00",
    }
    assert_raises_rpc_error (-1000, "Name/value is invalid",
                             self.nameRawTx, None, nameFirst)
    nameUpd = {
      "op": "name_update",
      "name": self.name,
      "value": value,
    }
    assert_raises_rpc_error (-1000, "Name/value is invalid",
                             self.nameRawTx, None, nameUpd)
Example #25
    def run_test(self):
        self.log.info("Mining blocks...")
        self.nodes[0].generate(1)
        self.sync_all()
        self.nodes[1].generate(100)
        self.sync_all()

        # This transaction will be confirmed
        txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 10)

        self.nodes[0].generate(1)
        self.sync_all()

        # This transaction will not be confirmed
        txid2 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 20)

        # Confirmed and unconfirmed transactions are now in the wallet.
        assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
        assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)

        # Stop-start node0. Both confirmed and unconfirmed transactions remain in the wallet.
        self.stop_node(0)
        self.start_node(0)

        assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
        assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)

        # Stop node0 and restart with zapwallettxes and persistmempool. The unconfirmed
        # transaction is zapped from the wallet, but is re-added when the mempool is reloaded.
        self.stop_node(0)
        self.start_node(0, ["-persistmempool=1", "-zapwallettxes=2"])

        wait_until(lambda: self.nodes[0].getmempoolinfo()[
                   'size'] == 1, timeout=3)

        assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
        assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)

        # Stop node0 and restart with zapwallettxes, but not persistmempool.
        # The unconfirmed transaction is zapped and is no longer in the wallet.
        self.stop_node(0)
        self.start_node(0, ["-zapwallettxes=2"])

        # tx1 is still available because it was confirmed
        assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)

        # This will raise an exception because the unconfirmed transaction has been zapped
        assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id',
                                self.nodes[0].gettransaction, txid2)
Example #26
    def run_test(self):
        node = self.nodes[0]
        h = node.help(command='getblockchaininfo')
        assert(h.startswith('getblockchaininfo\n'))

        assert_raises_rpc_error(-8, 'Unknown named parameter', node.help, random='getblockchaininfo')

        h = node.getblockhash(height=0)
        node.getblock(blockhash=h)

        assert_equal(node.echo(), [])
        assert_equal(node.echo(arg0=0,arg9=9), [0] + [None]*8 + [9])
        assert_equal(node.echo(arg1=1), [None, 1])
        assert_equal(node.echo(arg9=None), [None]*10)
        assert_equal(node.echo(arg0=0,arg3=3,arg9=9), [0] + [None]*2 + [3] + [None]*5 + [9])
Example #27
    def run_test(self):
        tmpdir = self.options.tmpdir

        # generate 20 addresses to compare against the dump
        test_addr_count = 20
        addrs = []
        for i in range(0, test_addr_count):
            addr = self.nodes[0].getnewaddress()
            vaddr = self.nodes[0].validateaddress(
                addr)  # required to get hd keypath
            addrs.append(vaddr)
        # Should be a no-op:
        self.nodes[0].keypoolrefill()

        # dump unencrypted wallet
        result = self.nodes[0].dumpwallet(
            tmpdir + "/node0/wallet.unencrypted.dump")
        assert_equal(result['filename'], os.path.abspath(
            tmpdir + "/node0/wallet.unencrypted.dump"))

        found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_unenc = \
            read_dump(tmpdir + "/node0/wallet.unencrypted.dump", addrs, None)
        # all keys must be in the dump
        assert_equal(found_addr, test_addr_count)
        # 50 blocks were mined
        assert_equal(found_addr_chg, 50)
        # 90 keys plus 100% internal keys
        assert_equal(found_addr_rsv, 90 * 2)

        # encrypt wallet, restart, unlock and dump
        self.nodes[0].node_encrypt_wallet('test')
        self.start_node(0)
        self.nodes[0].walletpassphrase('test', 10)
        # Should be a no-op:
        self.nodes[0].keypoolrefill()
        self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.encrypted.dump")

        found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_enc = \
            read_dump(tmpdir + "/node0/wallet.encrypted.dump",
                      addrs, hd_master_addr_unenc)
        assert_equal(found_addr, test_addr_count)
        # old reserve keys are marked as change now
        assert_equal(found_addr_chg, 90 * 2 + 50)
        assert_equal(found_addr_rsv, 90 * 2)

        # Overwriting should fail
        assert_raises_rpc_error(-8, "already exists",
                                self.nodes[0].dumpwallet, tmpdir + "/node0/wallet.unencrypted.dump")
Example #28
 def test_invalid_blockhash(self):
     assert_raises_rpc_error(-5, "Block not found", self.nodes[0].listsinceblock,
                             "42759cde25462784395a337460bde75f58e73d3f08bd31fdc3507cbac856a2c4")
     assert_raises_rpc_error(-5, "Block not found", self.nodes[0].listsinceblock,
                             "0000000000000000000000000000000000000000000000000000000000000000")
     assert_raises_rpc_error(-8, "blockhash must be of length 64 (not 11, for 'invalid-hex')", self.nodes[0].listsinceblock,
                             "invalid-hex")
     assert_raises_rpc_error(-8, "blockhash must be hexadecimal string (not 'Z000000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].listsinceblock,
                             "Z000000000000000000000000000000000000000000000000000000000000000")
Example #29
    def run_test(self):
        txouts = gen_return_txouts()
        relayfee = self.nodes[0].getnetworkinfo()['relayfee']

        self.log.info('Check that mempoolminfee is minrelaytxfee')
        assert_equal(self.nodes[0].getmempoolinfo()['minrelaytxfee'], Decimal('0.00001000'))
        assert_equal(self.nodes[0].getmempoolinfo()['mempoolminfee'], Decimal('0.00001000'))

        txids = []
        utxos = create_confirmed_utxos(relayfee, self.nodes[0], 91)

        self.log.info('Create a mempool tx that will be evicted')
        us0 = utxos.pop()
        inputs = [{ "txid" : us0["txid"], "vout" : us0["vout"]}]
        outputs = {self.nodes[0].getnewaddress() : 0.0001}
        tx = self.nodes[0].createrawtransaction(inputs, outputs)
        self.nodes[0].settxfee(relayfee) # specifically fund this tx with low fee
        txF = self.nodes[0].fundrawtransaction(tx)
        self.nodes[0].settxfee(0) # return to automatic fee selection
        txFS = self.nodes[0].signrawtransactionwithwallet(txF['hex'])
        txid = self.nodes[0].sendrawtransaction(txFS['hex'])

        relayfee = self.nodes[0].getnetworkinfo()['relayfee']
        base_fee = relayfee*100
        for i in range(3):
            txids.append([])
            txids[i] = create_lots_of_big_transactions(self.nodes[0], txouts, utxos[30*i:30*i+30], 30, (i+1)*base_fee)

        self.log.info('The tx should be evicted by now')
        assert(txid not in self.nodes[0].getrawmempool())
        txdata = self.nodes[0].gettransaction(txid)
        assert(txdata['confirmations'] == 0)  # confirmation should still be 0

        self.log.info('Check that mempoolminfee is larger than minrelaytxfee')
        assert_equal(self.nodes[0].getmempoolinfo()['minrelaytxfee'], Decimal('0.00001000'))
        assert_greater_than(self.nodes[0].getmempoolinfo()['mempoolminfee'], Decimal('0.00001000'))

        self.log.info('Create a mempool tx that will not pass mempoolminfee')
        us0 = utxos.pop()
        inputs = [{ "txid" : us0["txid"], "vout" : us0["vout"]}]
        outputs = {self.nodes[0].getnewaddress() : 0.0001}
        tx = self.nodes[0].createrawtransaction(inputs, outputs)
        # specifically fund this tx with a fee < mempoolminfee but >= minrelaytxfee
        txF = self.nodes[0].fundrawtransaction(tx, {'feeRate': relayfee})
        txFS = self.nodes[0].signrawtransactionwithwallet(txF['hex'])
        assert_raises_rpc_error(-26, "mempool min fee not met", self.nodes[0].sendrawtransaction, txFS['hex'])
Example #30
    def test_new_unconfirmed_inputs(self):
        """Replacements that add new unconfirmed inputs are rejected"""
        confirmed_utxo = make_utxo(self.nodes[0], int(1.1*COIN))
        unconfirmed_utxo = make_utxo(self.nodes[0], int(0.1*COIN), False)

        tx1 = CTransaction()
        tx1.vin = [CTxIn(confirmed_utxo)]
        tx1.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))]
        tx1_hex = txToHex(tx1)
        self.nodes[0].sendrawtransaction(tx1_hex, True)

        tx2 = CTransaction()
        tx2.vin = [CTxIn(confirmed_utxo), CTxIn(unconfirmed_utxo)]
        tx2.vout = tx1.vout
        tx2_hex = txToHex(tx2)

        # This will raise an exception
        assert_raises_rpc_error(-26, "replacement-adds-unconfirmed", self.nodes[0].sendrawtransaction, tx2_hex, True)
Example #31
 def test_instantsend_publishers(self):
     instantsend_publishers = [
         ZMQPublisher.hash_tx_lock,
         ZMQPublisher.raw_tx_lock,
         ZMQPublisher.raw_tx_lock_sig,
         ZMQPublisher.hash_instantsend_doublespend,
         ZMQPublisher.raw_instantsend_doublespend
     ]
     self.log.info("Testing %d InstantSend publishers" % len(instantsend_publishers))
     # Subscribe to InstantSend messages
     self.subscribe(instantsend_publishers)
     # Initialize test node
     self.test_node = self.nodes[0].add_p2p_connection(TestP2PConn())
     network_thread_start()
     self.nodes[0].p2p.wait_for_verack()
     # Make sure all nodes agree
     self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash())
     # Create two raw TXs, they will conflict with each other
     rpc_raw_tx_1 = self.create_raw_tx(self.nodes[0], self.nodes[0], 1, 1, 100)
     rpc_raw_tx_2 = self.create_raw_tx(self.nodes[0], self.nodes[0], 1, 1, 100)
     # Send the first transaction and wait for the InstantLock
     rpc_raw_tx_1_hash = self.nodes[0].sendrawtransaction(rpc_raw_tx_1['hex'])
     self.wait_for_instantlock(rpc_raw_tx_1_hash, self.nodes[0])
     # Validate hashtxlock
     zmq_tx_lock_hash = bytes_to_hex_str(self.receive(ZMQPublisher.hash_tx_lock).read(32))
     assert_equal(zmq_tx_lock_hash, rpc_raw_tx_1['txid'])
     # Validate rawtxlock
     zmq_tx_lock_raw = CTransaction()
     zmq_tx_lock_raw.deserialize(self.receive(ZMQPublisher.raw_tx_lock))
     assert(zmq_tx_lock_raw.is_valid())
     assert_equal(zmq_tx_lock_raw.hash, rpc_raw_tx_1['txid'])
     # Validate rawtxlocksig
     zmq_tx_lock_sig_stream = self.receive(ZMQPublisher.raw_tx_lock_sig)
     zmq_tx_lock_tx = CTransaction()
     zmq_tx_lock_tx.deserialize(zmq_tx_lock_sig_stream)
     assert(zmq_tx_lock_tx.is_valid())
     assert_equal(zmq_tx_lock_tx.hash, rpc_raw_tx_1['txid'])
     zmq_tx_lock = msg_islock()
     zmq_tx_lock.deserialize(zmq_tx_lock_sig_stream)
     assert_equal(uint256_to_string(zmq_tx_lock.txid), rpc_raw_tx_1['txid'])
     # Try to send the second transaction. This must throw an RPC error because it conflicts with rpc_raw_tx_1
     # which already got the InstantSend lock.
     assert_raises_rpc_error(-26, "tx-txlock-conflict", self.nodes[0].sendrawtransaction, rpc_raw_tx_2['hex'])
     # Validate hashinstantsenddoublespend
     zmq_double_spend_hash2 = bytes_to_hex_str(self.receive(ZMQPublisher.hash_instantsend_doublespend).read(32))
     zmq_double_spend_hash1 = bytes_to_hex_str(self.receive(ZMQPublisher.hash_instantsend_doublespend).read(32))
     assert_equal(zmq_double_spend_hash2, rpc_raw_tx_2['txid'])
     assert_equal(zmq_double_spend_hash1, rpc_raw_tx_1['txid'])
     # Validate rawinstantsenddoublespend
     zmq_double_spend_tx_2 = CTransaction()
     zmq_double_spend_tx_2.deserialize(self.receive(ZMQPublisher.raw_instantsend_doublespend))
     assert (zmq_double_spend_tx_2.is_valid())
     assert_equal(zmq_double_spend_tx_2.hash, rpc_raw_tx_2['txid'])
     zmq_double_spend_tx_1 = CTransaction()
     zmq_double_spend_tx_1.deserialize(self.receive(ZMQPublisher.raw_instantsend_doublespend))
     assert(zmq_double_spend_tx_1.is_valid())
     assert_equal(zmq_double_spend_tx_1.hash, rpc_raw_tx_1['txid'])
     # No islock notifications when tx is not received yet
     self.nodes[0].generate(1)
     rpc_raw_tx_3 = self.create_raw_tx(self.nodes[0], self.nodes[0], 1, 1, 100)
     islock = self.create_islock(rpc_raw_tx_3['hex'])
     self.test_node.send_islock(islock)
     # Validate NO hashtxlock
     time.sleep(1)
     try:
         self.receive(ZMQPublisher.hash_tx_lock, zmq.NOBLOCK)
         assert(False)
     except zmq.ZMQError:
         # this is expected
         pass
     # Now send the tx itself
     self.test_node.send_tx(FromHex(msg_tx(), rpc_raw_tx_3['hex']))
     self.wait_for_instantlock(rpc_raw_tx_3['txid'], self.nodes[0])
     # Validate hashtxlock
     zmq_tx_lock_hash = bytes_to_hex_str(self.receive(ZMQPublisher.hash_tx_lock).read(32))
     assert_equal(zmq_tx_lock_hash, rpc_raw_tx_3['txid'])
     # Drop test node connection
     self.nodes[0].disconnect_p2ps()
     # Unsubscribe from InstantSend messages
     self.unsubscribe(instantsend_publishers)
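The subscribe / receive helpers used above wrap a plain ZeroMQ SUB socket. A minimal sketch of that pattern with pyzmq; the endpoint and topic name are assumptions, and the node publishes each notification as a three-frame multipart message (topic, body, sequence number):

import zmq

ctx = zmq.Context()
socket = ctx.socket(zmq.SUB)
socket.connect("tcp://127.0.0.1:28332")            # assumed -zmqpub* endpoint
socket.setsockopt_string(zmq.SUBSCRIBE, "hashtxlock")

# Blocks until the next hashtxlock notification arrives.
topic, body, seq = socket.recv_multipart()
txid_hex = body.hex()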
Example #32
    def _test_coin_stats_index(self):
        node = self.nodes[0]
        index_node = self.nodes[1]
        # Both none and muhash options allow the usage of the index
        index_hash_options = ['none', 'muhash']

        # Generate a normal transaction and mine it
        self.generate(node, COINBASE_MATURITY + 1)
        address = self.nodes[0].get_deterministic_priv_key().address
        node.sendtoaddress(address=address,
                           amount=10,
                           subtractfeefromamount=True)
        self.generate(node, 1)

        self.sync_blocks(timeout=120)

        self.log.info(
            "Test that gettxoutsetinfo() output is consistent with or without coinstatsindex option"
        )
        res0 = node.gettxoutsetinfo('none')

        # The fields 'disk_size' and 'transactions' do not exist on the index
        del res0['disk_size'], res0['transactions']

        for hash_option in index_hash_options:
            res1 = index_node.gettxoutsetinfo(hash_option)
            # The fields 'block_info' and 'total_unspendable_amount' only exist on the index
            del res1['block_info'], res1['total_unspendable_amount']
            res1.pop('muhash', None)

            # Everything left should be the same
            assert_equal(res1, res0)

        self.log.info(
            "Test that gettxoutsetinfo() can get fetch data on specific heights with index"
        )

        # Generate a new tip
        self.generate(node, 5)

        for hash_option in index_hash_options:
            # Fetch old stats by height
            res2 = index_node.gettxoutsetinfo(hash_option, 102)
            del res2['block_info'], res2['total_unspendable_amount']
            res2.pop('muhash', None)
            assert_equal(res0, res2)

            # Fetch old stats by hash
            res3 = index_node.gettxoutsetinfo(hash_option, res0['bestblock'])
            del res3['block_info'], res3['total_unspendable_amount']
            res3.pop('muhash', None)
            assert_equal(res0, res3)

            # It does not work without coinstatsindex
            assert_raises_rpc_error(
                -8, "Querying specific block heights requires coinstatsindex",
                node.gettxoutsetinfo, hash_option, 102)

        self.log.info("Test gettxoutsetinfo() with index and verbose flag")

        for hash_option in index_hash_options:
            # Genesis block is unspendable
            res4 = index_node.gettxoutsetinfo(hash_option, 0)
            assert_equal(res4['total_unspendable_amount'], 50)
            assert_equal(
                res4['block_info'], {
                    'unspendable': 50,
                    'prevout_spent': 0,
                    'new_outputs_ex_coinbase': 0,
                    'coinbase': 0,
                    'unspendables': {
                        'genesis_block': 50,
                        'bip30': 0,
                        'scripts': 0,
                        'unclaimed_rewards': 0
                    }
                })
            self.block_sanity_check(res4['block_info'])

            # Test an older block height that included a normal tx
            res5 = index_node.gettxoutsetinfo(hash_option, 102)
            assert_equal(res5['total_unspendable_amount'], 50)
            assert_equal(
                res5['block_info'], {
                    'unspendable': 0,
                    'prevout_spent': 50,
                    'new_outputs_ex_coinbase': Decimal('49.99995560'),
                    'coinbase': Decimal('50.00004440'),
                    'unspendables': {
                        'genesis_block': 0,
                        'bip30': 0,
                        'scripts': 0,
                        'unclaimed_rewards': 0
                    }
                })
            self.block_sanity_check(res5['block_info'])

        # Generate and send a normal tx with two outputs
        tx1_inputs = []
        tx1_outputs = {
            self.nodes[0].getnewaddress(): 21,
            self.nodes[0].getnewaddress(): 42
        }
        raw_tx1 = self.nodes[0].createrawtransaction(tx1_inputs, tx1_outputs)
        funded_tx1 = self.nodes[0].fundrawtransaction(raw_tx1)
        signed_tx1 = self.nodes[0].signrawtransactionwithwallet(
            funded_tx1['hex'])
        tx1_txid = self.nodes[0].sendrawtransaction(signed_tx1['hex'])

        # Find the right position of the 21 BTC output
        tx1_final = self.nodes[0].gettransaction(tx1_txid)
        for output in tx1_final['details']:
            if output['amount'] == Decimal(
                    '21.00000000') and output['category'] == 'receive':
                n = output['vout']

        # Generate and send another tx with an OP_RETURN output (which is unspendable)
        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(int(tx1_txid, 16), n), b''))
        tx2.vout.append(
            CTxOut(int(Decimal('20.99') * COIN),
                   CScript([OP_RETURN] + [OP_FALSE] * 30)))
        tx2_hex = self.nodes[0].signrawtransactionwithwallet(
            tx2.serialize().hex())['hex']
        self.nodes[0].sendrawtransaction(tx2_hex)

        # Include both txs in a block
        self.generate(self.nodes[0], 1)
        self.sync_all()

        for hash_option in index_hash_options:
            # Check all amounts were registered correctly
            res6 = index_node.gettxoutsetinfo(hash_option, 108)
            assert_equal(res6['total_unspendable_amount'],
                         Decimal('70.99000000'))
            assert_equal(
                res6['block_info'], {
                    'unspendable': Decimal('20.99000000'),
                    'prevout_spent': 111,
                    'new_outputs_ex_coinbase': Decimal('89.99993620'),
                    'coinbase': Decimal('50.01006380'),
                    'unspendables': {
                        'genesis_block': 0,
                        'bip30': 0,
                        'scripts': Decimal('20.99000000'),
                        'unclaimed_rewards': 0
                    }
                })
            self.block_sanity_check(res6['block_info'])

        # Create a coinbase that does not claim full subsidy and also
        # has two outputs
        cb = create_coinbase(109, nValue=35)
        cb.vout.append(CTxOut(5 * COIN, CScript([OP_FALSE])))
        cb.rehash()

        # Generate a block that includes previous coinbase
        tip = self.nodes[0].getbestblockhash()
        block_time = self.nodes[0].getblock(tip)['time'] + 1
        block = create_block(int(tip, 16), cb, block_time)
        block.solve()
        self.nodes[0].submitblock(block.serialize().hex())
        self.sync_all()

        for hash_option in index_hash_options:
            res7 = index_node.gettxoutsetinfo(hash_option, 109)
            assert_equal(res7['total_unspendable_amount'],
                         Decimal('80.99000000'))
            assert_equal(
                res7['block_info'], {
                    'unspendable': 10,
                    'prevout_spent': 0,
                    'new_outputs_ex_coinbase': 0,
                    'coinbase': 40,
                    'unspendables': {
                        'genesis_block': 0,
                        'bip30': 0,
                        'scripts': 0,
                        'unclaimed_rewards': 10
                    }
                })
            self.block_sanity_check(res7['block_info'])

        self.log.info("Test that the index is robust across restarts")

        res8 = index_node.gettxoutsetinfo('muhash')
        self.restart_node(1, extra_args=self.extra_args[1])
        res9 = index_node.gettxoutsetinfo('muhash')
        assert_equal(res8, res9)

        self.generate(index_node, 1)
        res10 = index_node.gettxoutsetinfo('muhash')
        assert (res8['txouts'] < res10['txouts'])
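block_sanity_check, called after each block_info assertion above, is not shown in this excerpt. Judging from the figures it verifies that block value is conserved: what the block spends plus the subsidy equals what it creates, including unspendable outputs. A sketch consistent with the numbers in this example (the 50-coin regtest subsidy is an assumption):

    def block_sanity_check(self, block_info):
        block_subsidy = 50  # assumed regtest subsidy at these heights
        assert_equal(
            block_info['prevout_spent'] + block_subsidy,
            block_info['new_outputs_ex_coinbase'] + block_info['coinbase']
            + block_info['unspendable'])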
Example #33
    def run_test(self):
        self.log.info("Mining blocks...")
        self.nodes[0].generate(101)

        self.sync_all()
        
        # address
        address1 = self.nodes[0].getnewaddress()
        # pubkey
        address2 = self.nodes[0].getnewaddress()
        # privkey
        address3 = self.nodes[0].getnewaddress()
        address3_privkey = self.nodes[0].dumpprivkey(address3)                              # Using privkey

        #Check only one address
        address_info = self.nodes[0].validateaddress(address1)
        assert_equal(address_info['ismine'], True)

        self.sync_all()

        #Node 1 sync test
        assert_equal(self.nodes[1].getblockcount(),101)

        #Address Test - before import
        address_info = self.nodes[1].validateaddress(address1)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)

        address_info = self.nodes[1].validateaddress(address2)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)

        address_info = self.nodes[1].validateaddress(address3)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)

        #Send funds to self
        txnid1 = self.nodes[0].sendtoaddress(address1, 0.1)
        self.nodes[0].generate(1)
        rawtxn1 = self.nodes[0].gettransaction(txnid1)['hex']
        proof1 = self.nodes[0].gettxoutproof([txnid1])

        txnid2 = self.nodes[0].sendtoaddress(address2, 0.05)
        self.nodes[0].generate(1)
        rawtxn2 = self.nodes[0].gettransaction(txnid2)['hex']
        proof2 = self.nodes[0].gettxoutproof([txnid2])

        txnid3 = self.nodes[0].sendtoaddress(address3, 0.025)
        self.nodes[0].generate(1)
        rawtxn3 = self.nodes[0].gettransaction(txnid3)['hex']
        proof3 = self.nodes[0].gettxoutproof([txnid3])

        self.sync_all()

        #Import with no affiliated address
        assert_raises_rpc_error(-5, "No addresses", self.nodes[1].importprunedfunds, rawtxn1, proof1)

        balance1 = self.nodes[1].getbalance("", 0, True)
        assert_equal(balance1, Decimal(0))

        #Import with affiliated address with no rescan
        self.nodes[1].importaddress(address2, "add2", False)
        self.nodes[1].importprunedfunds(rawtxn2, proof2)
        balance2 = self.nodes[1].getbalance("add2", 0, True)
        assert_equal(balance2, Decimal('0.05'))

        #Import with private key with no rescan
        self.nodes[1].importprivkey(privkey=address3_privkey, label="add3", rescan=False)
        self.nodes[1].importprunedfunds(rawtxn3, proof3)
        balance3 = self.nodes[1].getbalance("add3", 0, False)
        assert_equal(balance3, Decimal('0.025'))
        balance3 = self.nodes[1].getbalance("*", 0, True)
        assert_equal(balance3, Decimal('0.075'))

        #Addresses Test - after import
        address_info = self.nodes[1].validateaddress(address1)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)
        address_info = self.nodes[1].validateaddress(address2)
        assert_equal(address_info['iswatchonly'], True)
        assert_equal(address_info['ismine'], False)
        address_info = self.nodes[1].validateaddress(address3)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], True)

        #Remove transactions
        assert_raises_rpc_error(-8, "Transaction does not exist in wallet.", self.nodes[1].removeprunedfunds, txnid1)

        balance1 = self.nodes[1].getbalance("*", 0, True)
        assert_equal(balance1, Decimal('0.075'))

        self.nodes[1].removeprunedfunds(txnid2)
        balance2 = self.nodes[1].getbalance("*", 0, True)
        assert_equal(balance2, Decimal('0.025'))

        self.nodes[1].removeprunedfunds(txnid3)
        balance3 = self.nodes[1].getbalance("*", 0, True)
        assert_equal(balance3, Decimal('0.0'))
Example #34
def test_nonrbf_bumpfee_fails(peer_node, dest_address):
    # cannot replace a non RBF transaction (from node which did not enable RBF)
    not_rbfid = peer_node.sendtoaddress(dest_address, Decimal("0.00090000"))
    assert_raises_rpc_error(-4, "not BIP 125 replaceable", peer_node.bumpfee,
                            not_rbfid)
Example #35
    def run_test(self):
        test_node = self.nodes[0].add_p2p_connection(P2PInterface())
        min_work_node = self.nodes[1].add_p2p_connection(P2PInterface())

        # 1. Have nodes mine a block (leave IBD)
        [
            n.generatetoaddress(1,
                                n.get_deterministic_priv_key().address)
            for n in self.nodes
        ]
        tips = [int("0x" + n.getbestblockhash(), 0) for n in self.nodes]

        # 2. Send one block that builds on each tip.
        # This should be accepted by node0
        blocks_h2 = []  # the height 2 blocks on each node's chain
        block_time = int(time.time()) + 1
        for i in range(2):
            blocks_h2.append(
                create_block(tips[i], create_coinbase(2), block_time))
            blocks_h2[i].solve()
            block_time += 1
        test_node.send_and_ping(msg_block(blocks_h2[0]))
        min_work_node.send_and_ping(msg_block(blocks_h2[1]))

        assert_equal(self.nodes[0].getblockcount(), 2)
        assert_equal(self.nodes[1].getblockcount(), 1)
        self.log.info(
            "First height 2 block accepted by node0; correctly rejected by node1"
        )

        # 3. Send another block that builds on genesis.
        block_h1f = create_block(int("0x" + self.nodes[0].getblockhash(0), 0),
                                 create_coinbase(1), block_time)
        block_time += 1
        block_h1f.solve()
        test_node.send_and_ping(msg_block(block_h1f))

        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_h1f.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert tip_entry_found
        assert_raises_rpc_error(-1, "Block not found on disk",
                                self.nodes[0].getblock, block_h1f.hash)

        # 4. Send another two blocks that build on the fork.
        block_h2f = create_block(block_h1f.sha256, create_coinbase(2),
                                 block_time)
        block_time += 1
        block_h2f.solve()
        test_node.send_and_ping(msg_block(block_h2f))

        # Since the earlier block was not processed by node, the new block
        # can't be fully validated.
        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_h2f.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert tip_entry_found

        # But this block should be accepted by node since it has equal work.
        self.nodes[0].getblock(block_h2f.hash)
        self.log.info("Second height 2 block accepted, but not reorg'ed to")

        # 4b. Now send another block that builds on the forking chain.
        block_h3 = create_block(block_h2f.sha256, create_coinbase(3),
                                block_h2f.nTime + 1)
        block_h3.solve()
        test_node.send_and_ping(msg_block(block_h3))

        # Since the earlier block was not processed by node, the new block
        # can't be fully validated.
        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_h3.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert tip_entry_found
        self.nodes[0].getblock(block_h3.hash)

        # But this block should be accepted by node since it has more work.
        self.nodes[0].getblock(block_h3.hash)
        self.log.info("Unrequested more-work block accepted")

        # 4c. Now mine 288 more blocks and deliver; node0 should process all of
        # them except the last one (its height is too far ahead), as long as it
        # is not missing any headers
        tip = block_h3
        all_blocks = []
        for i in range(288):
            next_block = create_block(tip.sha256, create_coinbase(i + 4),
                                      tip.nTime + 1)
            next_block.solve()
            all_blocks.append(next_block)
            tip = next_block

        # Now send the block at height 5 and check that it wasn't accepted (missing header)
        test_node.send_and_ping(msg_block(all_blocks[1]))
        assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblock,
                                all_blocks[1].hash)
        assert_raises_rpc_error(-5, "Block not found",
                                self.nodes[0].getblockheader,
                                all_blocks[1].hash)

        # The block at height 5 should be accepted if we provide the missing header, though
        headers_message = msg_headers()
        headers_message.headers.append(CBlockHeader(all_blocks[0]))
        test_node.send_message(headers_message)
        test_node.send_and_ping(msg_block(all_blocks[1]))
        self.nodes[0].getblock(all_blocks[1].hash)

        # Now send the blocks in all_blocks
        for i in range(288):
            test_node.send_message(msg_block(all_blocks[i]))
        test_node.sync_with_ping()

        # Blocks 1-287 should be accepted, block 288 should be ignored because it's too far ahead
        for x in all_blocks[:-1]:
            self.nodes[0].getblock(x.hash)
        assert_raises_rpc_error(-1, "Block not found on disk",
                                self.nodes[0].getblock, all_blocks[-1].hash)

        # 5. Test handling of unrequested block on the node that didn't process
        # it earlier. It should still not be processed (even though it has a
        # child that has more work).

        # The node should have requested the blocks at some point, so
        # disconnect/reconnect first

        self.nodes[0].disconnect_p2ps()
        self.nodes[1].disconnect_p2ps()

        test_node = self.nodes[0].add_p2p_connection(P2PInterface())

        test_node.send_and_ping(msg_block(block_h1f))
        assert_equal(self.nodes[0].getblockcount(), 2)
        self.log.info(
            "Unrequested block that would complete more-work chain was ignored"
        )

        # 6. Try to get node to request the missing block.
        # Poke the node with an inv for block_h3 and see if that triggers a
        # getdata for block_h1f (it should, since block_h1f is still missing).
        with p2p_lock:
            # Clear state so we can check the getdata request
            test_node.last_message.pop("getdata", None)
            test_node.send_message(msg_inv([CInv(MSG_BLOCK, block_h3.sha256)]))

        test_node.sync_with_ping()
        with p2p_lock:
            getdata = test_node.last_message["getdata"]

        # Check that the getdata includes the right block
        assert_equal(getdata.inv[0].hash, block_h1f.sha256)
        self.log.info("Inv at tip triggered getdata for unprocessed block")

        # 7. Send the missing block for the third time (now it is requested)
        test_node.send_and_ping(msg_block(block_h1f))
        assert_equal(self.nodes[0].getblockcount(), 290)
        self.nodes[0].getblock(all_blocks[286].hash)
        assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash)
        assert_raises_rpc_error(-1, "Block not found on disk",
                                self.nodes[0].getblock, all_blocks[287].hash)
        self.log.info("Successfully reorged to longer chain")

        # 8. Create a chain which is invalid at a height longer than the
        # current chain, but which has more blocks on top of that
        block_289f = create_block(all_blocks[284].sha256, create_coinbase(289),
                                  all_blocks[284].nTime + 1)
        block_289f.solve()
        block_290f = create_block(block_289f.sha256, create_coinbase(290),
                                  block_289f.nTime + 1)
        block_290f.solve()
        block_291 = create_block(block_290f.sha256, create_coinbase(291),
                                 block_290f.nTime + 1)
        # block_291 spends a coinbase below maturity!
        block_291.vtx.append(
            create_tx_with_script(block_290f.vtx[0],
                                  0,
                                  script_sig=b"42",
                                  amount=1))
        block_291.hashMerkleRoot = block_291.calc_merkle_root()
        block_291.solve()
        block_292 = create_block(block_291.sha256, create_coinbase(292),
                                 block_291.nTime + 1)
        block_292.solve()

        # Now send all the headers on the chain and enough blocks to trigger reorg
        headers_message = msg_headers()
        headers_message.headers.append(CBlockHeader(block_289f))
        headers_message.headers.append(CBlockHeader(block_290f))
        headers_message.headers.append(CBlockHeader(block_291))
        headers_message.headers.append(CBlockHeader(block_292))
        test_node.send_and_ping(headers_message)

        tip_entry_found = False
        for x in self.nodes[0].getchaintips():
            if x['hash'] == block_292.hash:
                assert_equal(x['status'], "headers-only")
                tip_entry_found = True
        assert tip_entry_found
        assert_raises_rpc_error(-1, "Block not found on disk",
                                self.nodes[0].getblock, block_292.hash)

        test_node.send_message(msg_block(block_289f))
        test_node.send_and_ping(msg_block(block_290f))

        self.nodes[0].getblock(block_289f.hash)
        self.nodes[0].getblock(block_290f.hash)

        test_node.send_message(msg_block(block_291))

        # At this point we've sent an obviously-bogus block, wait for full processing
        # without assuming whether we will be disconnected or not
        try:
            # Only wait a short while so the test doesn't take forever if we do get
            # disconnected
            test_node.sync_with_ping(timeout=1)
        except AssertionError:
            test_node.wait_for_disconnect()

            self.nodes[0].disconnect_p2ps()
            test_node = self.nodes[0].add_p2p_connection(P2PInterface())

        # We should have failed reorg and switched back to 290 (but have block 291)
        assert_equal(self.nodes[0].getblockcount(), 290)
        assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash)
        assert_equal(self.nodes[0].getblock(block_291.hash)["confirmations"],
                     -1)

        # Now send a new header on the invalid chain, indicating we're forked off, and expect to get disconnected
        block_293 = create_block(block_292.sha256, create_coinbase(293),
                                 block_292.nTime + 1)
        block_293.solve()
        headers_message = msg_headers()
        headers_message.headers.append(CBlockHeader(block_293))
        test_node.send_message(headers_message)
        test_node.wait_for_disconnect()

        # 9. Connect node1 to node0 and ensure it is able to sync
        self.connect_nodes(0, 1)
        self.sync_blocks([self.nodes[0], self.nodes[1]])
        self.log.info("Successfully synced nodes 1 and 0")
Example #36
0
    def run_test(self):
        chain_height = self.nodes[0].getblockcount()
        assert_equal(chain_height, 200)

        self.log.debug("Mine a single block to get out of IBD")
        self.nodes[0].generate(1)
        self.sync_all()

        self.log.debug("Send 5 transactions from node2 (to its own address)")
        for i in range(5):
            last_txid = self.nodes[2].sendtoaddress(
                self.nodes[2].getnewaddress(), Decimal("10"))
        node2_balance = self.nodes[2].getbalance()
        self.sync_all()

        self.log.debug(
            "Verify that node0 and node1 have 5 transactions in their mempools"
        )
        assert_equal(len(self.nodes[0].getrawmempool()), 5)
        assert_equal(len(self.nodes[1].getrawmempool()), 5)

        self.log.debug("Prioritize a transaction on node0")
        fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
        assert_equal(fees['base'], fees['modified'])
        self.nodes[0].prioritisetransaction(txid=last_txid, fee_delta=1000)
        fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
        assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified'])

        self.log.debug("Stop-start the nodes. Verify that node0 has the "
                       "transactions in its mempool and node1 does not. "
                       "Verify that node2 calculates its balance correctly "
                       "after loading wallet transactions.")
        self.stop_nodes()
        # Give this one a head-start, so we can be "extra-sure" that it didn't
        # load anything later
        # Also don't store the mempool, to keep the datadir clean
        self.start_node(1, extra_args=["-persistmempool=0"])
        self.start_node(0)
        self.start_node(2)
        # start_node is blocking on the mempool being loaded
        assert self.nodes[0].getmempoolinfo()["loaded"]
        assert self.nodes[2].getmempoolinfo()["loaded"]
        assert_equal(len(self.nodes[0].getrawmempool()), 5)
        assert_equal(len(self.nodes[2].getrawmempool()), 5)
        # The others have loaded their mempool. If node_1 loaded anything, we'd
        # probably notice by now:
        assert_equal(len(self.nodes[1].getrawmempool()), 0)

        self.log.debug('Verify prioritization is loaded correctly')
        fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
        assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified'])

        # Verify accounting of mempool transactions after restart is correct
        # Flush mempool to wallet
        self.nodes[2].syncwithvalidationinterfacequeue()
        assert_equal(node2_balance, self.nodes[2].getbalance())

        self.log.debug(
            "Stop-start node0 with -persistmempool=0. Verify that it doesn't load its mempool.dat file."
        )
        self.stop_nodes()
        self.start_node(0, extra_args=["-persistmempool=0"])
        assert self.nodes[0].getmempoolinfo()["loaded"]
        assert_equal(len(self.nodes[0].getrawmempool()), 0)

        self.log.debug(
            "Stop-start node0. Verify that it has the transactions in its mempool."
        )
        self.stop_nodes()
        self.start_node(0)
        assert self.nodes[0].getmempoolinfo()["loaded"]
        assert_equal(len(self.nodes[0].getrawmempool()), 5)

        mempooldat0 = os.path.join(self.nodes[0].datadir, 'regtest',
                                   'mempool.dat')
        mempooldat1 = os.path.join(self.nodes[1].datadir, 'regtest',
                                   'mempool.dat')
        self.log.debug(
            "Remove the mempool.dat file. Verify that savemempool to disk via RPC re-creates it"
        )
        os.remove(mempooldat0)
        self.nodes[0].savemempool()
        assert os.path.isfile(mempooldat0)

        self.log.debug(
            "Stop nodes, make node1 use mempool.dat from node0. Verify it has 5 transactions"
        )
        os.rename(mempooldat0, mempooldat1)
        self.stop_nodes()
        self.start_node(1, extra_args=[])
        assert self.nodes[1].getmempoolinfo()["loaded"]
        assert_equal(len(self.nodes[1].getrawmempool()), 5)

        self.log.debug(
            "Prevent bitcoind from writing mempool.dat to disk. Verify that `savemempool` fails"
        )
        # To test the exception we create a temporary folder called
        # mempool.dat.new, which is an implementation detail that could change
        # and break this test.
        mempooldotnew1 = mempooldat1 + '.new'
        os.mkdir(mempooldotnew1)
        assert_raises_rpc_error(-1, "Unable to dump mempool to disk",
                                self.nodes[1].savemempool)
        os.rmdir(mempooldotnew1)
Example #37
0
    def run_test(self):
        node = self.nodes[0]

        data_dir = lambda *p: os.path.join(node.datadir, self.chain, *p)
        wallet_dir = lambda *p: data_dir('wallets', *p)
        wallet = lambda name: node.get_wallet_rpc(name)

        def wallet_file(name):
            if os.path.isdir(wallet_dir(name)):
                return wallet_dir(name, "wallet.dat")
            return wallet_dir(name)

        assert_equal(self.nodes[0].listwalletdir(),
                     {'wallets': [{
                         'name': ''
                     }]})

        # check wallet.dat is created
        self.stop_nodes()
        assert_equal(os.path.isfile(wallet_dir('wallet.dat')), True)

        # create symlink to verify wallet directory path can be referenced
        # through symlink
        os.mkdir(wallet_dir('w7'))
        os.symlink('w7', wallet_dir('w7_symlink'))

        # rename wallet.dat to make sure plain wallet file paths (as opposed to
        # directory paths) can be loaded
        os.rename(wallet_dir("wallet.dat"), wallet_dir("w8"))

        # create another dummy wallet for use in testing backups later
        self.start_node(0, [])
        self.stop_nodes()
        empty_wallet = os.path.join(self.options.tmpdir, 'empty.dat')
        os.rename(wallet_dir("wallet.dat"), empty_wallet)

        # restart node with a mix of wallet names:
        #   w1, w2, w3 - to verify new wallets created when non-existing paths specified
        #   w          - to verify wallet name matching works when one wallet path is prefix of another
        #   sub/w5     - to verify relative wallet path is created correctly
        #   extern/w6  - to verify absolute wallet path is created correctly
        #   w7_symlink - to verify symlinked wallet path is initialized correctly
        #   w8         - to verify existing wallet file is loaded correctly
        #   ''         - to verify default wallet file is created correctly
        wallet_names = [
            'w1', 'w2', 'w3', 'w', 'sub/w5',
            os.path.join(self.options.tmpdir, 'extern/w6'), 'w7_symlink', 'w8',
            ''
        ]
        extra_args = ['-wallet={}'.format(n) for n in wallet_names]
        self.start_node(0, extra_args)
        assert_equal(
            sorted(
                map(lambda w: w['name'],
                    self.nodes[0].listwalletdir()['wallets'])), [
                        '',
                        os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7',
                        'w7_symlink', 'w8'
                    ])

        assert_equal(set(node.listwallets()), set(wallet_names))

        # check that all requested wallets were created
        self.stop_node(0)
        for wallet_name in wallet_names:
            assert_equal(os.path.isfile(wallet_file(wallet_name)), True)

        # should not initialize if wallet path can't be created
        exp_stderr = "boost::filesystem::create_directory:"
        self.nodes[0].assert_start_raises_init_error(
            ['-wallet=wallet.dat/bad'],
            exp_stderr,
            match=ErrorMatch.PARTIAL_REGEX)

        self.nodes[0].assert_start_raises_init_error(
            ['-walletdir=wallets'],
            'Error: Specified -walletdir "wallets" does not exist')
        self.nodes[0].assert_start_raises_init_error(
            ['-walletdir=wallets'],
            'Error: Specified -walletdir "wallets" is a relative path',
            cwd=data_dir())
        self.nodes[0].assert_start_raises_init_error(
            ['-walletdir=debug.log'],
            'Error: Specified -walletdir "debug.log" is not a directory',
            cwd=data_dir())

        # should not initialize if there are duplicate wallets
        self.nodes[0].assert_start_raises_init_error([
            '-wallet=w1', '-wallet=w1'
        ], 'Error: Error loading wallet w1. Duplicate -wallet filename specified.'
                                                     )

        # should not initialize if one wallet is a copy of another
        shutil.copyfile(wallet_dir('w8'), wallet_dir('w8_copy'))
        exp_stderr = r"BerkeleyDatabase: Can't open database w8_copy \(duplicates fileid \w+ from w8\)"
        self.nodes[0].assert_start_raises_init_error(
            ['-wallet=w8', '-wallet=w8_copy'],
            exp_stderr,
            match=ErrorMatch.PARTIAL_REGEX)

        # should not initialize if wallet file is a symlink
        os.symlink('w8', wallet_dir('w8_symlink'))
        self.nodes[0].assert_start_raises_init_error(
            ['-wallet=w8_symlink'],
            r'Error: Invalid -wallet path \'w8_symlink\'\. .*',
            match=ErrorMatch.FULL_REGEX)

        # should not initialize if the specified walletdir does not exist
        self.nodes[0].assert_start_raises_init_error(
            ['-walletdir=bad'],
            'Error: Specified -walletdir "bad" does not exist')
        # should not initialize if the specified walletdir is not a directory
        not_a_dir = wallet_dir('notadir')
        open(not_a_dir, 'a', encoding="utf8").close()
        self.nodes[0].assert_start_raises_init_error(
            ['-walletdir=' + not_a_dir], 'Error: Specified -walletdir "' +
            not_a_dir + '" is not a directory')

        self.log.info("Do not allow -zapwallettxes with multiwallet")
        self.nodes[0].assert_start_raises_init_error(
            ['-zapwallettxes', '-wallet=w1', '-wallet=w2'],
            "Error: -zapwallettxes is only allowed with a single wallet file")
        self.nodes[0].assert_start_raises_init_error(
            ['-zapwallettxes=1', '-wallet=w1', '-wallet=w2'],
            "Error: -zapwallettxes is only allowed with a single wallet file")
        self.nodes[0].assert_start_raises_init_error(
            ['-zapwallettxes=2', '-wallet=w1', '-wallet=w2'],
            "Error: -zapwallettxes is only allowed with a single wallet file")

        # if wallets/ doesn't exist, datadir should be the default wallet dir
        wallet_dir2 = data_dir('walletdir')
        os.rename(wallet_dir(), wallet_dir2)
        self.start_node(0, ['-wallet=w4', '-wallet=w5'])
        assert_equal(set(node.listwallets()), {"w4", "w5"})
        w5 = wallet("w5")
        node.generatetoaddress(nblocks=1, address=w5.getnewaddress())

        # now if wallets/ exists again, but the rootdir is specified as the walletdir, w4 and w5 should still be loaded
        os.rename(wallet_dir2, wallet_dir())
        self.restart_node(
            0, ['-wallet=w4', '-wallet=w5', '-walletdir=' + data_dir()])
        assert_equal(set(node.listwallets()), {"w4", "w5"})
        w5 = wallet("w5")
        w5_info = w5.getwalletinfo()
        assert_equal(w5_info['immature_balance'], 50)

        competing_wallet_dir = os.path.join(self.options.tmpdir,
                                            'competing_walletdir')
        os.mkdir(competing_wallet_dir)
        self.restart_node(0, ['-walletdir=' + competing_wallet_dir])
        exp_stderr = r"Error: Error initializing wallet database environment \"\S+competing_walletdir\"!"
        self.nodes[1].assert_start_raises_init_error(
            ['-walletdir=' + competing_wallet_dir],
            exp_stderr,
            match=ErrorMatch.PARTIAL_REGEX)

        self.restart_node(0, extra_args)

        assert_equal(
            sorted(
                map(lambda w: w['name'],
                    self.nodes[0].listwalletdir()['wallets'])), [
                        '',
                        os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7',
                        'w7_symlink', 'w8', 'w8_copy'
                    ])

        wallets = [wallet(w) for w in wallet_names]
        wallet_bad = wallet("bad")

        # check wallet names and balances
        node.generatetoaddress(nblocks=1, address=wallets[0].getnewaddress())
        for wallet_name, wallet in zip(wallet_names, wallets):
            info = wallet.getwalletinfo()
            assert_equal(info['immature_balance'],
                         50 if wallet is wallets[0] else 0)
            assert_equal(info['walletname'], wallet_name)

        # accessing invalid wallet fails
        assert_raises_rpc_error(
            -18, "Requested wallet does not exist or is not loaded",
            wallet_bad.getwalletinfo)

        # accessing wallet RPC without using wallet endpoint fails
        assert_raises_rpc_error(-19, "Wallet file not specified",
                                node.getwalletinfo)

        w1, w2, w3, w4, *_ = wallets
        node.generatetoaddress(nblocks=101, address=w1.getnewaddress())
        assert_equal(w1.getbalance(), 100)
        assert_equal(w2.getbalance(), 0)
        assert_equal(w3.getbalance(), 0)
        assert_equal(w4.getbalance(), 0)

        w1.sendtoaddress(w2.getnewaddress(), 1)
        w1.sendtoaddress(w3.getnewaddress(), 2)
        w1.sendtoaddress(w4.getnewaddress(), 3)
        node.generatetoaddress(nblocks=1, address=w1.getnewaddress())
        assert_equal(w2.getbalance(), 1)
        assert_equal(w3.getbalance(), 2)
        assert_equal(w4.getbalance(), 3)

        batch = w1.batch([
            w1.getblockchaininfo.get_request(),
            w1.getwalletinfo.get_request()
        ])
        assert_equal(batch[0]["result"]["chain"], self.chain)
        assert_equal(batch[1]["result"]["walletname"], "w1")

        self.log.info('Check for per-wallet settxfee call')
        assert_equal(w1.getwalletinfo()['paytxfee'], 0)
        assert_equal(w2.getwalletinfo()['paytxfee'], 0)
        w2.settxfee(0.001)
        assert_equal(w1.getwalletinfo()['paytxfee'], 0)
        assert_equal(w2.getwalletinfo()['paytxfee'], Decimal('0.00100000'))

        self.log.info("Test dynamic wallet loading")

        self.restart_node(0, ['-nowallet'])
        assert_equal(node.listwallets(), [])
        assert_raises_rpc_error(-32601, "Method not found", node.getwalletinfo)

        self.log.info("Load first wallet")
        loadwallet_name = node.loadwallet(wallet_names[0])
        assert_equal(loadwallet_name['name'], wallet_names[0])
        assert_equal(node.listwallets(), wallet_names[0:1])
        node.getwalletinfo()
        w1 = node.get_wallet_rpc(wallet_names[0])
        w1.getwalletinfo()

        self.log.info("Load second wallet")
        loadwallet_name = node.loadwallet(wallet_names[1])
        assert_equal(loadwallet_name['name'], wallet_names[1])
        assert_equal(node.listwallets(), wallet_names[0:2])
        assert_raises_rpc_error(-19, "Wallet file not specified",
                                node.getwalletinfo)
        w2 = node.get_wallet_rpc(wallet_names[1])
        w2.getwalletinfo()

        self.log.info("Concurrent wallet loading")
        threads = []
        for _ in range(3):
            n = node.cli if self.options.usecli else get_rpc_proxy(
                node.url, 1, timeout=600, coveragedir=node.coverage_dir)
            t = Thread(target=test_load_unload, args=(
                n,
                wallet_names[2],
            ))
            t.start()
            threads.append(t)
        for t in threads:
            t.join()
        global got_loading_error
        assert_equal(got_loading_error, True)

        self.log.info("Load remaining wallets")
        for wallet_name in wallet_names[2:]:
            loadwallet_name = self.nodes[0].loadwallet(wallet_name)
            assert_equal(loadwallet_name['name'], wallet_name)

        assert_equal(set(self.nodes[0].listwallets()), set(wallet_names))

        # Fail to load if wallet doesn't exist
        assert_raises_rpc_error(-18, 'Wallet wallets not found.',
                                self.nodes[0].loadwallet, 'wallets')

        # Fail to load duplicate wallets
        assert_raises_rpc_error(
            -4,
            'Wallet file verification failed. Error loading wallet w1. Duplicate -wallet filename specified.',
            self.nodes[0].loadwallet, wallet_names[0])

        # Fail to load duplicate wallets by different ways (directory and filepath)
        assert_raises_rpc_error(
            -4,
            "Wallet file verification failed. Error loading wallet wallet.dat. Duplicate -wallet filename specified.",
            self.nodes[0].loadwallet, 'wallet.dat')

        # Fail to load if one wallet is a copy of another
        assert_raises_rpc_error(
            -4,
            "BerkeleyDatabase: Can't open database w8_copy (duplicates fileid",
            self.nodes[0].loadwallet, 'w8_copy')

        # Fail to load if one wallet is a copy of another, test this twice to make sure that we don't re-introduce #14304
        assert_raises_rpc_error(
            -4,
            "BerkeleyDatabase: Can't open database w8_copy (duplicates fileid",
            self.nodes[0].loadwallet, 'w8_copy')

        # Fail to load if wallet file is a symlink
        assert_raises_rpc_error(
            -4,
            "Wallet file verification failed. Invalid -wallet path 'w8_symlink'",
            self.nodes[0].loadwallet, 'w8_symlink')

        # Fail to load if a directory is specified that doesn't contain a wallet
        os.mkdir(wallet_dir('empty_wallet_dir'))
        assert_raises_rpc_error(
            -18,
            "Directory empty_wallet_dir does not contain a wallet.dat file",
            self.nodes[0].loadwallet, 'empty_wallet_dir')

        self.log.info("Test dynamic wallet creation.")

        # Fail to create a wallet if it already exists.
        assert_raises_rpc_error(-4, "Wallet w2 already exists.",
                                self.nodes[0].createwallet, 'w2')

        # Successfully create a wallet with a new name
        loadwallet_name = self.nodes[0].createwallet('w9')
        assert_equal(loadwallet_name['name'], 'w9')
        w9 = node.get_wallet_rpc('w9')
        assert_equal(w9.getwalletinfo()['walletname'], 'w9')

        assert 'w9' in self.nodes[0].listwallets()

        # Successfully create a wallet using a full path
        new_wallet_dir = os.path.join(self.options.tmpdir, 'new_walletdir')
        new_wallet_name = os.path.join(new_wallet_dir, 'w10')
        loadwallet_name = self.nodes[0].createwallet(new_wallet_name)
        assert_equal(loadwallet_name['name'], new_wallet_name)
        w10 = node.get_wallet_rpc(new_wallet_name)
        assert_equal(w10.getwalletinfo()['walletname'], new_wallet_name)

        assert new_wallet_name in self.nodes[0].listwallets()

        self.log.info("Test dynamic wallet unloading")

        # Test `unloadwallet` errors
        assert_raises_rpc_error(-1, "JSON value is not a string as expected",
                                self.nodes[0].unloadwallet)
        assert_raises_rpc_error(
            -18, "Requested wallet does not exist or is not loaded",
            self.nodes[0].unloadwallet, "dummy")
        assert_raises_rpc_error(
            -18, "Requested wallet does not exist or is not loaded",
            node.get_wallet_rpc("dummy").unloadwallet)
        assert_raises_rpc_error(-8, "Cannot unload the requested wallet",
                                w1.unloadwallet, "w2")

        # Successfully unload the specified wallet name
        self.nodes[0].unloadwallet("w1")
        assert 'w1' not in self.nodes[0].listwallets()

        # Successfully unload the wallet referenced by the request endpoint
        # Also ensure unload works during walletpassphrase timeout
        w2.encryptwallet('test')
        w2.walletpassphrase('test', 1)
        w2.unloadwallet()
        time.sleep(1.1)
        assert 'w2' not in self.nodes[0].listwallets()

        # Successfully unload all wallets
        for wallet_name in self.nodes[0].listwallets():
            self.nodes[0].unloadwallet(wallet_name)
        assert_equal(self.nodes[0].listwallets(), [])
        assert_raises_rpc_error(
            -32601,
            "Method not found (wallet method is disabled because no wallet is loaded)",
            self.nodes[0].getwalletinfo)

        # Successfully load a previously unloaded wallet
        self.nodes[0].loadwallet('w1')
        assert_equal(self.nodes[0].listwallets(), ['w1'])
        assert_equal(w1.getwalletinfo()['walletname'], 'w1')

        assert_equal(
            sorted(
                map(lambda w: w['name'],
                    self.nodes[0].listwalletdir()['wallets'])), [
                        '',
                        os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7',
                        'w7_symlink', 'w8', 'w8_copy', 'w9'
                    ])

        # Test backing up and restoring wallets
        self.log.info("Test wallet backup")
        self.restart_node(0, ['-nowallet'])
        for wallet_name in wallet_names:
            self.nodes[0].loadwallet(wallet_name)
        for wallet_name in wallet_names:
            rpc = self.nodes[0].get_wallet_rpc(wallet_name)
            addr = rpc.getnewaddress()
            backup = os.path.join(self.options.tmpdir, 'backup.dat')
            rpc.backupwallet(backup)
            self.nodes[0].unloadwallet(wallet_name)
            shutil.copyfile(empty_wallet, wallet_file(wallet_name))
            self.nodes[0].loadwallet(wallet_name)
            assert_equal(rpc.getaddressinfo(addr)['ismine'], False)
            self.nodes[0].unloadwallet(wallet_name)
            shutil.copyfile(backup, wallet_file(wallet_name))
            self.nodes[0].loadwallet(wallet_name)
            assert_equal(rpc.getaddressinfo(addr)['ismine'], True)

        # Test .walletlock file is closed
        self.start_node(1)
        wallet = os.path.join(self.options.tmpdir, 'my_wallet')
        self.nodes[0].createwallet(wallet)
        assert_raises_rpc_error(
            -4, "Error initializing wallet database environment",
            self.nodes[1].loadwallet, wallet)
        self.nodes[0].unloadwallet(wallet)
        self.nodes[1].loadwallet(wallet)
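# The concurrent-loading step above relies on a module-level helper and flag
# that are not included in this excerpt. A minimal sketch, assuming a racing
# load surfaces as JSONRPCException (from test_framework.authproxy) with code
# -4 and the message "Wallet already being loading":
got_loading_error = False

def test_load_unload(node, name):
    global got_loading_error
    for _ in range(10):
        if got_loading_error:
            return
        try:
            node.loadwallet(name)
            node.unloadwallet(name)
        except JSONRPCException as e:
            if e.error['code'] == -4 and 'Wallet already being loading' in e.error['message']:
                got_loading_error = True
                return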
Example #38
0
    def run_test(self):
        self.log.info("Mining blocks...")
        self.nodes[0].generate(110)

        addr_P2SH_SEGWIT = self.nodes[0].getnewaddress("", "p2sh-segwit")
        pubk1 = self.nodes[0].getaddressinfo(addr_P2SH_SEGWIT)['pubkey']
        addr_LEGACY = self.nodes[0].getnewaddress("", "legacy")
        pubk2 = self.nodes[0].getaddressinfo(addr_LEGACY)['pubkey']
        addr_BECH32 = self.nodes[0].getnewaddress("", "bech32")
        pubk3 = self.nodes[0].getaddressinfo(addr_BECH32)['pubkey']
        self.nodes[0].sendtoaddress(addr_P2SH_SEGWIT, 0.001)
        self.nodes[0].sendtoaddress(addr_LEGACY, 0.002)
        self.nodes[0].sendtoaddress(addr_BECH32, 0.004)

        #send to child keys of tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK
        self.nodes[0].sendtoaddress("mkHV1C6JLheLoUSSZYk7x3FH5tnx9bu7yc",
                                    0.008)  # (m/0'/0'/0')
        self.nodes[0].sendtoaddress("mipUSRmJAj2KrjSvsPQtnP8ynUon7FhpCR",
                                    0.016)  # (m/0'/0'/1')
        self.nodes[0].sendtoaddress("n37dAGe6Mq1HGM9t4b6rFEEsDGq7Fcgfqg",
                                    0.032)  # (m/0'/0'/1500')
        self.nodes[0].sendtoaddress("mqS9Rpg8nNLAzxFExsgFLCnzHBsoQ3PRM6",
                                    0.064)  # (m/0'/0'/0)
        self.nodes[0].sendtoaddress("mnTg5gVWr3rbhHaKjJv7EEEc76ZqHgSj4S",
                                    0.128)  # (m/0'/0'/1)
        self.nodes[0].sendtoaddress("mketCd6B9U9Uee1iCsppDJJBHfvi6U6ukC",
                                    0.256)  # (m/0'/0'/1500)
        self.nodes[0].sendtoaddress("mj8zFzrbBcdaWXowCQ1oPZ4qioBVzLzAp7",
                                    0.512)  # (m/1/1/0')
        self.nodes[0].sendtoaddress("mfnKpKQEftniaoE1iXuMMePQU3PUpcNisA",
                                    1.024)  # (m/1/1/1')
        self.nodes[0].sendtoaddress("mou6cB1kaP1nNJM1sryW6YRwnd4shTbXYQ",
                                    2.048)  # (m/1/1/1500')
        self.nodes[0].sendtoaddress("mtfUoUax9L4tzXARpw1oTGxWyoogp52KhJ",
                                    4.096)  # (m/1/1/0)
        self.nodes[0].sendtoaddress("mxp7w7j8S1Aq6L8StS2PqVvtt4HGxXEvdy",
                                    8.192)  # (m/1/1/1)
        self.nodes[0].sendtoaddress("mpQ8rokAhp1TAtJQR6F6TaUmjAWkAWYYBq",
                                    16.384)  # (m/1/1/1500)

        self.nodes[0].generate(1)

        self.log.info("Stop node, remove wallet, mine again some blocks...")
        self.stop_node(0)
        shutil.rmtree(os.path.join(self.nodes[0].datadir, "regtest",
                                   'wallets'))
        self.start_node(0)
        self.nodes[0].generate(110)

        scan = self.nodes[0].scantxoutset("start", [])
        info = self.nodes[0].gettxoutsetinfo()
        assert_equal(scan['success'], True)
        assert_equal(scan['height'], info['height'])
        assert_equal(scan['txouts'], info['txouts'])
        assert_equal(scan['bestblock'], info['bestblock'])
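        # The checks above rely on scantxoutset with an empty scan-object list
        # still reporting whole-UTXO-set statistics, which match gettxoutsetinfo.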

        self.restart_node(0, ['-nowallet'])
        self.log.info("Test if we have found the non HD unspent outputs.")
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "pkh(" + pubk1 + ")", "pkh(" + pubk2 + ")",
                "pkh(" + pubk3 + ")"
            ])['total_amount'], Decimal("0.002"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "wpkh(" + pubk1 + ")", "wpkh(" + pubk2 + ")",
                "wpkh(" + pubk3 + ")"
            ])['total_amount'], Decimal("0.004"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "sh(wpkh(" + pubk1 + "))", "sh(wpkh(" + pubk2 + "))",
                "sh(wpkh(" + pubk3 + "))"
            ])['total_amount'], Decimal("0.001"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(" + pubk1 + ")", "combo(" + pubk2 + ")",
                "combo(" + pubk3 + ")"
            ])['total_amount'], Decimal("0.007"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "addr(" + addr_P2SH_SEGWIT + ")", "addr(" + addr_LEGACY + ")",
                "addr(" + addr_BECH32 + ")"
            ])['total_amount'], Decimal("0.007"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "addr(" + addr_P2SH_SEGWIT + ")", "addr(" + addr_LEGACY + ")",
                "combo(" + pubk3 + ")"
            ])['total_amount'], Decimal("0.007"))

        self.log.info("Test range validation.")
        assert_raises_rpc_error(-8, "End of range is too high",
                                self.nodes[0].scantxoutset, "start",
                                [{
                                    "desc": "desc",
                                    "range": -1
                                }])
        assert_raises_rpc_error(-8, "Range should be greater or equal than 0",
                                self.nodes[0].scantxoutset, "start",
                                [{
                                    "desc": "desc",
                                    "range": [-1, 10]
                                }])
        assert_raises_rpc_error(
            -8, "End of range is too high", self.nodes[0].scantxoutset,
            "start", [{
                "desc": "desc",
                "range": [(2 << 31 + 1) - 1000000, (2 << 31 + 1)]
            }])
        assert_raises_rpc_error(
            -8, "Range specified as [begin,end] must not have begin after end",
            self.nodes[0].scantxoutset, "start", [{
                "desc": "desc",
                "range": [2, 1]
            }])
        assert_raises_rpc_error(-8, "Range is too large",
                                self.nodes[0].scantxoutset, "start",
                                [{
                                    "desc": "desc",
                                    "range": [0, 1000001]
                                }])

        self.log.info("Test extended key derivation.")
        # Run various scans, and verify that the sum of the amounts of the matches corresponds to the expected subset.
        # Note that all amounts in the UTXO set are powers of 2 multiplied by 0.001 BTC, so each amount uniquely identifies a subset.
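        # For example, the first ranged scan below (range 1499 over .../0'/0h/*h)
        # matches only m/0'/0'/0' (0.008) and m/0'/0'/1' (0.016); the reported
        # total of 0.024 therefore identifies exactly that pair.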
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/0h)"
            ])['total_amount'], Decimal("0.008"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0'/1h)"
            ])['total_amount'], Decimal("0.016"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/1500')"
            ])['total_amount'], Decimal("0.032"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0h/0)"
            ])['total_amount'], Decimal("0.064"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/1)"
            ])['total_amount'], Decimal("0.128"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/1500)"
            ])['total_amount'], Decimal("0.256"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/*h)",
                "range": 1499
            }])['total_amount'], Decimal("0.024"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0'/*h)",
                "range": 1500
            }])['total_amount'], Decimal("0.056"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/*)",
                "range": 1499
            }])['total_amount'], Decimal("0.192"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/*)",
                "range": 1500
            }])['total_amount'], Decimal("0.448"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0')"
            ])['total_amount'], Decimal("0.512"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1')"
            ])['total_amount'], Decimal("1.024"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1500h)"
            ])['total_amount'], Decimal("2.048"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)"
            ])['total_amount'], Decimal("4.096"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1)"
            ])['total_amount'], Decimal("8.192"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1500)"
            ])['total_amount'], Decimal("16.384"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/0)"
            ])['total_amount'], Decimal("4.096"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo([abcdef88/1/2'/3/4h]tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/1)"
            ])['total_amount'], Decimal("8.192"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [
                "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/1500)"
            ])['total_amount'], Decimal("16.384"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*')",
                "range": 1499
            }])['total_amount'], Decimal("1.536"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*')",
                "range": 1500
            }])['total_amount'], Decimal("3.584"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)",
                "range": 1499
            }])['total_amount'], Decimal("12.288"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)",
                "range": 1500
            }])['total_amount'], Decimal("28.672"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)",
                "range": 1499
            }])['total_amount'], Decimal("12.288"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)",
                "range": 1500
            }])['total_amount'], Decimal("28.672"))
        assert_equal(
            self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)",
                "range": [1500, 1500]
            }])['total_amount'], Decimal("16.384"))

        # Test the reported descriptors for a few matches
        assert_equal(
            descriptors(self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/*)",
                "range": 1499
            }])), [
                "pkh([0c5f9a1e/0'/0'/0]026dbd8b2315f296d36e6b6920b1579ca75569464875c7ebe869b536a7d9503c8c)#dzxw429x",
                "pkh([0c5f9a1e/0'/0'/1]033e6f25d76c00bedb3a8993c7d5739ee806397f0529b1b31dda31ef890f19a60c)#43rvceed"
            ])
        assert_equal(
            descriptors(self.nodes[0].scantxoutset("start", [
                "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)"
            ])), [
                "pkh([0c5f9a1e/1/1/0]03e1c5b6e650966971d7e71ef2674f80222752740fc1dfd63bbbd220d2da9bd0fb)#cxmct4w8"
            ])
        assert_equal(
            descriptors(self.nodes[0].scantxoutset("start", [{
                "desc":
                "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)",
                "range": 1500
            }])), [
                'pkh([0c5f9a1e/1/1/0]03e1c5b6e650966971d7e71ef2674f80222752740fc1dfd63bbbd220d2da9bd0fb)#cxmct4w8',
                'pkh([0c5f9a1e/1/1/1500]03832901c250025da2aebae2bfb38d5c703a57ab66ad477f9c578bfbcd78abca6f)#vchwd07g',
                'pkh([0c5f9a1e/1/1/1]030d820fc9e8211c4169be8530efbc632775d8286167afd178caaf1089b77daba7)#z2t3ypsa'
            ])
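# The descriptors() helper used above is not shown in this excerpt. A minimal
# sketch, assuming scantxoutset lists its matches under an 'unspents' key with
# a 'desc' field on each entry:
def descriptors(out):
    return sorted(u['desc'] for u in out['unspents'])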
Example #39
0
    def fail_accept(self, node, error_msg, txid, sign, redeem_script=""):
        assert_raises_rpc_error(-26, error_msg, send_to_witness, use_p2wsh=1,
                                node=node, utxo=getutxo(txid),
                                pubkey=self.pubkey[0], encode_p2sh=False,
                                amount=Decimal("49.998"), sign=sign,
                                insert_redeem_script=redeem_script)
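# getutxo() is a helper from the surrounding test that is not shown here. A
# plausible sketch, assuming the output to spend is always vout 0 of the
# given txid:
def getutxo(txid):
    return {"txid": txid, "vout": 0}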
Example #40
0
    def run_test(self):
        # This test should be used to verify correct behaviour of deprecated
        # RPC methods with and without the -deprecatedrpc flags. For example:
        #
        # self.log.info("Make sure that -deprecatedrpc=createmultisig allows it to take addresses")
        # assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, [self.nodes[0].getnewaddress()])
        # self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])

        self.log.info("Test validateaddress deprecation")
        SOME_ADDRESS = "mnvGjUy3NMj67yJ6gkK5o9e5RS33Z2Vqcu"  # This is just some random address to pass as a parameter to validateaddress
        dep_validate_address = self.nodes[0].validateaddress(SOME_ADDRESS)
        assert "ismine" not in dep_validate_address
        not_dep_val = self.nodes[1].validateaddress(SOME_ADDRESS)
        assert "ismine" in not_dep_val

        self.log.info("Test accounts deprecation")
        # The following account RPC methods are deprecated:
        # - getaccount
        # - getaccountaddress
        # - getaddressesbyaccount
        # - getreceivedbyaccount
        # - listaccounts
        # - listreceivedbyaccount
        # - move
        # - setaccount
        #
        # The following 'label' RPC methods are usable both with and without the
        # -deprecatedrpc=accounts switch enabled.
        # - getaddressesbylabel
        # - getreceivedbylabel
        # - listlabels
        # - listreceivedbylabel
        # - setlabel
        #
        address0 = self.nodes[0].getnewaddress()
        self.nodes[0].generatetoaddress(101, address0)
        self.sync_all()
        address1 = self.nodes[1].getnewaddress()
        self.nodes[1].generatetoaddress(101, address1)

        self.log.info("- getaccount")
        assert_raises_rpc_error(-32, "getaccount is deprecated",
                                self.nodes[0].getaccount, address0)
        self.nodes[1].getaccount(address1)

        self.log.info("- setaccount")
        assert_raises_rpc_error(-32, "setaccount is deprecated",
                                self.nodes[0].setaccount, address0, "label0")
        self.nodes[1].setaccount(address1, "label1")

        self.log.info("- setlabel")
        self.nodes[0].setlabel(address0, "label0")
        self.nodes[1].setlabel(address1, "label1")

        self.log.info("- getaccountaddress")
        assert_raises_rpc_error(-32, "getaccountaddress is deprecated",
                                self.nodes[0].getaccountaddress, "label0")
        self.nodes[1].getaccountaddress("label1")

        self.log.info("- getaddressesbyaccount")
        assert_raises_rpc_error(-32, "getaddressesbyaccount is deprecated",
                                self.nodes[0].getaddressesbyaccount, "label0")
        self.nodes[1].getaddressesbyaccount("label1")

        self.log.info("- getaddressesbylabel")
        self.nodes[0].getaddressesbylabel("label0")
        self.nodes[1].getaddressesbylabel("label1")

        self.log.info("- getreceivedbyaccount")
        assert_raises_rpc_error(-32, "getreceivedbyaccount is deprecated",
                                self.nodes[0].getreceivedbyaccount, "label0")
        self.nodes[1].getreceivedbyaccount("label1")

        self.log.info("- getreceivedbylabel")
        self.nodes[0].getreceivedbylabel("label0")
        self.nodes[1].getreceivedbylabel("label1")

        self.log.info("- listaccounts")
        assert_raises_rpc_error(-32, "listaccounts is deprecated",
                                self.nodes[0].listaccounts)
        self.nodes[1].listaccounts()

        self.log.info("- listlabels")
        self.nodes[0].listlabels()
        self.nodes[1].listlabels()

        self.log.info("- listreceivedbyaccount")
        assert_raises_rpc_error(-32, "listreceivedbyaccount is deprecated",
                                self.nodes[0].listreceivedbyaccount)
        self.nodes[1].listreceivedbyaccount()

        self.log.info("- listreceivedbylabel")
        self.nodes[0].listreceivedbylabel()
        self.nodes[1].listreceivedbylabel()

        self.log.info("- move")
        assert_raises_rpc_error(-32, "move is deprecated", self.nodes[0].move,
                                "label0", "label0b", 10)
        self.nodes[1].move("label1", "label1b", 10)
Example #41
0
    def run_test(self):
        self.log.info('prepare some coins for multiple *rawtransaction commands')
        self.nodes[2].generate(1)
        self.sync_all()
        self.nodes[0].generate(101)
        self.sync_all()
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.5)
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.0)
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),5.0)
        self.sync_all()
        self.nodes[0].generate(5)
        self.sync_all()

        self.log.info('Test getrawtransaction on genesis block coinbase returns an error')
        block = self.nodes[0].getblock(self.nodes[0].getblockhash(0))
        assert_raises_rpc_error(-5, "The genesis block coinbase is not considered an ordinary transaction", self.nodes[0].getrawtransaction, block['merkleroot'])

        self.log.info('Check parameter types and required parameters of createrawtransaction')
        # Test `createrawtransaction` required parameters
        assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction)
        assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, [])

        # Test `createrawtransaction` invalid extra parameters
        assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, [], {}, 0, False, 'foo')

        # Test `createrawtransaction` invalid `inputs`
        txid = '1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000'
        assert_raises_rpc_error(-3, "Expected type array", self.nodes[0].createrawtransaction, 'foo', {})
        assert_raises_rpc_error(-1, "JSON value is not an object as expected", self.nodes[0].createrawtransaction, ['foo'], {})
        assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].createrawtransaction, [{}], {})
        assert_raises_rpc_error(-8, "txid must be of length 64 (not 3, for 'foo')", self.nodes[0].createrawtransaction, [{'txid': 'foo'}], {})
        assert_raises_rpc_error(-8, "txid must be hexadecimal string (not 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844')", self.nodes[0].createrawtransaction, [{'txid': 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844'}], {})
        assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid}], {})
        assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 'foo'}], {})
        assert_raises_rpc_error(-8, "Invalid parameter, vout must be positive", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': -1}], {})
        assert_raises_rpc_error(-8, "Invalid parameter, sequence number is out of range", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 0, 'sequence': -1}], {})

        # Test `createrawtransaction` invalid `outputs`
        address = self.nodes[0].getnewaddress()
        address2 = self.nodes[0].getnewaddress()
        assert_raises_rpc_error(-1, "JSON value is not an array as expected", self.nodes[0].createrawtransaction, [], 'foo')
        self.nodes[0].createrawtransaction(inputs=[], outputs={})  # Should not throw for backwards compatibility
        self.nodes[0].createrawtransaction(inputs=[], outputs=[])
        assert_raises_rpc_error(-8, "Data must be hexadecimal string", self.nodes[0].createrawtransaction, [], {'data': 'foo'})
        assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].createrawtransaction, [], {'foo': 0})
        assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].createrawtransaction, [], {address: 'foo'})
        assert_raises_rpc_error(-3, "Amount out of range", self.nodes[0].createrawtransaction, [], {address: -1})
        assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], multidict([(address, 1), (address, 1)]))
        assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], [{address: 1}, {address: 1}])
        assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], [{"data": 'aa'}, {"data": "bb"}])
        assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], multidict([("data", 'aa'), ("data", "bb")]))
        assert_raises_rpc_error(-8, "Invalid parameter, key-value pair must contain exactly one key", self.nodes[0].createrawtransaction, [], [{'a': 1, 'b': 2}])
        assert_raises_rpc_error(-8, "Invalid parameter, key-value pair not an object as expected", self.nodes[0].createrawtransaction, [], [['key-value pair1'], ['2']])

        # Test `createrawtransaction` invalid `locktime`
        assert_raises_rpc_error(-3, "Expected type number", self.nodes[0].createrawtransaction, [], {}, 'foo')
        assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, -1)
        assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, 4294967296)

        # Test `createrawtransaction` invalid `replaceable`
        assert_raises_rpc_error(-3, "Expected type bool", self.nodes[0].createrawtransaction, [], {}, 0, 'foo')

        self.log.info('Check that createrawtransaction accepts an array and object as outputs')
        tx = CTransaction()
        # One output
        tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs={address: 99}))))
        assert_equal(len(tx.vout), 1)
        assert_equal(
            tx.serialize().hex(),
            self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}]),
        )
        # Two outputs
        tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=OrderedDict([(address, 99), (address2, 99)])))))
        assert_equal(len(tx.vout), 2)
        assert_equal(
            tx.serialize().hex(),
            self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}]),
        )
        # Multiple mixed outputs
        tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=multidict([(address, 99), (address2, 99), ('data', '99')])))))
        assert_equal(len(tx.vout), 3)
        assert_equal(
            tx.serialize().hex(),
            self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}, {'data': '99'}]),
        )
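
        # Read-only follow-up sketch: the 'data' output above should decode to an OP_RETURN
        # output (assumption: decoderawtransaction labels such outputs with type 'nulldata').
        decoded = self.nodes[2].decoderawtransaction(tx.serialize().hex())
        assert_equal(decoded['vout'][2]['scriptPubKey']['type'], 'nulldata')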

        for type in ["bech32", "p2sh-segwit", "legacy"]:
            addr = self.nodes[0].getnewaddress("", type)
            addrinfo = self.nodes[0].getaddressinfo(addr)
            pubkey = addrinfo["scriptPubKey"]

            self.log.info('signrawtransactionwithwallet with missing prevtx info (%s)' % type)

            # Test `signrawtransactionwithwallet` invalid `prevtxs`
            inputs  = [ {'txid' : txid, 'vout' : 3, 'sequence' : 1000}]
            outputs = { self.nodes[0].getnewaddress() : 1 }
            rawtx   = self.nodes[0].createrawtransaction(inputs, outputs)

            prevtx = dict(txid=txid, scriptPubKey=pubkey, vout=3, amount=1)
            succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx])
            assert succ["complete"]
            if type == "legacy":
                del prevtx["amount"]
                succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx])
                assert succ["complete"]

            if type != "legacy":
                assert_raises_rpc_error(-3, "Missing amount", self.nodes[0].signrawtransactionwithwallet, rawtx, [
                    {
                        "txid": txid,
                        "scriptPubKey": pubkey,
                        "vout": 3,
                    }
                ])

            assert_raises_rpc_error(-3, "Missing vout", self.nodes[0].signrawtransactionwithwallet, rawtx, [
                {
                    "txid": txid,
                    "scriptPubKey": pubkey,
                    "amount": 1,
                }
            ])
            assert_raises_rpc_error(-3, "Missing txid", self.nodes[0].signrawtransactionwithwallet, rawtx, [
                {
                    "scriptPubKey": pubkey,
                    "vout": 3,
                    "amount": 1,
                }
            ])
            assert_raises_rpc_error(-3, "Missing scriptPubKey", self.nodes[0].signrawtransactionwithwallet, rawtx, [
                {
                    "txid": txid,
                    "vout": 3,
                    "amount": 1
                }
            ])

        #########################################
        # sendrawtransaction with missing input #
        #########################################

        self.log.info('sendrawtransaction with missing input')
        inputs  = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1}] # won't exist
        outputs = { self.nodes[0].getnewaddress() : 4.998 }
        rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
        rawtx   = self.nodes[2].signrawtransactionwithwallet(rawtx)

        # This will raise an exception since there are missing inputs
        assert_raises_rpc_error(-25, "Missing inputs", self.nodes[2].sendrawtransaction, rawtx['hex'])
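
        # Follow-up sketch: testmempoolaccept should flag the same transaction as not allowed.
        # The exact reject-reason string is deliberately left unchecked here.
        testres = self.nodes[2].testmempoolaccept([rawtx['hex']])[0]
        assert_equal(testres['allowed'], False)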

        #####################################
        # getrawtransaction with block hash #
        #####################################

        # make a tx by sending then generate 2 blocks; block1 has the tx in it
        tx = self.nodes[2].sendtoaddress(self.nodes[1].getnewaddress(), 1)
        block1, block2 = self.nodes[2].generate(2)
        self.sync_all()
        # We should be able to get the raw transaction by providing the correct block
        gottx = self.nodes[0].getrawtransaction(tx, True, block1)
        assert_equal(gottx['txid'], tx)
        assert_equal(gottx['in_active_chain'], True)
        # We should not have the 'in_active_chain' flag when we don't provide a block
        gottx = self.nodes[0].getrawtransaction(tx, True)
        assert_equal(gottx['txid'], tx)
        assert 'in_active_chain' not in gottx
        # We should not get the tx if we provide an unrelated block
        assert_raises_rpc_error(-5, "No such transaction found", self.nodes[0].getrawtransaction, tx, True, block2)
        # An invalid block hash should raise the correct errors
        assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].getrawtransaction, tx, True, True)
        assert_raises_rpc_error(-8, "parameter 3 must be of length 64 (not 6, for 'foobar')", self.nodes[0].getrawtransaction, tx, True, "foobar")
        assert_raises_rpc_error(-8, "parameter 3 must be of length 64 (not 8, for 'abcd1234')", self.nodes[0].getrawtransaction, tx, True, "abcd1234")
        assert_raises_rpc_error(-8, "parameter 3 must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].getrawtransaction, tx, True, "ZZZ0000000000000000000000000000000000000000000000000000000000000")
        assert_raises_rpc_error(-5, "Block hash not found", self.nodes[0].getrawtransaction, tx, True, "0000000000000000000000000000000000000000000000000000000000000000")
        # Undo the blocks and check in_active_chain
        self.nodes[0].invalidateblock(block1)
        gottx = self.nodes[0].getrawtransaction(txid=tx, verbose=True, blockhash=block1)
        assert_equal(gottx['in_active_chain'], False)
        self.nodes[0].reconsiderblock(block1)
        assert_equal(self.nodes[0].getbestblockhash(), block2)
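
        # Read-only sketch: block1 lists the txid in its 'tx' array, which is what allows
        # getrawtransaction to find the transaction when given that block hash.
        assert tx in self.nodes[0].getblock(block1)['tx']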

        #########################
        # RAW TX MULTISIG TESTS #
        #########################
        # 2of2 test
        addr1 = self.nodes[2].getnewaddress()
        addr2 = self.nodes[2].getnewaddress()

        addr1Obj = self.nodes[2].getaddressinfo(addr1)
        addr2Obj = self.nodes[2].getaddressinfo(addr2)

        # Tests for createmultisig and addmultisigaddress
        assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, ["01020304"])
        self.nodes[0].createmultisig(2, [addr1Obj['pubkey'], addr2Obj['pubkey']]) # createmultisig can only take public keys
        assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 2, [addr1Obj['pubkey'], addr1]) # createmultisig rejects addresses; addmultisigaddress accepts both pubkeys and wallet addresses, as tested below

        mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr1])['address']

        #use balance deltas instead of absolute values
        bal = self.nodes[2].getbalance()

        # send 1.2 BTC to the multisig address
        txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()
        assert_equal(self.nodes[2].getbalance(), bal+Decimal('1.20000000')) #node2 has both keys of the 2of2 ms addr., tx should affect the balance
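
        # Read-only sketch: the address returned by addmultisigaddress is a script address,
        # which getaddressinfo reports via the 'isscript' field.
        assert_equal(self.nodes[2].getaddressinfo(mSigObj)['isscript'], True)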


        # 2of3 test from different nodes
        bal = self.nodes[2].getbalance()
        addr1 = self.nodes[1].getnewaddress()
        addr2 = self.nodes[2].getnewaddress()
        addr3 = self.nodes[2].getnewaddress()

        addr1Obj = self.nodes[1].getaddressinfo(addr1)
        addr2Obj = self.nodes[2].getaddressinfo(addr2)
        addr3Obj = self.nodes[2].getaddressinfo(addr3)

        mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])['address']

        txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
        decTx = self.nodes[0].gettransaction(txId)
        rawTx = self.nodes[0].decoderawtransaction(decTx['hex'])
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()

        #THIS IS AN INCOMPLETE FEATURE
        #NODE2 HAS TWO OF THREE KEYS AND THE FUNDS SHOULD BE SPENDABLE AND COUNT TOWARDS THE BALANCE CALCULATION
        assert_equal(self.nodes[2].getbalance(), bal) #for now, assume the funds of a 2of3 multisig tx are not marked as spendable

        txDetails = self.nodes[0].gettransaction(txId, True)
        rawTx = self.nodes[0].decoderawtransaction(txDetails['hex'])
        vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('2.20000000'))

        bal = self.nodes[0].getbalance()
        inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex'], "amount" : vout['value']}]
        outputs = { self.nodes[0].getnewaddress() : 2.19 }
        rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
        rawTxPartialSigned = self.nodes[1].signrawtransactionwithwallet(rawTx, inputs)
        assert_equal(rawTxPartialSigned['complete'], False) #node1 only has one key, can't comp. sign the tx

        rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx, inputs)
        assert_equal(rawTxSigned['complete'], True) #node2 can sign the tx compl., own two of three keys
        self.nodes[2].sendrawtransaction(rawTxSigned['hex'])
        rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex'])
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()
        assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx
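
        # Read-only sanity sketch: the decoded 2of3 spend references the funding transaction
        # (txId still holds the funding txid here) as its only input.
        assert_equal(len(rawTx['vin']), 1)
        assert_equal(rawTx['vin'][0]['txid'], txId)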

        # 2of2 test for combining transactions
        bal = self.nodes[2].getbalance()
        addr1 = self.nodes[1].getnewaddress()
        addr2 = self.nodes[2].getnewaddress()

        addr1Obj = self.nodes[1].getaddressinfo(addr1)
        addr2Obj = self.nodes[2].getaddressinfo(addr2)

        self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
        mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
        mSigObjValid = self.nodes[2].getaddressinfo(mSigObj)

        txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
        decTx = self.nodes[0].gettransaction(txId)
        rawTx2 = self.nodes[0].decoderawtransaction(decTx['hex'])
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()

        assert_equal(self.nodes[2].getbalance(), bal) # the funds of a 2of2 multisig tx should not be marked as spendable

        txDetails = self.nodes[0].gettransaction(txId, True)
        rawTx2 = self.nodes[0].decoderawtransaction(txDetails['hex'])
        vout = next(o for o in rawTx2['vout'] if o['value'] == Decimal('2.20000000'))

        bal = self.nodes[0].getbalance()
        inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex'], "redeemScript" : mSigObjValid['hex'], "amount" : vout['value']}]
        outputs = { self.nodes[0].getnewaddress() : 2.19 }
        rawTx2 = self.nodes[2].createrawtransaction(inputs, outputs)
        rawTxPartialSigned1 = self.nodes[1].signrawtransactionwithwallet(rawTx2, inputs)
        self.log.debug(rawTxPartialSigned1)
        assert_equal(rawTxPartialSigned1['complete'], False) #node1 only has one key, can't comp. sign the tx

        rawTxPartialSigned2 = self.nodes[2].signrawtransactionwithwallet(rawTx2, inputs)
        self.log.debug(rawTxPartialSigned2)
        assert_equal(rawTxPartialSigned2['complete'], False) #node2 only has one key, can't comp. sign the tx
        rawTxComb = self.nodes[2].combinerawtransaction([rawTxPartialSigned1['hex'], rawTxPartialSigned2['hex']])
        self.log.debug(rawTxComb)
        self.nodes[2].sendrawtransaction(rawTxComb)
        rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb)
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()
        assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx

        # decoderawtransaction tests
        # witness transaction
        encrawtx = "010000000001010000000000000072c1a6a246ae63f74f931e8365e15a089c68d61900000000000000000000ffffffff0100e1f50500000000000102616100000000"
        decrawtx = self.nodes[0].decoderawtransaction(encrawtx, True) # decode as witness transaction
        assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000'))
        assert_raises_rpc_error(-22, 'TX decode failed', self.nodes[0].decoderawtransaction, encrawtx, False) # force decode as non-witness transaction
        # non-witness transaction
        encrawtx = "01000000010000000000000072c1a6a246ae63f74f931e8365e15a089c68d61900000000000000000000ffffffff0100e1f505000000000000000000"
        decrawtx = self.nodes[0].decoderawtransaction(encrawtx, False) # decode as non-witness transaction
        assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000'))

        # getrawtransaction tests
        # 1. valid parameters - only supply txid
        txId = rawTx["txid"]
        assert_equal(self.nodes[0].getrawtransaction(txId), rawTxSigned['hex'])

        # 2. valid parameters - supply txid and 0 for non-verbose
        assert_equal(self.nodes[0].getrawtransaction(txId, 0), rawTxSigned['hex'])

        # 3. valid parameters - supply txid and False for non-verbose
        assert_equal(self.nodes[0].getrawtransaction(txId, False), rawTxSigned['hex'])

        # 4. valid parameters - supply txid and 1 for verbose.
        # We only check the "hex" field of the output so we don't need to update this test every time the output format changes.
        assert_equal(self.nodes[0].getrawtransaction(txId, 1)["hex"], rawTxSigned['hex'])

        # 5. valid parameters - supply txid and True for verbose
        assert_equal(self.nodes[0].getrawtransaction(txId, True)["hex"], rawTxSigned['hex'])

        # 6. invalid parameters - supply txid and string "Flase"
        assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txId, "Flase")

        # 7. invalid parameters - supply txid and empty array
        assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txId, [])

        # 8. invalid parameters - supply txid and empty dict
        assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txId, {})

        inputs  = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 1000}]
        outputs = { self.nodes[0].getnewaddress() : 1 }
        rawtx   = self.nodes[0].createrawtransaction(inputs, outputs)
        decrawtx= self.nodes[0].decoderawtransaction(rawtx)
        assert_equal(decrawtx['vin'][0]['sequence'], 1000)

        # 9. invalid parameters - sequence number out of range
        inputs  = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : -1}]
        outputs = { self.nodes[0].getnewaddress() : 1 }
        assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs)

        # 10. invalid parameters - sequence number out of range
        inputs  = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967296}]
        outputs = { self.nodes[0].getnewaddress() : 1 }
        assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs)

        inputs  = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967294}]
        outputs = { self.nodes[0].getnewaddress() : 1 }
        rawtx   = self.nodes[0].createrawtransaction(inputs, outputs)
        decrawtx= self.nodes[0].decoderawtransaction(rawtx)
        assert_equal(decrawtx['vin'][0]['sequence'], 4294967294)
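
        # A short sketch of the replaceable flag (assumption: when replaceable=True and no
        # explicit sequence is given, the node picks a BIP125-signalling sequence number,
        # i.e. one below 0xfffffffe).
        inputs  = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1}]
        outputs = { self.nodes[0].getnewaddress() : 1 }
        rawtx   = self.nodes[0].createrawtransaction(inputs, outputs, 0, True)
        decrawtx = self.nodes[0].decoderawtransaction(rawtx)
        assert decrawtx['vin'][0]['sequence'] < 0xfffffffe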

        ####################################
        # TRANSACTION VERSION NUMBER TESTS #
        ####################################

        # Test the minimum transaction version number that fits in a signed 32-bit integer.
        # As transaction version is unsigned, this should convert to its unsigned equivalent.
        tx = CTransaction()
        tx.nVersion = -0x80000000
        rawtx = ToHex(tx)
        decrawtx = self.nodes[0].decoderawtransaction(rawtx)
        assert_equal(decrawtx['version'], 0x80000000)

        # Test the maximum transaction version number that fits in a signed 32-bit integer.
        tx = CTransaction()
        tx.nVersion = 0x7fffffff
        rawtx = ToHex(tx)
        decrawtx = self.nodes[0].decoderawtransaction(rawtx)
        assert_equal(decrawtx['version'], 0x7fffffff)
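
        # One more sketch along the same lines (same unsigned-conversion assumption as above):
        # nVersion = -1 should round-trip through the decoder as 0xffffffff.
        tx = CTransaction()
        tx.nVersion = -1
        rawtx = ToHex(tx)
        decrawtx = self.nodes[0].decoderawtransaction(rawtx)
        assert_equal(decrawtx['version'], 0xffffffff)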

        self.log.info('sendrawtransaction/testmempoolaccept with maxfeerate')

        # Test a transaction with a small fee.
        txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0)
        rawTx = self.nodes[0].getrawtransaction(txId, True)
        vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('1.00000000'))

        self.sync_all()
        inputs = [{ "txid" : txId, "vout" : vout['n'] }]
        # Fee 10,000 satoshis, (1 - (10000 sat * 0.00000001 BTC/sat)) = 0.9999
        outputs = { self.nodes[0].getnewaddress() : Decimal("0.99990000") }
        rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
        rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx)
        assert_equal(rawTxSigned['complete'], True)
        # Fee 10,000 satoshis, ~100 b transaction, fee rate should land around 100 sat/byte = 0.00100000 BTC/kB
        # Thus, testmempoolaccept should reject
        testres = self.nodes[2].testmempoolaccept([rawTxSigned['hex']], 0.00001000)[0]
        assert_equal(testres['allowed'], False)
        assert_equal(testres['reject-reason'], '256: absurdly-high-fee')
        # and sendrawtransaction should throw
        assert_raises_rpc_error(-26, "absurdly-high-fee", self.nodes[2].sendrawtransaction, rawTxSigned['hex'], 0.00001000)
        # and the following calls should both succeed
        testres = self.nodes[2].testmempoolaccept(rawtxs=[rawTxSigned['hex']])[0]
        assert_equal(testres['allowed'], True)
        self.nodes[2].sendrawtransaction(hexstring=rawTxSigned['hex'])

        # Test a transaction with a large fee.
        txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0)
        rawTx = self.nodes[0].getrawtransaction(txId, True)
        vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('1.00000000'))

        self.sync_all()
        inputs = [{ "txid" : txId, "vout" : vout['n'] }]
        # Fee 2,000,000 satoshis, (1 - (2000000 sat * 0.00000001 BTC/sat)) = 0.98
        outputs = { self.nodes[0].getnewaddress() : Decimal("0.98000000") }
        rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
        rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx)
        assert_equal(rawTxSigned['complete'], True)
        # Fee 2,000,000 satoshis, ~100 b transaction, fee rate should land around 20,000 sat/byte = 0.20000000 BTC/kB
        # Thus, testmempoolaccept should reject
        testres = self.nodes[2].testmempoolaccept([rawTxSigned['hex']])[0]
        assert_equal(testres['allowed'], False)
        assert_equal(testres['reject-reason'], '256: absurdly-high-fee')
        # and sendrawtransaction should throw
        assert_raises_rpc_error(-26, "absurdly-high-fee", self.nodes[2].sendrawtransaction, rawTxSigned['hex'])
        # and the following calls should both succeed
        testres = self.nodes[2].testmempoolaccept(rawtxs=[rawTxSigned['hex']], maxfeerate='0.20000000')[0]
        assert_equal(testres['allowed'], True)
        self.nodes[2].sendrawtransaction(hexstring=rawTxSigned['hex'], maxfeerate='0.20000000')
Example #42
0
    def run_test(self):
        wallet_unenc_dump = os.path.join(self.nodes[0].datadir, "wallet.unencrypted.dump")
        wallet_enc_dump = os.path.join(self.nodes[0].datadir, "wallet.encrypted.dump")

        # generate 30 addresses to compare against the dump
        # - 10 legacy P2PKH
        # - 10 P2SH-segwit
        # - 10 bech32
        test_addr_count = 10
        addrs = []
        for address_type in ['legacy', 'p2sh-segwit', 'bech32']:
            for i in range(0, test_addr_count):
                addr = self.nodes[0].getnewaddress(address_type=address_type)
                vaddr = self.nodes[0].getaddressinfo(addr)  # required to get hd keypath
                addrs.append(vaddr)

        # Test scripts dump by adding a 1-of-1 multisig address
        multisig_addr = self.nodes[0].addmultisigaddress(1, [addrs[1]["address"]])["address"]

        # Refill the keypool. getnewaddress() refills the keypool *before* taking a key from
        # the keypool, so the final call to getnewaddress leaves the keypool with one key below
        # its capacity
        self.nodes[0].keypoolrefill()

        # dump unencrypted wallet
        result = self.nodes[0].dumpwallet(wallet_unenc_dump)
        assert_equal(result['filename'], wallet_unenc_dump)

        found_legacy_addr, found_p2sh_segwit_addr, found_bech32_addr, found_script_addr, found_addr_chg, found_addr_rsv, hd_master_addr_unenc = \
            read_dump(wallet_unenc_dump, addrs, [multisig_addr], None)
        assert_equal(found_legacy_addr, test_addr_count)  # all keys must be in the dump
        assert_equal(found_p2sh_segwit_addr, test_addr_count)  # all keys must be in the dump
        assert_equal(found_bech32_addr, test_addr_count)  # all keys must be in the dump
        assert_equal(found_script_addr, 1)  # all scripts must be in the dump
        assert_equal(found_addr_chg, 0)  # 0 blocks were mined
        assert_equal(found_addr_rsv, 90 * 2)  # 90 keys plus 100% internal keys
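
        # A minimal sketch of what the dump looks like on disk (assumption: dumpwallet writes
        # a plain-text file whose header lines start with '#').
        with open(wallet_unenc_dump, 'r', encoding='utf8') as f:
            assert f.readline().startswith('#')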

        # encrypt wallet, unlock and dump
        self.nodes[0].encryptwallet('test')
        self.nodes[0].walletpassphrase('test', 10)
        # Should be a no-op:
        self.nodes[0].keypoolrefill()
        self.nodes[0].dumpwallet(wallet_enc_dump)

        found_legacy_addr, found_p2sh_segwit_addr, found_bech32_addr, found_script_addr, found_addr_chg, found_addr_rsv, _ = \
            read_dump(wallet_enc_dump, addrs, [multisig_addr], hd_master_addr_unenc)
        assert_equal(found_legacy_addr, test_addr_count)  # all keys must be in the dump
        assert_equal(found_p2sh_segwit_addr, test_addr_count)  # all keys must be in the dump
        assert_equal(found_bech32_addr, test_addr_count)  # all keys must be in the dump
        assert_equal(found_script_addr, 1)
        assert_equal(found_addr_chg, 90 * 2)  # old reserve keys are marked as change now
        assert_equal(found_addr_rsv, 90 * 2)

        # Overwriting should fail
        assert_raises_rpc_error(-8, "already exists", lambda: self.nodes[0].dumpwallet(wallet_enc_dump))

        # Restart node with new wallet, and test importwallet
        self.stop_node(0)
        self.start_node(0, ['-wallet=w2'])

        # Make sure the address is not IsMine before import
        result = self.nodes[0].getaddressinfo(multisig_addr)
        assert not result['ismine']

        self.nodes[0].importwallet(wallet_unenc_dump)

        # Now check IsMine is true
        result = self.nodes[0].getaddressinfo(multisig_addr)
        assert result['ismine']

        # Overwriting should fail
        assert_raises_rpc_error(-8, "already exists", lambda: self.nodes[0].dumpwallet(wallet_enc_dump))
Example #43
0
    def run_test(self):
        node = self.nodes[0]

        data_dir = lambda *p: os.path.join(node.datadir, self.chain, *p)
        wallet_dir = lambda *p: data_dir('wallets', *p)
        wallet = lambda name: node.get_wallet_rpc(name)

        def wallet_file(name):
            if name == self.default_wallet_name:
                return wallet_dir(self.default_wallet_name,
                                  self.wallet_data_filename)
            if os.path.isdir(wallet_dir(name)):
                return wallet_dir(name, "wallet.dat")
            return wallet_dir(name)

        assert_equal(self.nodes[0].listwalletdir(),
                     {'wallets': [{
                         'name': self.default_wallet_name
                     }]})

        # check wallet.dat is created
        self.stop_nodes()
        assert_equal(
            os.path.isfile(
                wallet_dir(self.default_wallet_name,
                           self.wallet_data_filename)), True)

        # create symlink to verify wallet directory path can be referenced
        # through symlink
        os.mkdir(wallet_dir('w7'))
        os.symlink('w7', wallet_dir('w7_symlink'))

        os.symlink('..', wallet_dir('recursive_dir_symlink'))

        os.mkdir(wallet_dir('self_walletdat_symlink'))
        os.symlink('wallet.dat',
                   wallet_dir('self_walletdat_symlink/wallet.dat'))

        # rename wallet.dat to make sure plain wallet file paths (as opposed to
        # directory paths) can be loaded
        # create another dummy wallet for use in testing backups later
        self.start_node(0)
        node.createwallet("empty")
        node.createwallet("plain")
        node.createwallet("created")
        self.stop_nodes()
        empty_wallet = os.path.join(self.options.tmpdir, 'empty.dat')
        os.rename(wallet_file("empty"), empty_wallet)
        shutil.rmtree(wallet_dir("empty"))
        empty_created_wallet = os.path.join(self.options.tmpdir,
                                            'empty.created.dat')
        os.rename(wallet_dir("created", self.wallet_data_filename),
                  empty_created_wallet)
        shutil.rmtree(wallet_dir("created"))
        os.rename(wallet_file("plain"), wallet_dir("w8"))
        shutil.rmtree(wallet_dir("plain"))

        # restart node with a mix of wallet names:
        #   w1, w2, w3 - to verify new wallets created when non-existing paths specified
        #   w          - to verify wallet name matching works when one wallet path is prefix of another
        #   sub/w5     - to verify relative wallet path is created correctly
        #   extern/w6  - to verify absolute wallet path is created correctly
        #   w7_symlink - to verify symlinked wallet path is initialized correctly
        #   w8         - to verify existing wallet file is loaded correctly. Not tested for SQLite wallets as this is a deprecated BDB behavior.
        #   ''         - to verify default wallet file is created correctly
        to_create = ['w1', 'w2', 'w3', 'w', 'sub/w5', 'w7_symlink']
        in_wallet_dir = [w.replace('/', os.path.sep)
                         for w in to_create]  # Wallets in the wallet dir
        in_wallet_dir.append(
            'w7'
        )  # w7 is not loaded or created, but will be listed by listwalletdir because of w7_symlink
        to_create.append(
            os.path.join(self.options.tmpdir, 'extern/w6')
        )  # External, not in the wallet dir, so we need to avoid adding it to in_wallet_dir
        to_load = [self.default_wallet_name]
        if not self.options.descriptors:
            to_load.append('w8')
        wallet_names = to_create + to_load  # Wallet names loaded in the wallet
        in_wallet_dir += to_load  # The loaded wallets are also in the wallet dir
        self.start_node(0)
        for wallet_name in to_create:
            self.nodes[0].createwallet(wallet_name)
        for wallet_name in to_load:
            self.nodes[0].loadwallet(wallet_name)

        os.mkdir(wallet_dir('no_access'))
        os.chmod(wallet_dir('no_access'), 0)
        try:
            with self.nodes[0].assert_debug_log(expected_msgs=[
                    'Too many levels of symbolic links', 'Error scanning'
            ]):
                walletlist = self.nodes[0].listwalletdir()['wallets']
        finally:
            # Need to ensure access is restored for cleanup
            os.chmod(wallet_dir('no_access'),
                     stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
        assert_equal(sorted(map(lambda w: w['name'], walletlist)),
                     sorted(in_wallet_dir))

        assert_equal(set(node.listwallets()), set(wallet_names))

        # should raise rpc error if wallet path can't be created
        err_code = -4 if self.options.descriptors else -1
        assert_raises_rpc_error(err_code,
                                "boost::filesystem::create_directory:",
                                self.nodes[0].createwallet, "w8/bad")

        # check that all requested wallets were created
        self.stop_node(0)
        for wallet_name in wallet_names:
            assert_equal(os.path.isfile(wallet_file(wallet_name)), True)

        self.nodes[0].assert_start_raises_init_error(
            ['-walletdir=wallets'],
            'Error: Specified -walletdir "wallets" does not exist')
        self.nodes[0].assert_start_raises_init_error(
            ['-walletdir=wallets'],
            'Error: Specified -walletdir "wallets" is a relative path',
            cwd=data_dir())
        self.nodes[0].assert_start_raises_init_error(
            ['-walletdir=debug.log'],
            'Error: Specified -walletdir "debug.log" is not a directory',
            cwd=data_dir())

        self.start_node(0, ['-wallet=w1', '-wallet=w1'])
        self.stop_node(0, 'Warning: Ignoring duplicate -wallet w1.')

        if not self.options.descriptors:
            # Only BDB refuses to open duplicate wallet files; SQLite has no such limitation.
            # While adding one may be desirable in the future, it is not necessary.
            # should not initialize if one wallet is a copy of another
            shutil.copyfile(wallet_dir('w8'), wallet_dir('w8_copy'))
            in_wallet_dir.append('w8_copy')
            exp_stderr = r"BerkeleyDatabase: Can't open database w8_copy \(duplicates fileid \w+ from w8\)"
            self.nodes[0].assert_start_raises_init_error(
                ['-wallet=w8', '-wallet=w8_copy'],
                exp_stderr,
                match=ErrorMatch.PARTIAL_REGEX)

        # should not initialize if wallet file is a symlink
        os.symlink('w8', wallet_dir('w8_symlink'))
        self.nodes[0].assert_start_raises_init_error(
            ['-wallet=w8_symlink'],
            r'Error: Invalid -wallet path \'w8_symlink\'\. .*',
            match=ErrorMatch.FULL_REGEX)

        # should not initialize if the specified walletdir does not exist
        self.nodes[0].assert_start_raises_init_error(
            ['-walletdir=bad'],
            'Error: Specified -walletdir "bad" does not exist')
        # should not initialize if the specified walletdir is not a directory
        not_a_dir = wallet_dir('notadir')
        open(not_a_dir, 'a', encoding="utf8").close()
        self.nodes[0].assert_start_raises_init_error(
            ['-walletdir=' + not_a_dir], 'Error: Specified -walletdir "' +
            not_a_dir + '" is not a directory')

        self.log.info("Do not allow -upgradewallet with multiwallet")
        self.nodes[0].assert_start_raises_init_error(
            ['-upgradewallet'],
            "Error: Error parsing command line arguments: Invalid parameter -upgradewallet")

        # if wallets/ doesn't exist, datadir should be the default wallet dir
        wallet_dir2 = data_dir('walletdir')
        os.rename(wallet_dir(), wallet_dir2)
        self.start_node(0)
        self.nodes[0].createwallet("w4")
        self.nodes[0].createwallet("w5")
        assert_equal(set(node.listwallets()), {"w4", "w5"})
        w5 = wallet("w5")
        node.generatetoaddress(nblocks=1, address=w5.getnewaddress())

        # now if wallets/ exists again, but the rootdir is specified as the walletdir, w4 and w5 should still be loaded
        os.rename(wallet_dir2, wallet_dir())
        self.restart_node(0, ['-nowallet', '-walletdir=' + data_dir()])
        self.nodes[0].loadwallet("w4")
        self.nodes[0].loadwallet("w5")
        assert_equal(set(node.listwallets()), {"w4", "w5"})
        w5 = wallet("w5")
        w5_info = w5.getwalletinfo()
        assert_equal(w5_info['immature_balance'], 50)

        competing_wallet_dir = os.path.join(self.options.tmpdir,
                                            'competing_walletdir')
        os.mkdir(competing_wallet_dir)
        self.restart_node(0,
                          ['-nowallet', '-walletdir=' + competing_wallet_dir])
        self.nodes[0].createwallet(self.default_wallet_name)
        if self.options.descriptors:
            exp_stderr = r"Error: SQLiteDatabase: Unable to obtain an exclusive lock on the database, is it being used by another bitcoind?"
        else:
            exp_stderr = r"Error: Error initializing wallet database environment \"\S+competing_walletdir\S*\"!"
        self.nodes[1].assert_start_raises_init_error(
            ['-walletdir=' + competing_wallet_dir],
            exp_stderr,
            match=ErrorMatch.PARTIAL_REGEX)

        self.restart_node(0)
        for wallet_name in wallet_names:
            self.nodes[0].loadwallet(wallet_name)

        assert_equal(
            sorted(
                map(lambda w: w['name'],
                    self.nodes[0].listwalletdir()['wallets'])),
            sorted(in_wallet_dir))

        wallets = [wallet(w) for w in wallet_names]
        wallet_bad = wallet("bad")

        # check wallet names and balances
        node.generatetoaddress(nblocks=1, address=wallets[0].getnewaddress())
        for wallet_name, wallet in zip(wallet_names, wallets):
            info = wallet.getwalletinfo()
            assert_equal(info['immature_balance'],
                         50 if wallet is wallets[0] else 0)
            assert_equal(info['walletname'], wallet_name)

        # accessing invalid wallet fails
        assert_raises_rpc_error(
            -18, "Requested wallet does not exist or is not loaded",
            wallet_bad.getwalletinfo)

        # accessing wallet RPC without using wallet endpoint fails
        assert_raises_rpc_error(-19, "Wallet file not specified",
                                node.getwalletinfo)

        w1, w2, w3, w4, *_ = wallets
        node.generatetoaddress(nblocks=101, address=w1.getnewaddress())
        assert_equal(w1.getbalance(), 100)
        assert_equal(w2.getbalance(), 0)
        assert_equal(w3.getbalance(), 0)
        assert_equal(w4.getbalance(), 0)

        w1.sendtoaddress(w2.getnewaddress(), 1)
        w1.sendtoaddress(w3.getnewaddress(), 2)
        w1.sendtoaddress(w4.getnewaddress(), 3)
        node.generatetoaddress(nblocks=1, address=w1.getnewaddress())
        assert_equal(w2.getbalance(), 1)
        assert_equal(w3.getbalance(), 2)
        assert_equal(w4.getbalance(), 3)

        batch = w1.batch([
            w1.getblockchaininfo.get_request(),
            w1.getwalletinfo.get_request()
        ])
        assert_equal(batch[0]["result"]["chain"], self.chain)
        assert_equal(batch[1]["result"]["walletname"], "w1")

        self.log.info('Check for per-wallet settxfee call')
        assert_equal(w1.getwalletinfo()['paytxfee'], 0)
        assert_equal(w2.getwalletinfo()['paytxfee'], 0)
        w2.settxfee(0.001)
        assert_equal(w1.getwalletinfo()['paytxfee'], 0)
        assert_equal(w2.getwalletinfo()['paytxfee'], Decimal('0.00100000'))
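
        # Small follow-up sketch (assumption: passing 0 to settxfee deactivates the custom
        # per-wallet fee again, as described in the settxfee help).
        w2.settxfee(0)
        assert_equal(w2.getwalletinfo()['paytxfee'], 0)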

        self.log.info("Test dynamic wallet loading")

        self.restart_node(0, ['-nowallet'])
        assert_equal(node.listwallets(), [])
        assert_raises_rpc_error(
            -18,
            "No wallet is loaded. Load a wallet using loadwallet or create a new one with createwallet. (Note: A default wallet is no longer automatically created)",
            node.getwalletinfo)

        self.log.info("Load first wallet")
        loadwallet_name = node.loadwallet(wallet_names[0])
        assert_equal(loadwallet_name['name'], wallet_names[0])
        assert_equal(node.listwallets(), wallet_names[0:1])
        node.getwalletinfo()
        w1 = node.get_wallet_rpc(wallet_names[0])
        w1.getwalletinfo()

        self.log.info("Load second wallet")
        loadwallet_name = node.loadwallet(wallet_names[1])
        assert_equal(loadwallet_name['name'], wallet_names[1])
        assert_equal(node.listwallets(), wallet_names[0:2])
        assert_raises_rpc_error(-19, "Wallet file not specified",
                                node.getwalletinfo)
        w2 = node.get_wallet_rpc(wallet_names[1])
        w2.getwalletinfo()

        self.log.info("Concurrent wallet loading")
        threads = []
        for _ in range(3):
            n = node.cli if self.options.usecli else get_rpc_proxy(
                node.url, 1, timeout=600, coveragedir=node.coverage_dir)
            t = Thread(target=test_load_unload, args=(
                n,
                wallet_names[2],
            ))
            t.start()
            threads.append(t)
        for t in threads:
            t.join()
        global got_loading_error
        assert_equal(got_loading_error, True)

        self.log.info("Load remaining wallets")
        for wallet_name in wallet_names[2:]:
            loadwallet_name = self.nodes[0].loadwallet(wallet_name)
            assert_equal(loadwallet_name['name'], wallet_name)

        assert_equal(set(self.nodes[0].listwallets()), set(wallet_names))

        # Fail to load if wallet doesn't exist
        path = os.path.join(self.options.tmpdir, "node0", "regtest", "wallets",
                            "wallets")
        assert_raises_rpc_error(
            -18,
            "Wallet file verification failed. Failed to load database path '{}'. Path does not exist."
            .format(path), self.nodes[0].loadwallet, 'wallets')

        # Fail to load duplicate wallets
        path = os.path.join(self.options.tmpdir, "node0", "regtest", "wallets",
                            "w1", "wallet.dat")
        if self.options.descriptors:
            assert_raises_rpc_error(
                -4,
                "Wallet file verification failed. SQLiteDatabase: Unable to obtain an exclusive lock on the database, is it being used by another bitcoind?",
                self.nodes[0].loadwallet, wallet_names[0])
        else:
            assert_raises_rpc_error(
                -4,
                "Wallet file verification failed. Refusing to load database. Data file '{}' is already loaded."
                .format(path), self.nodes[0].loadwallet, wallet_names[0])

            # This tests the default wallet that BDB makes, so SQLite wallet doesn't need to test this
            # Fail to load duplicate wallets by different ways (directory and filepath)
            path = os.path.join(self.options.tmpdir, "node0", "regtest",
                                "wallets", "wallet.dat")
            assert_raises_rpc_error(
                -4,
                "Wallet file verification failed. Refusing to load database. Data file '{}' is already loaded."
                .format(path), self.nodes[0].loadwallet, 'wallet.dat')

            # Only BDB refuses to open duplicate wallet files; SQLite has no such limitation.
            # While adding one may be desirable in the future, it is not necessary.
            # Fail to load if one wallet is a copy of another
            assert_raises_rpc_error(
                -4,
                "BerkeleyDatabase: Can't open database w8_copy (duplicates fileid",
                self.nodes[0].loadwallet, 'w8_copy')

            # Fail to load if one wallet is a copy of another, test this twice to make sure that we don't re-introduce #14304
            assert_raises_rpc_error(
                -4,
                "BerkeleyDatabase: Can't open database w8_copy (duplicates fileid",
                self.nodes[0].loadwallet, 'w8_copy')

        # Fail to load if wallet file is a symlink
        assert_raises_rpc_error(
            -4,
            "Wallet file verification failed. Invalid -wallet path 'w8_symlink'",
            self.nodes[0].loadwallet, 'w8_symlink')

        # Fail to load if a directory is specified that doesn't contain a wallet
        os.mkdir(wallet_dir('empty_wallet_dir'))
        path = os.path.join(self.options.tmpdir, "node0", "regtest", "wallets",
                            "empty_wallet_dir")
        assert_raises_rpc_error(
            -18,
            "Wallet file verification failed. Failed to load database path '{}'. Data is not in recognized format."
            .format(path), self.nodes[0].loadwallet, 'empty_wallet_dir')

        self.log.info("Test dynamic wallet creation.")

        # Fail to create a wallet if it already exists.
        path = os.path.join(self.options.tmpdir, "node0", "regtest", "wallets",
                            "w2")
        assert_raises_rpc_error(
            -4,
            "Failed to create database path '{}'. Database already exists.".
            format(path), self.nodes[0].createwallet, 'w2')

        # Successfully create a wallet with a new name
        loadwallet_name = self.nodes[0].createwallet('w9')
        in_wallet_dir.append('w9')
        assert_equal(loadwallet_name['name'], 'w9')
        w9 = node.get_wallet_rpc('w9')
        assert_equal(w9.getwalletinfo()['walletname'], 'w9')

        assert 'w9' in self.nodes[0].listwallets()

        # Successfully create a wallet using a full path
        new_wallet_dir = os.path.join(self.options.tmpdir, 'new_walletdir')
        new_wallet_name = os.path.join(new_wallet_dir, 'w10')
        loadwallet_name = self.nodes[0].createwallet(new_wallet_name)
        assert_equal(loadwallet_name['name'], new_wallet_name)
        w10 = node.get_wallet_rpc(new_wallet_name)
        assert_equal(w10.getwalletinfo()['walletname'], new_wallet_name)

        assert new_wallet_name in self.nodes[0].listwallets()

        self.log.info("Test dynamic wallet unloading")

        # Test `unloadwallet` errors
        assert_raises_rpc_error(-1, "JSON value is not a string as expected",
                                self.nodes[0].unloadwallet)
        assert_raises_rpc_error(
            -18, "Requested wallet does not exist or is not loaded",
            self.nodes[0].unloadwallet, "dummy")
        assert_raises_rpc_error(
            -18, "Requested wallet does not exist or is not loaded",
            node.get_wallet_rpc("dummy").unloadwallet)
        assert_raises_rpc_error(
            -8,
            "Both the RPC endpoint wallet and wallet_name parameter were provided (only one allowed)",
            w1.unloadwallet, "w2"),
        assert_raises_rpc_error(
            -8,
            "Both the RPC endpoint wallet and wallet_name parameter were provided (only one allowed)",
            w1.unloadwallet, "w1"),

        # Successfully unload the specified wallet name
        self.nodes[0].unloadwallet("w1")
        assert 'w1' not in self.nodes[0].listwallets()

        # Successfully unload the wallet referenced by the request endpoint
        # Also ensure unload works during walletpassphrase timeout
        w2.encryptwallet('test')
        w2.walletpassphrase('test', 1)
        w2.unloadwallet()
        time.sleep(1.1)
        assert 'w2' not in self.nodes[0].listwallets()

        # Successfully unload all wallets
        for wallet_name in self.nodes[0].listwallets():
            self.nodes[0].unloadwallet(wallet_name)
        assert_equal(self.nodes[0].listwallets(), [])
        assert_raises_rpc_error(
            -18,
            "No wallet is loaded. Load a wallet using loadwallet or create a new one with createwallet. (Note: A default wallet is no longer automatically created)",
            self.nodes[0].getwalletinfo)

        # Successfully load a previously unloaded wallet
        self.nodes[0].loadwallet('w1')
        assert_equal(self.nodes[0].listwallets(), ['w1'])
        assert_equal(w1.getwalletinfo()['walletname'], 'w1')

        assert_equal(
            sorted(
                map(lambda w: w['name'],
                    self.nodes[0].listwalletdir()['wallets'])),
            sorted(in_wallet_dir))

        # Test backing up and restoring wallets
        self.log.info("Test wallet backup")
        self.restart_node(0, ['-nowallet'])
        for wallet_name in wallet_names:
            self.nodes[0].loadwallet(wallet_name)
        for wallet_name in wallet_names:
            rpc = self.nodes[0].get_wallet_rpc(wallet_name)
            addr = rpc.getnewaddress()
            backup = os.path.join(self.options.tmpdir, 'backup.dat')
            if os.path.exists(backup):
                os.unlink(backup)
            rpc.backupwallet(backup)
            self.nodes[0].unloadwallet(wallet_name)
            shutil.copyfile(
                empty_created_wallet
                if wallet_name == self.default_wallet_name else empty_wallet,
                wallet_file(wallet_name))
            self.nodes[0].loadwallet(wallet_name)
            assert_equal(rpc.getaddressinfo(addr)['ismine'], False)
            self.nodes[0].unloadwallet(wallet_name)
            shutil.copyfile(backup, wallet_file(wallet_name))
            self.nodes[0].loadwallet(wallet_name)
            assert_equal(rpc.getaddressinfo(addr)['ismine'], True)

        # Test .walletlock file is closed
        self.start_node(1)
        wallet = os.path.join(self.options.tmpdir, 'my_wallet')
        self.nodes[0].createwallet(wallet)
        if self.options.descriptors:
            assert_raises_rpc_error(-4, "Unable to obtain an exclusive lock",
                                    self.nodes[1].loadwallet, wallet)
        else:
            assert_raises_rpc_error(
                -4, "Error initializing wallet database environment",
                self.nodes[1].loadwallet, wallet)
        self.nodes[0].unloadwallet(wallet)
        self.nodes[1].loadwallet(wallet)
Example #44
0
    def test_fund_send_fund_senddirty(self):
        '''
        Test the same as test_fund_send_fund_send, except send the 10 AUR with
        the avoid_reuse flag set to false. This means the 10 AUR send should succeed,
        where it fails in test_fund_send_fund_send.
        '''

        fundaddr = self.nodes[1].getnewaddress()
        retaddr = self.nodes[0].getnewaddress()

        self.nodes[0].sendtoaddress(fundaddr, 10)
        self.nodes[0].generate(1)
        self.sync_all()

        # listunspent should show 1 single, unused 10 aur output
        assert_unspent(self.nodes[1], total_count=1, total_sum=10, reused_supported=True, reused_count=0)
        # getbalances should show no used, 10 aur trusted
        assert_balances(self.nodes[1], mine={"used": 0, "trusted": 10})
        # node 0 should not show a used entry, as it does not enable avoid_reuse
        assert("used" not in self.nodes[0].getbalances()["mine"])

        self.nodes[1].sendtoaddress(retaddr, 5)
        self.nodes[0].generate(1)
        self.sync_all()

        # listunspent should show 1 single, unused 5 aur output
        assert_unspent(self.nodes[1], total_count=1, total_sum=5, reused_supported=True, reused_count=0)
        # getbalances should show no used, 5 aur trusted
        assert_balances(self.nodes[1], mine={"used": 0, "trusted": 5})

        self.nodes[0].sendtoaddress(fundaddr, 10)
        self.nodes[0].generate(1)
        self.sync_all()

        # listunspent should show 2 total outputs (5, 10 aur), one unused (5), one reused (10)
        assert_unspent(self.nodes[1], total_count=2, total_sum=15, reused_count=1, reused_sum=10)
        # getbalances should show 10 used, 5 aur trusted
        assert_balances(self.nodes[1], mine={"used": 10, "trusted": 5}

        self.nodes[1].sendtoaddress(address=retaddr, amount=10, avoid_reuse=False)

        # listunspent should show 1 total outputs (5 aur), unused
        assert_unspent(self.nodes[1], total_count=1, total_sum=5, reused_count=0)
        # getbalances should show no used, 5 aur trusted
        assert_balances(self.nodes[1], mine={"used": 0, "trusted": 5})

        # node 1 should now have about 5 aur left (for both cases)
        assert_approx(self.nodes[1].getbalance(), 5, 0.001)
        assert_approx(self.nodes[1].getbalance(avoid_reuse=False), 5, 0.001)

    def test_fund_send_fund_send(self):
        '''
        Test the simple case where [1] generates a new address A, then
        [0] sends 10 AUR to A.
        [1] spends 5 AUR from A. (leaving roughly 5 AUR usable)
        [0] sends 10 AUR to A again.
        [1] tries to spend 10 AUR (fails; dirty).
        [1] tries to spend 4 AUR (succeeds; change address sufficient)
        '''

        fundaddr = self.nodes[1].getnewaddress()
        retaddr = self.nodes[0].getnewaddress()

        self.nodes[0].sendtoaddress(fundaddr, 10)
        self.nodes[0].generate(1)
        self.sync_all()

        # listunspent should show 1 single, unused 10 aur output
        assert_unspent(self.nodes[1], total_count=1, total_sum=10, reused_supported=True, reused_count=0)
        # getbalances should show no used, 10 aur trusted
        assert_balances(self.nodes[1], mine={"used": 0, "trusted": 10})

        self.nodes[1].sendtoaddress(retaddr, 5)
        self.nodes[0].generate(1)
        self.sync_all()

        # listunspent should show 1 single, unused 5 aur output
        assert_unspent(self.nodes[1], total_count=1, total_sum=5, reused_supported=True, reused_count=0)
        # getbalances should show no used, 5 aur trusted
        assert_balances(self.nodes[1], mine={"used": 0, "trusted": 5})

        self.nodes[0].sendtoaddress(fundaddr, 10)
        self.nodes[0].generate(1)
        self.sync_all()

        # listunspent should show 2 total outputs (5, 10 aur), one unused (5), one reused (10)
        assert_unspent(self.nodes[1], total_count=2, total_sum=15, reused_count=1, reused_sum=10)
        # getbalances should show 10 used, 5 aur trusted
        assert_balances(self.nodes[1], mine={"used": 10, "trusted": 5})

        # node 1 should now have a balance of 5 (no dirty) or 15 (including dirty)
        assert_approx(self.nodes[1].getbalance(), 5, 0.001)
        assert_approx(self.nodes[1].getbalance(avoid_reuse=False), 15, 0.001)

        assert_raises_rpc_error(-6, "Insufficient funds", self.nodes[1].sendtoaddress, retaddr, 10)

        self.nodes[1].sendtoaddress(retaddr, 4)

        # listunspent should show 2 total outputs (1, 10 aur), one unused (1), one reused (10)
        assert_unspent(self.nodes[1], total_count=2, total_sum=11, reused_count=1, reused_sum=10)
        # getbalances should show 10 used, 1 aur trusted
        assert_balances(self.nodes[1], mine={"used": 10, "trusted": 1})

        # node 1 should now have about 1 aur left (no dirty) and 11 (including dirty)
        assert_approx(self.nodes[1].getbalance(), 1, 0.001)
        assert_approx(self.nodes[1].getbalance(avoid_reuse=False), 11, 0.001)

if __name__ == '__main__':
    AvoidReuseTest().main()
Example #45
0
    def run_test(self):
        self.log.info('Setting up wallets')
        self.nodes[0].createwallet(wallet_name='w0',
                                   disable_private_keys=False,
                                   descriptors=True)
        w0 = self.nodes[0].get_wallet_rpc('w0')

        self.nodes[1].createwallet(wallet_name='w1',
                                   disable_private_keys=True,
                                   blank=True,
                                   descriptors=True)
        w1 = self.nodes[1].get_wallet_rpc('w1')
        assert_equal(w1.getwalletinfo()['keypoolsize'], 0)

        self.nodes[1].createwallet(wallet_name="wpriv",
                                   disable_private_keys=False,
                                   blank=True,
                                   descriptors=True)
        wpriv = self.nodes[1].get_wallet_rpc("wpriv")
        assert_equal(wpriv.getwalletinfo()['keypoolsize'], 0)

        self.log.info('Mining coins')
        w0.generatetoaddress(101, w0.getnewaddress())

        # RPC importdescriptors -----------------------------------------------

        # # Test import fails if no descriptor present
        key = get_generate_key()
        self.log.info("Import should fail if a descriptor is not provided")
        self.test_importdesc({"timestamp": "now"},
                             success=False,
                             error_code=-8,
                             error_message='Descriptor not found.')

        # # Test importing of a P2PKH descriptor
        key = get_generate_key()
        self.log.info("Should import a p2pkh descriptor")
        self.test_importdesc(
            {
                "desc": descsum_create("pkh(" + key.pubkey + ")"),
                "timestamp": "now",
                "label": "Descriptor import test"
            },
            success=True)
        test_address(w1,
                     key.p2pkh_addr,
                     solvable=True,
                     ismine=True,
                     labels=["Descriptor import test"])
        assert_equal(w1.getwalletinfo()['keypoolsize'], 0)

        self.log.info("Internal addresses cannot have labels")
        self.test_importdesc(
            {
                "desc": descsum_create("pkh(" + key.pubkey + ")"),
                "timestamp": "now",
                "internal": True,
                "label": "Descriptor import test"
            },
            success=False,
            error_code=-8,
            error_message="Internal addresses should not have a label")

        self.log.info("Internal addresses should be detected as such")
        key = get_generate_key()
        addr = key_to_p2pkh(key.pubkey)
        self.test_importdesc(
            {
                "desc": descsum_create("pkh(" + key.pubkey + ")"),
                "timestamp": "now",
                "internal": True
            },
            success=True)
        info = w1.getaddressinfo(addr)
        assert_equal(info["ismine"], True)
        assert_equal(info["ischange"], True)

        # # Test importing of a P2SH-P2WPKH descriptor
        key = get_generate_key()
        self.log.info(
            "Should not import a p2sh-p2wpkh descriptor without checksum")
        self.test_importdesc(
            {
                "desc": "sh(wpkh(" + key.pubkey + "))",
                "timestamp": "now"
            },
            success=False,
            error_code=-5,
            error_message="Missing checksum")

        self.log.info(
            "Should not import a p2sh-p2wpkh descriptor that has range specified"
        )
        self.test_importdesc(
            {
                "desc": descsum_create("sh(wpkh(" + key.pubkey + "))"),
                "timestamp": "now",
                "range": 1,
            },
            success=False,
            error_code=-8,
            error_message=
            "Range should not be specified for an un-ranged descriptor")

        self.log.info(
            "Should not import a p2sh-p2wpkh descriptor and have it set to active"
        )
        self.test_importdesc(
            {
                "desc": descsum_create("sh(wpkh(" + key.pubkey + "))"),
                "timestamp": "now",
                "active": True,
            },
            success=False,
            error_code=-8,
            error_message="Active descriptors must be ranged")

        self.log.info("Should import a (non-active) p2sh-p2wpkh descriptor")
        self.test_importdesc(
            {
                "desc": descsum_create("sh(wpkh(" + key.pubkey + "))"),
                "timestamp": "now",
                "active": False,
            },
            success=True)
        assert_equal(w1.getwalletinfo()['keypoolsize'], 0)

        test_address(w1, key.p2sh_p2wpkh_addr, ismine=True, solvable=True)

        # Check persistence of data and that loading works correctly
        w1.unloadwallet()
        self.nodes[1].loadwallet('w1')
        test_address(w1, key.p2sh_p2wpkh_addr, ismine=True, solvable=True)

        # # Test importing of a multisig descriptor
        key1 = get_generate_key()
        key2 = get_generate_key()
        self.log.info("Should import a 1-of-2 bare multisig from descriptor")
        self.test_importdesc(
            {
                "desc":
                descsum_create("multi(1," + key1.pubkey + "," + key2.pubkey +
                               ")"),
                "timestamp":
                "now"
            },
            success=True)
        self.log.info(
            "Should not treat individual keys from the imported bare multisig as watchonly"
        )
        test_address(w1, key1.p2pkh_addr, ismine=False)

        # # Test ranged descriptors
        xpriv = "tprv8ZgxMBicQKsPeuVhWwi6wuMQGfPKi9Li5GtX35jVNknACgqe3CY4g5xgkfDDJcmtF7o1QnxWDRYw4H5P26PXq7sbcUkEqeR4fg3Kxp2tigg"
        xpub = "tpubD6NzVbkrYhZ4YNXVQbNhMK1WqguFsUXceaVJKbmno2aZ3B6QfbMeraaYvnBSGpV3vxLyTTK9DYT1yoEck4XUScMzXoQ2U2oSmE2JyMedq3H"
        addresses = [
            "2dpASU3kVq1JkHY4guu3SfHLPLByeku2Us4",
            "2dZUVNpeY85tLsM5kSEHYgGQbQZsSQXrhXv"
        ]  # hdkeypath=m/0'/0'/0' and 1'
        addresses += [
            "ert1qrd3n235cj2czsfmsuvqqpr3lu6lg0ju7gv2ekn",
            "ert1qfqeppuvj0ww98r6qghmdkj70tv8qpche0daak8"
        ]  # wpkh subscripts corresponding to the above addresses
        desc = "sh(wpkh(" + xpub + "/0/0/*" + "))"

        self.log.info("Ranged descriptors cannot have labels")
        self.test_importdesc(
            {
                "desc": descsum_create(desc),
                "timestamp": "now",
                "range": [0, 100],
                "label": "test"
            },
            success=False,
            error_code=-8,
            error_message='Ranged descriptors should not have a label')

        self.log.info("Private keys required for private keys enabled wallet")
        self.test_importdesc(
            {
                "desc": descsum_create(desc),
                "timestamp": "now",
                "range": [0, 100]
            },
            success=False,
            error_code=-4,
            error_message=
            'Cannot import descriptor without private keys to a wallet with private keys enabled',
            wallet=wpriv)

        self.log.info(
            "Ranged descriptor import should warn without a specified range")
        self.test_importdesc(
            {
                "desc": descsum_create(desc),
                "timestamp": "now"
            },
            success=True,
            warnings=['Range not given, using default keypool range'])
        assert_equal(w1.getwalletinfo()['keypoolsize'], 0)

        # # Test importing of a ranged descriptor with xpriv
        self.log.info(
            "Should not import a ranged descriptor that includes xpriv into a watch-only wallet"
        )
        desc = "sh(wpkh(" + xpriv + "/0'/0'/*'" + "))"
        self.test_importdesc(
            {
                "desc": descsum_create(desc),
                "timestamp": "now",
                "range": 1
            },
            success=False,
            error_code=-4,
            error_message=
            'Cannot import private keys to a wallet with private keys disabled'
        )

        self.log.info(
            "Should not import a descriptor with hardened derivations when private keys are disabled"
        )
        self.test_importdesc(
            {
                "desc": descsum_create("wpkh(" + xpub + "/1h/*)"),
                "timestamp": "now",
                "range": 1
            },
            success=False,
            error_code=-4,
            error_message=
            'Cannot expand descriptor. Probably because of hardened derivations without private keys provided'
        )

        for address in addresses:
            test_address(w1, address, ismine=False, solvable=False)

        self.test_importdesc(
            {
                "desc": descsum_create(desc),
                "timestamp": "now",
                "range": -1
            },
            success=False,
            error_code=-8,
            error_message='End of range is too high')

        self.test_importdesc(
            {
                "desc": descsum_create(desc),
                "timestamp": "now",
                "range": [-1, 10]
            },
            success=False,
            error_code=-8,
            error_message='Range should be greater or equal than 0')

        self.test_importdesc(
            {
                "desc": descsum_create(desc),
                "timestamp": "now",
                "range": [(2 << 31 + 1) - 1000000, (2 << 31 + 1)]
            },
            success=False,
            error_code=-8,
            error_message='End of range is too high')

        self.test_importdesc(
            {
                "desc": descsum_create(desc),
                "timestamp": "now",
                "range": [2, 1]
            },
            success=False,
            error_code=-8,
            error_message=
            'Range specified as [begin,end] must not have begin after end')

        self.test_importdesc(
            {
                "desc": descsum_create(desc),
                "timestamp": "now",
                "range": [0, 1000001]
            },
            success=False,
            error_code=-8,
            error_message='Range is too large')

        # Make sure ranged imports import keys in order
        w1 = self.nodes[1].get_wallet_rpc('w1')
        self.log.info('Key ranges should be imported in order')
        xpub = "tpubDAXcJ7s7ZwicqjprRaEWdPoHKrCS215qxGYxpusRLLmJuT69ZSicuGdSfyvyKpvUNYBW1s2U3NSrT6vrCYB9e6nZUEvrqnwXPF8ArTCRXMY"
        addresses = [
            'ert1qtmp74ayg7p24uslctssvjm06q5phz4yr7gdkdv',  # m/0'/0'/0
            'ert1q8vprchan07gzagd5e6v9wd7azyucksq27vtyg8',  # m/0'/0'/1
            'ert1qtuqdtha7zmqgcrr26n2rqxztv5y8rafje32zpu',  # m/0'/0'/2
            'ert1qau64272ymawq26t90md6an0ps99qkrsevnwyt0',  # m/0'/0'/3
            'ert1qsg97266hrh6cpmutqen8s4s962aryy77246hk0',  # m/0'/0'/4
        ]

        self.test_importdesc(
            {
                'desc': descsum_create('wpkh([80002067/0h/0h]' + xpub + '/*)'),
                'active': True,
                'range': [0, 2],
                'timestamp': 'now'
            },
            success=True)
        self.test_importdesc(
            {
                'desc':
                descsum_create('sh(wpkh([abcdef12/0h/0h]' + xpub + '/*))'),
                'active': True,
                'range': [0, 2],
                'timestamp': 'now'
            },
            success=True)
        self.test_importdesc(
            {
                'desc': descsum_create('pkh([12345678/0h/0h]' + xpub + '/*)'),
                'active': True,
                'range': [0, 2],
                'timestamp': 'now'
            },
            success=True)

        assert_equal(w1.getwalletinfo()['keypoolsize'], 5 * 3)
        for i, expected_addr in enumerate(addresses):
            received_addr = w1.getnewaddress('', 'bech32')
            assert_raises_rpc_error(-4, 'This wallet has no available keys',
                                    w1.getrawchangeaddress, 'bech32')
            assert_equal(received_addr, expected_addr)
            bech32_addr_info = w1.getaddressinfo(received_addr)
            assert_equal(bech32_addr_info['desc'][:23],
                         'wpkh([80002067/0\'/0\'/{}]'.format(i))

            shwpkh_addr = w1.getnewaddress('', 'p2sh-segwit')
            shwpkh_addr_info = w1.getaddressinfo(shwpkh_addr)
            assert_equal(shwpkh_addr_info['desc'][:26],
                         'sh(wpkh([abcdef12/0\'/0\'/{}]'.format(i))

            pkh_addr = w1.getnewaddress('', 'legacy')
            pkh_addr_info = w1.getaddressinfo(pkh_addr)
            assert_equal(pkh_addr_info['desc'][:22],
                         'pkh([12345678/0\'/0\'/{}]'.format(i))

            assert_equal(
                w1.getwalletinfo()['keypoolsize'], 4 * 3
            )  # After retrieving a key, we don't refill the keypool again, so it's one less for each address type
        w1.keypoolrefill()
        assert_equal(w1.getwalletinfo()['keypoolsize'], 5 * 3)

        # Check active=False default
        self.log.info('Check imported descriptors are not active by default')
        self.test_importdesc(
            {
                'desc': descsum_create('pkh([12345678/0h/0h]' + xpub + '/*)'),
                'range': [0, 2],
                'timestamp': 'now',
                'internal': True
            },
            success=True)
        assert_raises_rpc_error(-4, 'This wallet has no available keys',
                                w1.getrawchangeaddress, 'legacy')

        # # Test importing a descriptor containing a WIF private key
        wif_priv = "cTe1f5rdT8A8DFgVWTjyPwACsDPJM9ff4QngFxUixCSvvbg1x6sh"
        address = "XDod7vy3NZgmzmpHJdZvKxoF8D1eqEuH2W"
        desc = "sh(wpkh(" + wif_priv + "))"
        self.log.info(
            "Should import a descriptor with a WIF private key as spendable")
        self.test_importdesc({
            "desc": descsum_create(desc),
            "timestamp": "now"
        },
                             success=True,
                             wallet=wpriv)
        test_address(wpriv, address, solvable=True, ismine=True)
        txid = w0.sendtoaddress(address, 49.99993240)
        w0.generatetoaddress(6, w0.getnewaddress())
        self.sync_blocks()
        tx = wpriv.createrawtransaction([{
            "txid": txid,
            "vout": 0
        }], [{
            w0.getnewaddress(): 49.999
        }, {
            "fee": 0.0009324
        }])
        signed_tx = wpriv.signrawtransactionwithwallet(tx)
        w1.sendrawtransaction(signed_tx['hex'])
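        # The WIF key lives in wpriv, so wpriv does the signing; w1 merely
        # broadcasts the fully signed hex, which any wallet/node can do since
        # relaying a transaction does not require owning its inputs.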

        # Make sure we can import multisig descriptors and use the resulting addresses
        self.log.info(
            'Test that multisigs can be imported, signed for, and getnewaddress\'d'
        )
        self.nodes[1].createwallet(wallet_name="wmulti_priv",
                                   disable_private_keys=False,
                                   blank=True,
                                   descriptors=True)
        wmulti_priv = self.nodes[1].get_wallet_rpc("wmulti_priv")
        assert_equal(wmulti_priv.getwalletinfo()['keypoolsize'], 0)

        self.test_importdesc(
            {
                "desc":
                "wsh(multi(2,tprv8ZgxMBicQKsPevADjDCWsa6DfhkVXicu8NQUzfibwX2MexVwW4tCec5mXdCW8kJwkzBRRmAay1KZya4WsehVvjTGVW6JLqiqd8DdZ4xSg52/84h/0h/0h/*,tprv8ZgxMBicQKsPdSNWUhDiwTScDr6JfkZuLshTRwzvZGnMSnGikV6jxpmdDkC3YRc4T3GD6Nvg9uv6hQg73RVv1EiTXDZwxVbsLugVHU8B1aq/84h/0h/0h/*,tprv8ZgxMBicQKsPeonDt8Ka2mrQmHa61hQ5FQCsvWBTpSNzBFgM58cV2EuXNAHF14VawVpznnme3SuTbA62sGriwWyKifJmXntfNeK7zeqMCj1/84h/0h/0h/*))#m2sr93jn",
                "active": True,
                "range": 1000,
                "next_index": 0,
                "timestamp": "now"
            },
            success=True,
            wallet=wmulti_priv)
        self.test_importdesc(
            {
                "desc":
                "wsh(multi(2,tprv8ZgxMBicQKsPevADjDCWsa6DfhkVXicu8NQUzfibwX2MexVwW4tCec5mXdCW8kJwkzBRRmAay1KZya4WsehVvjTGVW6JLqiqd8DdZ4xSg52/84h/1h/0h/*,tprv8ZgxMBicQKsPdSNWUhDiwTScDr6JfkZuLshTRwzvZGnMSnGikV6jxpmdDkC3YRc4T3GD6Nvg9uv6hQg73RVv1EiTXDZwxVbsLugVHU8B1aq/84h/1h/0h/*,tprv8ZgxMBicQKsPeonDt8Ka2mrQmHa61hQ5FQCsvWBTpSNzBFgM58cV2EuXNAHF14VawVpznnme3SuTbA62sGriwWyKifJmXntfNeK7zeqMCj1/84h/1h/0h/*))#q3sztvx5",
                "active": True,
                "internal": True,
                "range": 1000,
                "next_index": 0,
                "timestamp": "now"
            },
            success=True,
            wallet=wmulti_priv)

        assert_equal(
            wmulti_priv.getwalletinfo()['keypoolsize'],
            1001)  # Range end (1000) is inclusive, so 1001 addresses generated
        addr = wmulti_priv.getnewaddress('', 'bech32')
        assert_equal(
            addr,
            'ert1qdt0qy5p7dzhxzmegnn4ulzhard33s2809arjqgjndx87rv5vd0fqmt2fdr'
        )  # Derived at m/84'/0'/0'/0
        change_addr = wmulti_priv.getrawchangeaddress('bech32')
        assert_equal(
            change_addr,
            'ert1qt9uhe3a9hnq7vajl7a094z4s3crm9ttf8zw3f5v9gr2nyd7e3lns4xadwa')
        assert_equal(wmulti_priv.getwalletinfo()['keypoolsize'], 1000)
        txid = w0.sendtoaddress(addr, 10)
        self.nodes[0].generate(6)
        self.sync_all()
        send_txid = wmulti_priv.sendtoaddress(w0.getnewaddress(), 8)
        decoded = wmulti_priv.decoderawtransaction(
            wmulti_priv.gettransaction(send_txid)['hex'])
        assert_equal(len(decoded['vin'][0]['txinwitness']), 4)
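        # A 2-of-3 P2WSH multisig spend carries exactly 4 witness items: the
        # empty element consumed by CHECKMULTISIG's off-by-one bug, two
        # signatures, and the witness script itself.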
        self.nodes[0].generate(6)
        self.sync_all()

        self.nodes[1].createwallet(wallet_name="wmulti_pub",
                                   disable_private_keys=True,
                                   blank=True,
                                   descriptors=True)
        wmulti_pub = self.nodes[1].get_wallet_rpc("wmulti_pub")
        assert_equal(wmulti_pub.getwalletinfo()['keypoolsize'], 0)

        self.test_importdesc(
            {
                "desc":
                "wsh(multi(2,[7b2d0242/84h/0h/0h]tpubDCJtdt5dgJpdhW4MtaVYDhG4T4tF6jcLR1PxL43q9pq1mxvXgMS9Mzw1HnXG15vxUGQJMMSqCQHMTy3F1eW5VkgVroWzchsPD5BUojrcWs8/*,[59b09cd6/84h/0h/0h]tpubDDBF2BTR6s8drwrfDei8WxtckGuSm1cyoKxYY1QaKSBFbHBYQArWhHPA6eJrzZej6nfHGLSURYSLHr7GuYch8aY5n61tGqgn8b4cXrMuoPH/*,[e81a0532/84h/0h/0h]tpubDCsWoW1kuQB9kG5MXewHqkbjPtqPueRnXju7uM2NK7y3JYb2ajAZ9EiuZXNNuE4661RAfriBWhL8UsnAPpk8zrKKnZw1Ug7X4oHgMdZiU4E/*))#tsry0s5e",
                "active": True,
                "range": 1000,
                "next_index": 0,
                "timestamp": "now"
            },
            success=True,
            wallet=wmulti_pub)
        self.test_importdesc(
            {
                "desc":
                "wsh(multi(2,[7b2d0242/84h/1h/0h]tpubDCXqdwWZcszwqYJSnZp8eARkxGJfHAk23KDxbztV4BbschfaTfYLTcSkSJ3TN64dRqwa1rnFUScsYormKkGqNbbPwkorQimVevXjxzUV9Gf/*,[59b09cd6/84h/1h/0h]tpubDCYfZY2ceyHzYzMMVPt9MNeiqtQ2T7Uyp9QSFwYXh8Vi9iJFYXcuphJaGXfF3jUQJi5Y3GMNXvM11gaL4txzZgNGK22BFAwMXynnzv4z2Jh/*,[e81a0532/84h/1h/0h]tpubDC6UGqnsQStngYuGD4MKsMy7eD1Yg9NTJfPdvjdG2JE5oZ7EsSL3WHg4Gsw2pR5K39ZwJ46M1wZayhedVdQtMGaUhq5S23PH6fnENK3V1sb/*))#c08a2rzv",
                "active": True,
                "internal": True,
                "range": 1000,
                "next_index": 0,
                "timestamp": "now"
            },
            success=True,
            wallet=wmulti_pub)

        assert_equal(
            wmulti_pub.getwalletinfo()['keypoolsize'], 1000
        )  # The first one was already consumed by previous import and is detected as used
        addr = wmulti_pub.getnewaddress('', 'bech32')
        assert_equal(
            addr,
            'ert1qp8s25ckjl7gr6x2q3dx3tn2pytwp05upkjztk6ey857tt50r5aeqzfnj2p'
        )  # Derived at m/84'/0'/0'/1
        change_addr = wmulti_pub.getrawchangeaddress('bech32')
        assert_equal(
            change_addr,
            'ert1qt9uhe3a9hnq7vajl7a094z4s3crm9ttf8zw3f5v9gr2nyd7e3lns4xadwa')
        assert_equal(wmulti_pub.getwalletinfo()['keypoolsize'], 999)
        txid = w0.sendtoaddress(addr, 10)
        vout = find_vout_for_address(self.nodes[0], txid, addr)
        self.nodes[0].generate(6)
        self.sync_all()
        assert_equal(wmulti_pub.getbalance(), wmulti_priv.getbalance())

        # Make sure that descriptor wallets containing multiple xpubs in a single descriptor load correctly
        wmulti_pub.unloadwallet()
        self.nodes[1].loadwallet('wmulti_pub')

        self.log.info("Multisig with distributed keys")
        self.nodes[1].createwallet(wallet_name="wmulti_priv1",
                                   descriptors=True)
        wmulti_priv1 = self.nodes[1].get_wallet_rpc("wmulti_priv1")
        res = wmulti_priv1.importdescriptors([{
            "desc":
            descsum_create(
                "wsh(multi(2,tprv8ZgxMBicQKsPevADjDCWsa6DfhkVXicu8NQUzfibwX2MexVwW4tCec5mXdCW8kJwkzBRRmAay1KZya4WsehVvjTGVW6JLqiqd8DdZ4xSg52/84h/0h/0h/*,[59b09cd6/84h/0h/0h]tpubDDBF2BTR6s8drwrfDei8WxtckGuSm1cyoKxYY1QaKSBFbHBYQArWhHPA6eJrzZej6nfHGLSURYSLHr7GuYch8aY5n61tGqgn8b4cXrMuoPH/*,[e81a0532/84h/0h/0h]tpubDCsWoW1kuQB9kG5MXewHqkbjPtqPueRnXju7uM2NK7y3JYb2ajAZ9EiuZXNNuE4661RAfriBWhL8UsnAPpk8zrKKnZw1Ug7X4oHgMdZiU4E/*))"
            ),
            "active":
            True,
            "range":
            1000,
            "next_index":
            0,
            "timestamp":
            "now"
        }, {
            "desc":
            descsum_create(
                "wsh(multi(2,tprv8ZgxMBicQKsPevADjDCWsa6DfhkVXicu8NQUzfibwX2MexVwW4tCec5mXdCW8kJwkzBRRmAay1KZya4WsehVvjTGVW6JLqiqd8DdZ4xSg52/84h/1h/0h/*,[59b09cd6/84h/1h/0h]tpubDCYfZY2ceyHzYzMMVPt9MNeiqtQ2T7Uyp9QSFwYXh8Vi9iJFYXcuphJaGXfF3jUQJi5Y3GMNXvM11gaL4txzZgNGK22BFAwMXynnzv4z2Jh/*,[e81a0532/84h/1h/0h]tpubDC6UGqnsQStngYuGD4MKsMy7eD1Yg9NTJfPdvjdG2JE5oZ7EsSL3WHg4Gsw2pR5K39ZwJ46M1wZayhedVdQtMGaUhq5S23PH6fnENK3V1sb/*))"
            ),
            "active":
            True,
            "internal":
            True,
            "range":
            1000,
            "next_index":
            0,
            "timestamp":
            "now"
        }])
        assert_equal(res[0]['success'], True)
        assert_equal(
            res[0]['warnings'][0],
            'Not all private keys provided. Some wallet functionality may return unexpected errors'
        )
        assert_equal(res[1]['success'], True)
        assert_equal(
            res[1]['warnings'][0],
            'Not all private keys provided. Some wallet functionality may return unexpected errors'
        )

        self.nodes[1].createwallet(wallet_name='wmulti_priv2',
                                   blank=True,
                                   descriptors=True)
        wmulti_priv2 = self.nodes[1].get_wallet_rpc('wmulti_priv2')
        res = wmulti_priv2.importdescriptors([{
            "desc":
            descsum_create(
                "wsh(multi(2,[7b2d0242/84h/0h/0h]tpubDCJtdt5dgJpdhW4MtaVYDhG4T4tF6jcLR1PxL43q9pq1mxvXgMS9Mzw1HnXG15vxUGQJMMSqCQHMTy3F1eW5VkgVroWzchsPD5BUojrcWs8/*,tprv8ZgxMBicQKsPdSNWUhDiwTScDr6JfkZuLshTRwzvZGnMSnGikV6jxpmdDkC3YRc4T3GD6Nvg9uv6hQg73RVv1EiTXDZwxVbsLugVHU8B1aq/84h/0h/0h/*,[e81a0532/84h/0h/0h]tpubDCsWoW1kuQB9kG5MXewHqkbjPtqPueRnXju7uM2NK7y3JYb2ajAZ9EiuZXNNuE4661RAfriBWhL8UsnAPpk8zrKKnZw1Ug7X4oHgMdZiU4E/*))"
            ),
            "active":
            True,
            "range":
            1000,
            "next_index":
            0,
            "timestamp":
            "now"
        }, {
            "desc":
            descsum_create(
                "wsh(multi(2,[7b2d0242/84h/1h/0h]tpubDCXqdwWZcszwqYJSnZp8eARkxGJfHAk23KDxbztV4BbschfaTfYLTcSkSJ3TN64dRqwa1rnFUScsYormKkGqNbbPwkorQimVevXjxzUV9Gf/*,tprv8ZgxMBicQKsPdSNWUhDiwTScDr6JfkZuLshTRwzvZGnMSnGikV6jxpmdDkC3YRc4T3GD6Nvg9uv6hQg73RVv1EiTXDZwxVbsLugVHU8B1aq/84h/1h/0h/*,[e81a0532/84h/1h/0h]tpubDC6UGqnsQStngYuGD4MKsMy7eD1Yg9NTJfPdvjdG2JE5oZ7EsSL3WHg4Gsw2pR5K39ZwJ46M1wZayhedVdQtMGaUhq5S23PH6fnENK3V1sb/*))"
            ),
            "active":
            True,
            "internal":
            True,
            "range":
            1000,
            "next_index":
            0,
            "timestamp":
            "now"
        }])
        assert_equal(res[0]['success'], True)
        assert_equal(
            res[0]['warnings'][0],
            'Not all private keys provided. Some wallet functionality may return unexpected errors'
        )
        assert_equal(res[1]['success'], True)
        assert_equal(
            res[1]['warnings'][0],
            'Not all private keys provided. Some wallet functionality may return unexpected errors'
        )

        rawtx = self.nodes[1].createrawtransaction([{
            'txid': txid,
            'vout': vout
        }], [{
            w0.getnewaddress(): 9.999
        }, {
            "fee": 0.001
        }])
        tx_signed_1 = wmulti_priv1.signrawtransactionwithwallet(rawtx)
        assert_equal(tx_signed_1['complete'], False)
        tx_signed_2 = wmulti_priv2.signrawtransactionwithwallet(
            tx_signed_1['hex'])
        assert_equal(tx_signed_2['complete'], True)
        self.nodes[1].sendrawtransaction(tx_signed_2['hex'])

        self.log.info("Combo descriptors cannot be active")
        self.test_importdesc(
            {
                "desc":
                descsum_create(
                    "combo(tpubDCJtdt5dgJpdhW4MtaVYDhG4T4tF6jcLR1PxL43q9pq1mxvXgMS9Mzw1HnXG15vxUGQJMMSqCQHMTy3F1eW5VkgVroWzchsPD5BUojrcWs8/*)"
                ),
                "active":
                True,
                "range":
                1,
                "timestamp":
                "now"
            },
            success=False,
            error_code=-4,
            error_message="Combo descriptors cannot be set to active")

        self.log.info("Descriptors with no type cannot be active")
        self.test_importdesc(
            {
                "desc":
                descsum_create(
                    "pk(tpubDCJtdt5dgJpdhW4MtaVYDhG4T4tF6jcLR1PxL43q9pq1mxvXgMS9Mzw1HnXG15vxUGQJMMSqCQHMTy3F1eW5VkgVroWzchsPD5BUojrcWs8/*)"
                ),
                "active":
                True,
                "range":
                1,
                "timestamp":
                "now"
            },
            success=True,
            warnings=["Unknown output type, cannot set descriptor to active."])
Example #46
0
    def run_test(self):
        wallet_unenc_dump = os.path.join(self.nodes[0].datadir,
                                         "wallet.unencrypted.dump")
        wallet_enc_dump = os.path.join(self.nodes[0].datadir,
                                       "wallet.encrypted.dump")

        # generate 20 addresses to compare against the dump
        # but since we add a p2sh-p2wpkh address for the first pubkey in the
        # wallet, we will expect 21 addresses in the dump
        test_addr_count = 20
        addrs = []
        for i in range(0, test_addr_count):
            addr = self.nodes[0].getnewaddress()
            vaddr = self.nodes[0].getaddressinfo(
                addr)  # required to get hd keypath
            addrs.append(vaddr)
        # Should be a no-op:
        self.nodes[0].keypoolrefill()

        # Test scripts dump by adding a P2SH witness and a 1-of-1 multisig address
        witness_addr = self.nodes[0].addwitnessaddress(addrs[0]["address"],
                                                       True)
        multisig_addr = self.nodes[0].addmultisigaddress(
            1, [addrs[1]["address"]])["address"]
        script_addrs = [witness_addr, multisig_addr]

        # dump unencrypted wallet
        result = self.nodes[0].dumpwallet(wallet_unenc_dump)
        assert_equal(result['filename'], wallet_unenc_dump)

        found_addr, found_script_addr, found_addr_chg, found_addr_rsv, hd_master_addr_unenc, witness_addr_ret = \
            read_dump(wallet_unenc_dump, addrs, script_addrs, None)
        assert_equal(found_addr,
                     test_addr_count)  # all keys must be in the dump
        assert_equal(found_script_addr, 2)  # all scripts must be in the dump
        assert_equal(found_addr_chg, 0)  # 0 blocks were mined
        assert_equal(found_addr_rsv, 90 * 2)  # 90 keys plus 100% internal keys
        assert_equal(witness_addr_ret,
                     witness_addr)  # p2sh-p2wpkh address added to the first key

        # encrypt wallet, restart, unlock and dump
        self.nodes[0].node_encrypt_wallet('test')
        self.start_node(0)
        self.nodes[0].walletpassphrase('test', 10)
        # Should be a no-op:
        self.nodes[0].keypoolrefill()
        self.nodes[0].dumpwallet(wallet_enc_dump)

        found_addr, found_script_addr, found_addr_chg, found_addr_rsv, _, witness_addr_ret = \
            read_dump(wallet_enc_dump, addrs, script_addrs, hd_master_addr_unenc)
        assert_equal(found_addr, test_addr_count)
        assert_equal(found_script_addr, 2)
        assert_equal(found_addr_chg,
                     90 * 2)  # old reserve keys are marked as change now
        assert_equal(found_addr_rsv, 90 * 2)
        assert_equal(witness_addr_ret, witness_addr)

        # Overwriting should fail
        assert_raises_rpc_error(
            -8, "already exists",
            lambda: self.nodes[0].dumpwallet(wallet_enc_dump))

        # Restart node with new wallet, and test importwallet
        self.stop_node(0)
        self.start_node(0, ['-wallet=w2'])

        # Make sure the address is not IsMine before import
        result = self.nodes[0].getaddressinfo(multisig_addr)
        assert (result['ismine'] == False)

        self.nodes[0].importwallet(wallet_unenc_dump)

        # Now check IsMine is true
        result = self.nodes[0].getaddressinfo(multisig_addr)
        assert (result['ismine'] == True)
Example #47
0
    def run_test(self):
        # Test `prioritisetransaction` required parameters
        assert_raises_rpc_error(-1, "prioritisetransaction",
                                self.nodes[0].prioritisetransaction)
        assert_raises_rpc_error(-1, "prioritisetransaction",
                                self.nodes[0].prioritisetransaction, '')
        assert_raises_rpc_error(-1, "prioritisetransaction",
                                self.nodes[0].prioritisetransaction, '', 0)

        # Test `prioritisetransaction` invalid extra parameters
        assert_raises_rpc_error(-1, "prioritisetransaction",
                                self.nodes[0].prioritisetransaction, '', 0, 0,
                                0)

        # Test `prioritisetransaction` invalid `txid`
        assert_raises_rpc_error(-1,
                                "txid must be hexadecimal string",
                                self.nodes[0].prioritisetransaction,
                                txid='foo',
                                fee_delta=0)

        # Test `prioritisetransaction` invalid `dummy`
        txid = '1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000'
        assert_raises_rpc_error(-1, "JSON value is not a number as expected",
                                self.nodes[0].prioritisetransaction, txid,
                                'foo', 0)
        assert_raises_rpc_error(
            -8,
            "Priority is no longer supported, dummy argument to prioritisetransaction must be 0.",
            self.nodes[0].prioritisetransaction, txid, 1, 0)

        # Test `prioritisetransaction` invalid `fee_delta`
        assert_raises_rpc_error(-1,
                                "JSON value is not an integer as expected",
                                self.nodes[0].prioritisetransaction,
                                txid=txid,
                                fee_delta='foo')

        self.txouts = gen_return_txouts()
        self.relayfee = self.nodes[0].getnetworkinfo()['relayfee']

        utxo_count = 90
        utxos = create_confirmed_utxos(self.relayfee, self.nodes[0],
                                       utxo_count)
        base_fee = self.relayfee * 100  # our transactions are smaller than 100kb
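        # relayfee from getnetworkinfo is quoted per 1000 bytes, so
        # relayfee * 100 is the minimum relay fee for a 100kb transaction;
        # every tx created below is smaller than that, so multiples of
        # base_fee always clear the relay threshold.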
        txids = []

        # Create 3 batches of transactions at 3 different fee rate levels
        range_size = utxo_count // 3
        for i in range(3):
            txids.append([])
            start_range = i * range_size
            end_range = start_range + range_size
            txids[i] = create_lots_of_big_transactions(
                self.nodes[0], self.txouts, utxos[start_range:end_range],
                end_range - start_range, (i + 1) * base_fee)

        # Make sure that the size of each group of transactions exceeds
        # MAX_BLOCK_BASE_SIZE -- otherwise the test needs to be revised to create
        # more transactions.
        mempool = self.nodes[0].getrawmempool(True)
        sizes = [0, 0, 0]
        for i in range(3):
            for j in txids[i]:
                assert (j in mempool)
                sizes[i] += mempool[j]['size']
            assert (sizes[i] > MAX_BLOCK_BASE_SIZE)  # If this fails, raise utxo_count

        # add a fee delta to something in the cheapest bucket and make sure it gets mined
        # also check that a different entry in the cheapest bucket is NOT mined
        self.nodes[0].prioritisetransaction(txid=txids[0][0],
                                            fee_delta=int(3 * base_fee * COIN))
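        # fee_delta is denominated in satoshis (hence the * COIN factor); it
        # only changes the fee the mining code accounts for when selecting
        # transactions, the fee actually paid by the tx is unchanged.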

        self.nodes[0].generate(1)

        mempool = self.nodes[0].getrawmempool()
        self.log.info("Assert that prioritised transaction was mined")
        assert (txids[0][0] not in mempool)
        assert (txids[0][1] in mempool)

        high_fee_tx = None
        for x in txids[2]:
            if x not in mempool:
                high_fee_tx = x

        # Something high-fee should have been mined!
        assert (high_fee_tx != None)

        # Add a prioritisation before a tx is in the mempool (de-prioritising a
        # high-fee transaction so that it's now low fee).
        self.nodes[0].prioritisetransaction(
            txid=high_fee_tx, fee_delta=-int(2 * base_fee * COIN))

        # Add everything back to mempool
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

        # Check to make sure our high fee rate tx is back in the mempool
        mempool = self.nodes[0].getrawmempool()
        assert (high_fee_tx in mempool)

        # Now verify the modified-high feerate transaction isn't mined before
        # the other high fee transactions. Keep mining until our mempool has
        # decreased by all the high fee size that we calculated above.
        while (self.nodes[0].getmempoolinfo()['bytes'] > sizes[0] + sizes[1]):
            self.nodes[0].generate(1)

        # High fee transaction should not have been mined, but other high fee rate
        # transactions should have been.
        mempool = self.nodes[0].getrawmempool()
        self.log.info(
            "Assert that de-prioritised transaction is still in mempool")
        assert (high_fee_tx in mempool)
        for x in txids[2]:
            if (x != high_fee_tx):
                assert (x not in mempool)

        # Create a free transaction.  Should be rejected.
        utxo_list = self.nodes[0].listunspent()
        assert (len(utxo_list) > 0)
        utxo = utxo_list[0]

        inputs = []
        outputs = {}
        inputs.append({"txid": utxo["txid"], "vout": utxo["vout"]})
        outputs[self.nodes[0].getnewaddress()] = utxo["amount"]
        raw_tx = self.nodes[0].createrawtransaction(inputs, outputs)
        tx_hex = self.nodes[0].signrawtransactionwithwallet(raw_tx)["hex"]
        tx_id = self.nodes[0].decoderawtransaction(tx_hex)["txid"]

        # This will raise an exception due to min relay fee not being met
        assert_raises_rpc_error(-26, "min relay fee not met",
                                self.nodes[0].sendrawtransaction, tx_hex)
        assert (tx_id not in self.nodes[0].getrawmempool())

        # This is a less than 1000-byte transaction, so just set the fee
        # to be the minimum for a 1000-byte transaction and check that it is
        # accepted.
        self.nodes[0].prioritisetransaction(txid=tx_id,
                                            fee_delta=int(self.relayfee *
                                                          COIN))

        self.log.info(
            "Assert that prioritised free transaction is accepted to mempool")
        assert_equal(self.nodes[0].sendrawtransaction(tx_hex), tx_id)
        assert (tx_id in self.nodes[0].getrawmempool())

        # Test that calling prioritisetransaction is sufficient to trigger
        # getblocktemplate to (eventually) return a new block.
        mock_time = int(time.time())
        self.nodes[0].setmocktime(mock_time)
        template = self.nodes[0].getblocktemplate()
        self.nodes[0].prioritisetransaction(
            txid=tx_id, fee_delta=-int(self.relayfee * COIN))
        self.nodes[0].setmocktime(mock_time + 10)
        new_template = self.nodes[0].getblocktemplate()

        assert (template != new_template)
Example #48
0
    def run_test(self):
        node = self.nodes[0]

        self.log.info('Start with empty mempool, and 200 blocks')
        self.mempool_size = 0
        wait_until(lambda: node.getblockcount() == 200)
        assert_equal(node.getmempoolinfo()['size'], self.mempool_size)

        self.log.info('Should not accept garbage to testmempoolaccept')
        assert_raises_rpc_error(-3, 'Expected type array, got string', lambda: node.testmempoolaccept(rawtxs='ff00baar'))
        assert_raises_rpc_error(-8, 'Array must contain exactly one raw transaction for now', lambda: node.testmempoolaccept(rawtxs=['ff00baar', 'ff22']))
        assert_raises_rpc_error(-22, 'TX decode failed', lambda: node.testmempoolaccept(rawtxs=['ff00baar']))

        self.log.info('A transaction already in the blockchain')
        coin = node.listunspent()[0]  # Pick a random coin(base) to spend
        raw_tx_in_block = node.signrawtransactionwithwallet(node.createrawtransaction(
            inputs=[{'txid': coin['txid'], 'vout': coin['vout']}],
            outputs=[{node.getnewaddress(): 0.3}, {node.getnewaddress(): 49}],
        ))['hex']
        txid_in_block = node.sendrawtransaction(hexstring=raw_tx_in_block, allowhighfees=True)
        node.generate(1)
        self.check_mempool_result(
            result_expected=[{'txid': txid_in_block, 'allowed': False, 'reject-reason': '18: txn-already-known'}],
            rawtxs=[raw_tx_in_block],
        )

        self.log.info('A transaction not in the mempool')
        fee = 0.00000700
        raw_tx_0 = node.signrawtransactionwithwallet(node.createrawtransaction(
            inputs=[{"txid": txid_in_block, "vout": 0, "sequence": BIP125_SEQUENCE_NUMBER}],  # RBF is used later
            outputs=[{node.getnewaddress(): 0.3 - fee}],
        ))['hex']
        tx = CTransaction()
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
        txid_0 = tx.rehash()
        self.check_mempool_result(
            result_expected=[{'txid': txid_0, 'allowed': True}],
            rawtxs=[raw_tx_0],
        )

        self.log.info('A transaction in the mempool')
        node.sendrawtransaction(hexstring=raw_tx_0)
        self.mempool_size = 1
        self.check_mempool_result(
            result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': '18: txn-already-in-mempool'}],
            rawtxs=[raw_tx_0],
        )

        self.log.info('A transaction that replaces a mempool transaction')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
        tx.vout[0].nValue -= int(fee * COIN)  # Double the fee
        tx.vin[0].nSequence = BIP125_SEQUENCE_NUMBER + 1  # Now, opt out of RBF
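        # Per BIP125 a transaction signals replaceability only if some input
        # has nSequence < 0xfffffffe; BIP125_SEQUENCE_NUMBER is 0xfffffffd, so
        # adding 1 yields 0xfffffffe and this replacement opts out of RBF.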
        raw_tx_0 = node.signrawtransactionwithwallet(bytes_to_hex_str(tx.serialize()))['hex']
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
        txid_0 = tx.rehash()
        self.check_mempool_result(
            result_expected=[{'txid': txid_0, 'allowed': True}],
            rawtxs=[raw_tx_0],
        )

        self.log.info('A transaction that conflicts with an unconfirmed tx')
        # Send the transaction that replaces the mempool transaction and opts out of replaceability
        node.sendrawtransaction(hexstring=bytes_to_hex_str(tx.serialize()), allowhighfees=True)
        # take original raw_tx_0
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
        tx.vout[0].nValue -= int(4 * fee * COIN)  # Set more fee
        # skip re-signing the tx
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '18: txn-mempool-conflict'}],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
            allowhighfees=True,
        )

        self.log.info('A transaction with missing inputs, that never existed')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
        tx.vin[0].prevout = COutPoint(hash=int('ff' * 32, 16), n=14)
        # skip re-signing the tx
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'missing-inputs'}],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info('A transaction with missing inputs, that existed once in the past')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
        tx.vin[0].prevout.n = 1  # Set vout to 1, to spend the other outpoint (49 coins) of the in-chain-tx we want to double spend
        raw_tx_1 = node.signrawtransactionwithwallet(bytes_to_hex_str(tx.serialize()))['hex']
        txid_1 = node.sendrawtransaction(hexstring=raw_tx_1, allowhighfees=True)
        # Now spend both to "clearly hide" the outputs, ie. remove the coins from the utxo set by spending them
        raw_tx_spend_both = node.signrawtransactionwithwallet(node.createrawtransaction(
            inputs=[
                {'txid': txid_0, 'vout': 0},
                {'txid': txid_1, 'vout': 0},
            ],
            outputs=[{node.getnewaddress(): 0.1}]
        ))['hex']
        txid_spend_both = node.sendrawtransaction(hexstring=raw_tx_spend_both, allowhighfees=True)
        node.generate(1)
        self.mempool_size = 0
        # Now see if we can add the coins back to the utxo set by sending the exact txs again
        self.check_mempool_result(
            result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': 'missing-inputs'}],
            rawtxs=[raw_tx_0],
        )
        self.check_mempool_result(
            result_expected=[{'txid': txid_1, 'allowed': False, 'reject-reason': 'missing-inputs'}],
            rawtxs=[raw_tx_1],
        )

        self.log.info('Create a signed "reference" tx for later use')
        raw_tx_reference = node.signrawtransactionwithwallet(node.createrawtransaction(
            inputs=[{'txid': txid_spend_both, 'vout': 0}],
            outputs=[{node.getnewaddress(): 0.05}],
        ))['hex']
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        # Reference tx should be valid on itself
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': True}],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info('A transaction with no outputs')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vout = []
        # Skip re-signing the transaction for context independent checks from now on
        # tx.deserialize(BytesIO(hex_str_to_bytes(node.signrawtransactionwithwallet(bytes_to_hex_str(tx.serialize()))['hex'])))
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-vout-empty'}],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info('A really large transaction')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vin = [tx.vin[0]] * (MAX_BLOCK_BASE_SIZE // len(tx.vin[0].serialize()))
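        # Duplicating the single input enough times pushes the serialized base
        # size past MAX_BLOCK_BASE_SIZE; the expected reject-reason shows the
        # size check fires before the duplicate-inputs check would.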
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-oversize'}],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info('A transaction with negative output value')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vout[0].nValue *= -1
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-vout-negative'}],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info('A transaction with too large output value')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vout[0].nValue = 210000000 * COIN + 1
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-vout-toolarge'}],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info('A transaction with too large sum of output values')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vout = [tx.vout[0]] * 2
        tx.vout[0].nValue = 210000000 * COIN
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-txouttotal-toolarge'}],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info('A transaction with duplicate inputs')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vin = [tx.vin[0]] * 2
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-inputs-duplicate'}],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info('A coinbase transaction')
        # Pick the input of the first tx we signed, so it has to be a coinbase tx
        raw_tx_coinbase_spent = node.getrawtransaction(txid=node.decoderawtransaction(hexstring=raw_tx_in_block)['vin'][0]['txid'])
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_coinbase_spent)))
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: coinbase'}],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info('Some nonstandard transactions')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.nVersion = 3  # A version currently non-standard
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: version'}],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vout[0].scriptPubKey = CScript([OP_0])  # Some non-standard script
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: scriptpubkey'}],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vin[0].scriptSig = CScript([OP_HASH160])  # Some not-pushonly scriptSig
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: scriptsig-not-pushonly'}],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        output_p2sh_burn = CTxOut(nValue=540, scriptPubKey=CScript([OP_HASH160, hash160(b'burn'), OP_EQUAL]))
        num_scripts = 100000 // len(output_p2sh_burn.serialize())  # Use enough outputs to make the tx too large for our policy
        tx.vout = [output_p2sh_burn] * num_scripts
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: tx-size'}],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vout[0] = output_p2sh_burn
        tx.vout[0].nValue -= 1  # Make output smaller, such that it is dust for our policy
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: dust'}],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'\xff'])
        tx.vout = [tx.vout[0]] * 2
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: multi-op-return'}],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info('A timelocked transaction')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vin[0].nSequence -= 1  # Should be non-max, so locktime is not ignored
        tx.nLockTime = node.getblockcount() + 1
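        # nLockTime is only enforced when at least one input has a non-final
        # nSequence, which is why the sequence was decremented above; with the
        # locktime one block in the future the tx is rejected as non-final.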
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: non-final'}],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
        )

        self.log.info('A transaction that is locked by BIP68 sequence logic')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vin[0].nSequence = 2  # We could include it in the second block mined from now, but not the very next one
        # Can skip re-signing the tx because of early rejection
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: non-BIP68-final'}],
            rawtxs=[bytes_to_hex_str(tx.serialize())],
            allowhighfees=True,
        )
Example #49
0
    def run_test(self):
        self.nodes[0].generate(161) #block 161

        self.log.info("Verify sigops are counted in GBT with pre-BIP141 rules before the fork")
        txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
        tmpl = self.nodes[0].getblocktemplate({})
        assert(tmpl['sizelimit'] == 1000000)
        assert('weightlimit' not in tmpl)
        assert(tmpl['sigoplimit'] == 20000)
        assert(tmpl['transactions'][0]['hash'] == txid)
        assert(tmpl['transactions'][0]['sigops'] == 2)
        tmpl = self.nodes[0].getblocktemplate({'rules':['segwit']})
        assert(tmpl['sizelimit'] == 1000000)
        assert('weightlimit' not in tmpl)
        assert(tmpl['sigoplimit'] == 20000)
        assert(tmpl['transactions'][0]['hash'] == txid)
        assert(tmpl['transactions'][0]['sigops'] == 2)
        self.nodes[0].generate(1) #block 162

        balance_presetup = self.nodes[0].getbalance()
        self.pubkey = []
        p2sh_ids = [] # p2sh_ids[NODE][VER] is an array of txids that pay a witness version VER pkscript for NODE's key, wrapped in P2SH
        wit_ids = [] # wit_ids[NODE][VER] is an array of txids that pay a witness version VER pkscript for NODE's key as a native (bare) witness output
        for i in range(3):
            newaddress = self.nodes[i].getnewaddress()
            self.pubkey.append(self.nodes[i].getaddressinfo(newaddress)["pubkey"])
            multiscript = CScript([OP_1, hex_str_to_bytes(self.pubkey[-1]), OP_1, OP_CHECKMULTISIG])
            p2sh_addr = self.nodes[i].addwitnessaddress(newaddress)
            bip173_addr = self.nodes[i].addwitnessaddress(newaddress, False)
            p2sh_ms_addr = self.nodes[i].addmultisigaddress(1, [self.pubkey[-1]], '', 'p2sh-segwit')['address']
            bip173_ms_addr = self.nodes[i].addmultisigaddress(1, [self.pubkey[-1]], '', 'bech32')['address']
            assert_equal(p2sh_addr, key_to_p2sh_p2wpkh(self.pubkey[-1]))
            assert_equal(bip173_addr, key_to_p2wpkh(self.pubkey[-1]))
            assert_equal(p2sh_ms_addr, script_to_p2sh_p2wsh(multiscript))
            assert_equal(bip173_ms_addr, script_to_p2wsh(multiscript))
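            # The assertions above confirm the RPCs derive the same addresses
            # as the framework helpers: P2SH-P2WPKH and native P2WPKH for the
            # key, and P2SH-P2WSH and native P2WSH for the 1-of-1 multiscript.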
            p2sh_ids.append([])
            wit_ids.append([])
            for v in range(2):
                p2sh_ids[i].append([])
                wit_ids[i].append([])

        for i in range(5):
            for n in range(3):
                for v in range(2):
                    wit_ids[n][v].append(send_to_witness(v, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[n], False, Decimal("49.999")))
                    p2sh_ids[n][v].append(send_to_witness(v, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[n], True, Decimal("49.999")))

        self.nodes[0].generate(1) #block 163
        sync_blocks(self.nodes)

        # Make sure all nodes recognize the transactions as theirs
        assert_equal(self.nodes[0].getbalance(), balance_presetup - 60*50 + 20*Decimal("49.999") + 50)
        assert_equal(self.nodes[1].getbalance(), 20*Decimal("49.999"))
        assert_equal(self.nodes[2].getbalance(), 20*Decimal("49.999"))

        self.nodes[0].generate(260) #block 423
        sync_blocks(self.nodes)

        self.log.info("Verify witness txs are skipped for mining before the fork")
        self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][0], True) #block 424
        self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][0], True) #block 425
        self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][0], True) #block 426
        self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][0], True) #block 427

        self.log.info("Verify unsigned p2sh witness txs without a redeem script are invalid")
        self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V0][1], False)
        self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V1][1], False)

        self.nodes[2].generate(4) # blocks 428-431

        self.log.info("Verify previous witness txs skipped for mining can now be mined")
        assert_equal(len(self.nodes[2].getrawmempool()), 4)
        block = self.nodes[2].generate(1) #block 432 (first block with new rules; 432 = 144 * 3)
        sync_blocks(self.nodes)
        assert_equal(len(self.nodes[2].getrawmempool()), 0)
        segwit_tx_list = self.nodes[2].getblock(block[0])["tx"]
        assert_equal(len(segwit_tx_list), 5)

        self.log.info("Verify default node can't accept txs with missing witness")
        # unsigned, no scriptsig
        self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", wit_ids[NODE_0][WIT_V0][0], False)
        self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", wit_ids[NODE_0][WIT_V1][0], False)
        self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V0][0], False)
        self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V1][0], False)
        # unsigned with redeem script
        self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V0][0], False, witness_script(False, self.pubkey[0]))
        self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V1][0], False, witness_script(True, self.pubkey[0]))

        self.log.info("Verify block and transaction serialization rpcs return differing serializations depending on rpc serialization flag")
        assert(self.nodes[2].getblock(block[0], False) !=  self.nodes[0].getblock(block[0], False))
        assert(self.nodes[1].getblock(block[0], False) ==  self.nodes[2].getblock(block[0], False))
        for i in range(len(segwit_tx_list)):
            tx = FromHex(CTransaction(), self.nodes[2].gettransaction(segwit_tx_list[i])["hex"])
            assert(self.nodes[2].getrawtransaction(segwit_tx_list[i]) != self.nodes[0].getrawtransaction(segwit_tx_list[i]))
            assert(self.nodes[1].getrawtransaction(segwit_tx_list[i], 0) == self.nodes[2].getrawtransaction(segwit_tx_list[i]))
            assert(self.nodes[0].getrawtransaction(segwit_tx_list[i]) != self.nodes[2].gettransaction(segwit_tx_list[i])["hex"])
            assert(self.nodes[1].getrawtransaction(segwit_tx_list[i]) == self.nodes[2].gettransaction(segwit_tx_list[i])["hex"])
            assert(self.nodes[0].getrawtransaction(segwit_tx_list[i]) == bytes_to_hex_str(tx.serialize_without_witness()))

        self.log.info("Verify witness txs without witness data are invalid after the fork")
        self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch) (code 64)', wit_ids[NODE_2][WIT_V0][2], sign=False)
        self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness) (code 64)', wit_ids[NODE_2][WIT_V1][2], sign=False)
        self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch) (code 64)', p2sh_ids[NODE_2][WIT_V0][2], sign=False, redeem_script=witness_script(False, self.pubkey[2]))
        self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness) (code 64)', p2sh_ids[NODE_2][WIT_V1][2], sign=False, redeem_script=witness_script(True, self.pubkey[2]))

        self.log.info("Verify default node can now use witness txs")
        self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V0][0], True) #block 433
        self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V1][0], True) #block 434
        self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V0][0], True) #block 435
        self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V1][0], True) #block 436

        self.log.info("Verify sigops are counted in GBT with BIP141 rules after the fork")
        txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
        tmpl = self.nodes[0].getblocktemplate({'rules':['segwit']})
        assert(tmpl['sizelimit'] >= 3999577)  # actual maximum size is lower due to minimum mandatory non-witness data
        assert(tmpl['weightlimit'] == 4000000)
        assert(tmpl['sigoplimit'] == 80000)
        assert(tmpl['transactions'][0]['txid'] == txid)
        assert(tmpl['transactions'][0]['sigops'] == 8)

        self.nodes[0].generate(1) # Mine a block to clear the gbt cache

        self.log.info("Non-segwit miners are able to use GBT response after activation.")
        # Create a 3-tx chain: tx1 (non-segwit input, paying to a segwit output) ->
        #                      tx2 (segwit input, paying to a non-segwit output) ->
        #                      tx3 (non-segwit input, paying to a non-segwit output).
        # tx1 is allowed to appear in the block, but no others.
        txid1 = send_to_witness(1, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[0], False, Decimal("49.996"))
        hex_tx = self.nodes[0].gettransaction(txid1)['hex']
        tx = FromHex(CTransaction(), hex_tx)
        assert(tx.wit.is_null()) # This should not be a segwit input
        assert(txid1 in self.nodes[0].getrawmempool())

        # Now create tx2, which will spend from txid1.
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(int(txid1, 16), 0), b''))
        tx.vout.append(CTxOut(int(49.99 * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE])))
        tx2_hex = self.nodes[0].signrawtransactionwithwallet(ToHex(tx))['hex']
        txid2 = self.nodes[0].sendrawtransaction(tx2_hex)
        tx = FromHex(CTransaction(), tx2_hex)
        assert(not tx.wit.is_null())

        # Now create tx3, which will spend from txid2
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(int(txid2, 16), 0), b""))
        tx.vout.append(CTxOut(int(49.95 * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE])))  # Huge fee
        tx.calc_sha256()
        txid3 = self.nodes[0].sendrawtransaction(ToHex(tx))
        assert(tx.wit.is_null())
        assert(txid3 in self.nodes[0].getrawmempool())

        # Now try calling getblocktemplate() without segwit support.
        template = self.nodes[0].getblocktemplate()

        # Check that tx1 is the only transaction of the 3 in the template.
        template_txids = [ t['txid'] for t in template['transactions'] ]
        assert(txid2 not in template_txids and txid3 not in template_txids)
        assert(txid1 in template_txids)

        # Check that running with segwit support results in all 3 being included.
        template = self.nodes[0].getblocktemplate({"rules": ["segwit"]})
        template_txids = [ t['txid'] for t in template['transactions'] ]
        assert(txid1 in template_txids)
        assert(txid2 in template_txids)
        assert(txid3 in template_txids)

        # Check that wtxid is properly reported in mempool entry
        assert_equal(int(self.nodes[0].getmempoolentry(txid3)["wtxid"], 16), tx.calc_sha256(True))
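        # calc_sha256(True) hashes the transaction including witness data,
        # reproducing the wtxid reported by getmempoolentry; tx3 carries no
        # witness, so its wtxid coincides with its txid.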

        # Mine a block to clear the gbt cache again.
        self.nodes[0].generate(1)

        self.log.info("Verify behaviour of importaddress, addwitnessaddress and listunspent")

        # Some public keys to be used later
        pubkeys = [
            "0363D44AABD0F1699138239DF2F042C3282C0671CC7A76826A55C8203D90E39242", # cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb
            "02D3E626B3E616FC8662B489C123349FECBFC611E778E5BE739B257EAE4721E5BF", # cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97
            "04A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538A62F5BD8EC85C2477F39650BD391EA6250207065B2A81DA8B009FC891E898F0E", # 91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV
            "02A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538", # cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd
            "036722F784214129FEB9E8129D626324F3F6716555B603FFE8300BBCB882151228", # cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66
            "0266A8396EE936BF6D99D17920DB21C6C7B1AB14C639D5CD72B300297E416FD2EC", # cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K
            "0450A38BD7F0AC212FEBA77354A9B036A32E0F7C81FC4E0C5ADCA7C549C4505D2522458C2D9AE3CEFD684E039194B72C8A10F9CB9D4764AB26FCC2718D421D3B84", # 92h2XPssjBpsJN5CqSP7v9a7cf2kgDunBC6PDFwJHMACM1rrVBJ
        ]

        # Import a compressed key and an uncompressed key, generate some multisig addresses
        self.nodes[0].importprivkey("92e6XLo5jVAVwrQKPNTs93oQco8f8sDNBcpv73Dsrs397fQtFQn")
        uncompressed_spendable_address = ["mvozP4UwyGD2mGZU4D2eMvMLPB9WkMmMQu"]
        self.nodes[0].importprivkey("cNC8eQ5dg3mFAVePDX4ddmPYpPbw41r9bm2jd1nLJT77e6RrzTRR")
        compressed_spendable_address = ["mmWQubrDomqpgSYekvsU7HWEVjLFHAakLe"]
        assert(not self.nodes[0].getaddressinfo(uncompressed_spendable_address[0])['iscompressed'])
        assert(self.nodes[0].getaddressinfo(compressed_spendable_address[0])['iscompressed'])

        self.nodes[0].importpubkey(pubkeys[0])
        compressed_solvable_address = [key_to_p2pkh(pubkeys[0])]
        self.nodes[0].importpubkey(pubkeys[1])
        compressed_solvable_address.append(key_to_p2pkh(pubkeys[1]))
        self.nodes[0].importpubkey(pubkeys[2])
        uncompressed_solvable_address = [key_to_p2pkh(pubkeys[2])]

        spendable_anytime = []                      # These outputs should be seen anytime after importprivkey and addmultisigaddress
        spendable_after_importaddress = []          # These outputs should be seen after importaddress
        solvable_after_importaddress = []           # These outputs should be seen after importaddress but not spendable
        unsolvable_after_importaddress = []         # These outputs should be unsolvable after importaddress
        solvable_anytime = []                       # These outputs should be solvable after importpubkey
        unseen_anytime = []                         # These outputs should never be seen

        uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]])['address'])
        uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]])['address'])
        compressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_spendable_address[0]])['address'])
        uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], uncompressed_solvable_address[0]])['address'])
        compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_solvable_address[0]])['address'])
        compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_solvable_address[0], compressed_solvable_address[1]])['address'])
        unknown_address = ["mtKKyoHabkk6e4ppT7NaM7THqPUt7AzPrT", "2NDP3jLWAFT8NDAiUa9qiE6oBt2awmMq7Dx"]

        # Test multisig_without_privkey
        # We have 2 public keys without private keys, use addmultisigaddress to add to wallet.
        # Money sent to P2SH of multisig of this should only be seen after importaddress with the BASE58 P2SH address.

        multisig_without_privkey_address = self.nodes[0].addmultisigaddress(2, [pubkeys[3], pubkeys[4]])['address']
        script = CScript([OP_2, hex_str_to_bytes(pubkeys[3]), hex_str_to_bytes(pubkeys[4]), OP_2, OP_CHECKMULTISIG])
        solvable_after_importaddress.append(CScript([OP_HASH160, hash160(script), OP_EQUAL]))
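        # `script` is the bare 2-of-2 CHECKMULTISIG redeem script; the appended entry is its P2SH wrapper
        # (OP_HASH160 <hash160(redeemScript)> OP_EQUAL), i.e. the scriptPubKey behind the address above.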

        for i in compressed_spendable_address:
            v = self.nodes[0].getaddressinfo(i)
            if (v['isscript']):
                [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
                # p2sh multisig with compressed keys should always be spendable
                spendable_anytime.extend([p2sh])
                # bare multisig can be watched and signed, but is not treated as ours
                solvable_after_importaddress.extend([bare])
                # P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after direct importaddress
                spendable_after_importaddress.extend([p2wsh, p2sh_p2wsh])
            else:
                [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
                # normal P2PKH and P2PK with compressed keys should always be spendable
                spendable_anytime.extend([p2pkh, p2pk])
                # P2SH_P2PK, P2SH_P2PKH with compressed keys are spendable after direct importaddress
                spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
                # P2WPKH and P2SH_P2WPKH with compressed keys should always be spendable
                spendable_anytime.extend([p2wpkh, p2sh_p2wpkh])

        for i in uncompressed_spendable_address:
            v = self.nodes[0].getaddressinfo(i)
            if (v['isscript']):
                [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
                # p2sh multisig with uncompressed keys should always be spendable
                spendable_anytime.extend([p2sh])
                # bare multisig can be watched and signed, but is not treated as ours
                solvable_after_importaddress.extend([bare])
                # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
                unseen_anytime.extend([p2wsh, p2sh_p2wsh])
            else:
                [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
                # normal P2PKH and P2PK with uncompressed keys should always be spendable
                spendable_anytime.extend([p2pkh, p2pk])
                # P2SH_P2PK and P2SH_P2PKH are spendable after direct importaddress
                spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh])
                # Witness output types with uncompressed keys are never seen
                unseen_anytime.extend([p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])

        for i in compressed_solvable_address:
            v = self.nodes[0].getaddressinfo(i)
            if (v['isscript']):
                # Multisig without private keys is not seen after addmultisigaddress, but is seen after importaddress
                [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
                solvable_after_importaddress.extend([bare, p2sh, p2wsh, p2sh_p2wsh])
            else:
                [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
                # normal P2PKH, P2PK, P2WPKH and P2SH_P2WPKH with compressed keys should always be seen
                solvable_anytime.extend([p2pkh, p2pk, p2wpkh, p2sh_p2wpkh])
                # P2SH_P2PK, P2SH_P2PKH with compressed keys are seen after direct importaddress
                solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])

        for i in uncompressed_solvable_address:
            v = self.nodes[0].getaddressinfo(i)
            if (v['isscript']):
                [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
                # Bare and P2SH multisig with uncompressed keys and no private keys are not seen after addmultisigaddress, but are seen after importaddress
                solvable_after_importaddress.extend([bare, p2sh])
                # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
                unseen_anytime.extend([p2wsh, p2sh_p2wsh])
            else:
                [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
                # normal P2PKH and P2PK with uncompressed keys should always be seen
                solvable_anytime.extend([p2pkh, p2pk])
                # P2SH_P2PK, P2SH_P2PKH with uncompressed keys are seen after direct importaddress
                solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh])
                # Witness output types with uncompressed keys are never seen
                unseen_anytime.extend([p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])

        op1 = CScript([OP_1])
        op0 = CScript([OP_0])
        # 2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe is the P2SH(P2PKH) version of mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V
        unsolvable_address = ["mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V", "2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe", script_to_p2sh(op1), script_to_p2sh(op0)]
        unsolvable_address_key = hex_str_to_bytes("02341AEC7587A51CDE5279E0630A531AEA2615A9F80B17E8D9376327BAEAA59E3D")
        unsolvablep2pkh = CScript([OP_DUP, OP_HASH160, hash160(unsolvable_address_key), OP_EQUALVERIFY, OP_CHECKSIG])
        unsolvablep2wshp2pkh = CScript([OP_0, sha256(unsolvablep2pkh)])
        p2shop0 = CScript([OP_HASH160, hash160(op0), OP_EQUAL])
        p2wshop1 = CScript([OP_0, sha256(op1)])
        unsolvable_after_importaddress.append(unsolvablep2pkh)
        unsolvable_after_importaddress.append(unsolvablep2wshp2pkh)
        unsolvable_after_importaddress.append(op1) # OP_1 will be imported as script
        unsolvable_after_importaddress.append(p2wshop1)
        unseen_anytime.append(op0) # OP_0 will be imported as P2SH address with no script provided
        unsolvable_after_importaddress.append(p2shop0)

        spendable_txid = []
        solvable_txid = []
        spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime, 2))
        solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime, 1))
        self.mine_and_test_listunspent(spendable_after_importaddress + solvable_after_importaddress + unseen_anytime + unsolvable_after_importaddress, 0)
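        # Note: the second argument to mine_and_test_listunspent presumably encodes expected ownership:
        # 2 = spendable by this wallet, 1 = solvable/watch-only, 0 = not reported by listunspent at all.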

        importlist = []
        for i in compressed_spendable_address + uncompressed_spendable_address + compressed_solvable_address + uncompressed_solvable_address:
            v = self.nodes[0].getaddressinfo(i)
            if (v['isscript']):
                bare = hex_str_to_bytes(v['hex'])
                importlist.append(bytes_to_hex_str(bare))
                importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(bare)])))
            else:
                pubkey = hex_str_to_bytes(v['pubkey'])
                p2pk = CScript([pubkey, OP_CHECKSIG])
                p2pkh = CScript([OP_DUP, OP_HASH160, hash160(pubkey), OP_EQUALVERIFY, OP_CHECKSIG])
                importlist.append(bytes_to_hex_str(p2pk))
                importlist.append(bytes_to_hex_str(p2pkh))
                importlist.append(bytes_to_hex_str(CScript([OP_0, hash160(pubkey)])))
                importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(p2pk)])))
                importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(p2pkh)])))

        importlist.append(bytes_to_hex_str(unsolvablep2pkh))
        importlist.append(bytes_to_hex_str(unsolvablep2wshp2pkh))
        importlist.append(bytes_to_hex_str(op1))
        importlist.append(bytes_to_hex_str(p2wshop1))

        for i in importlist:
            # import all generated addresses. The wallet already has the private keys for some of these, so catch JSON RPC
            # exceptions and continue.
            try_rpc(-4, "The wallet already contains the private key for this address or script", self.nodes[0].importaddress, i, "", False, True)

        self.nodes[0].importaddress(script_to_p2sh(op0)) # import OP_0 as address only
        self.nodes[0].importaddress(multisig_without_privkey_address) # Test multisig_without_privkey

        spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime + spendable_after_importaddress, 2))
        solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime + solvable_after_importaddress, 1))
        self.mine_and_test_listunspent(unsolvable_after_importaddress, 1)
        self.mine_and_test_listunspent(unseen_anytime, 0)

        # addwitnessaddress should refuse to return a witness address if an uncompressed key is used
        # note that no witness address should be returned for unsolvable addresses
        for i in uncompressed_spendable_address + uncompressed_solvable_address + unknown_address + unsolvable_address:
            assert_raises_rpc_error(-4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i)

        # addwitnessaddress should return a witness address even if the keys are not in the wallet
        self.nodes[0].addwitnessaddress(multisig_without_privkey_address)

        for i in compressed_spendable_address + compressed_solvable_address:
            witaddress = self.nodes[0].addwitnessaddress(i)
            # addwitnessaddress should return the same address if it is a known P2SH-witness address
            assert_equal(witaddress, self.nodes[0].addwitnessaddress(witaddress))

        spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime + spendable_after_importaddress, 2))
        solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime + solvable_after_importaddress, 1))
        self.mine_and_test_listunspent(unsolvable_after_importaddress, 1)
        self.mine_and_test_listunspent(unseen_anytime, 0)

        # Repeat some tests. This time we don't add witness scripts with importaddress
        # Import a compressed key and an uncompressed key, generate some multisig addresses
        self.nodes[0].importprivkey("927pw6RW8ZekycnXqBQ2JS5nPyo1yRfGNN8oq74HeddWSpafDJH")
        uncompressed_spendable_address = ["mguN2vNSCEUh6rJaXoAVwY3YZwZvEmf5xi"]
        self.nodes[0].importprivkey("cMcrXaaUC48ZKpcyydfFo8PxHAjpsYLhdsp6nmtB3E2ER9UUHWnw")
        compressed_spendable_address = ["n1UNmpmbVUJ9ytXYXiurmGPQ3TRrXqPWKL"]

        self.nodes[0].importpubkey(pubkeys[5])
        compressed_solvable_address = [key_to_p2pkh(pubkeys[5])]
        self.nodes[0].importpubkey(pubkeys[6])
        uncompressed_solvable_address = [key_to_p2pkh(pubkeys[6])]

        spendable_after_addwitnessaddress = []      # These outputs should be seen after addwitnessaddress
        solvable_after_addwitnessaddress = []       # These outputs should be seen after addwitnessaddress but not spendable
        solvable_anytime = []                       # These outputs should be solvable after importpubkey
        unseen_anytime = []                         # These outputs should never be seen

        uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]])['address'])
        uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]])['address'])
        compressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_spendable_address[0]])['address'])
        uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_solvable_address[0], uncompressed_solvable_address[0]])['address'])
        compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_solvable_address[0]])['address'])

        premature_witaddress = []

        for i in compressed_spendable_address:
            v = self.nodes[0].getaddressinfo(i)
            if (v['isscript']):
                [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
                # P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after addwitnessaddress
                spendable_after_addwitnessaddress.extend([p2wsh, p2sh_p2wsh])
                premature_witaddress.append(script_to_p2sh(p2wsh))
            else:
                [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
                # P2WPKH, P2SH_P2WPKH are always spendable
                spendable_anytime.extend([p2wpkh, p2sh_p2wpkh])

        for i in uncompressed_spendable_address + uncompressed_solvable_address:
            v = self.nodes[0].getaddressinfo(i)
            if (v['isscript']):
                [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
                # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
                unseen_anytime.extend([p2wsh, p2sh_p2wsh])
            else:
                [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
                # P2WPKH, P2SH_P2WPKH with uncompressed keys are never seen
                unseen_anytime.extend([p2wpkh, p2sh_p2wpkh])

        for i in compressed_solvable_address:
            v = self.nodes[0].getaddressinfo(i)
            if (v['isscript']):
                # P2WSH multisig without private keys is seen after addwitnessaddress
                [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
                solvable_after_addwitnessaddress.extend([p2wsh, p2sh_p2wsh])
                premature_witaddress.append(script_to_p2sh(p2wsh))
            else:
                [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
                # P2WPKH and P2SH_P2WPKH with compressed keys are always solvable
                solvable_anytime.extend([p2wpkh, p2sh_p2wpkh])

        self.mine_and_test_listunspent(spendable_anytime, 2)
        self.mine_and_test_listunspent(solvable_anytime, 1)
        self.mine_and_test_listunspent(spendable_after_addwitnessaddress + solvable_after_addwitnessaddress + unseen_anytime, 0)

        # addwitnessaddress should refuse to return a witness address if an uncompressed key is used
        # note that a multisig address returned by addmultisigaddress is not solvable until it is added with importaddress
        # premature_witaddress are not accepted until the script is added with addwitnessaddress first
        for i in uncompressed_spendable_address + uncompressed_solvable_address + premature_witaddress:
            # This will raise an exception
            assert_raises_rpc_error(-4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i)

        # after importaddress it should pass addwitnessaddress
        v = self.nodes[0].getaddressinfo(compressed_solvable_address[1])
        self.nodes[0].importaddress(v['hex'], "", False, True)
        for i in compressed_spendable_address + compressed_solvable_address + premature_witaddress:
            witaddress = self.nodes[0].addwitnessaddress(i)
            assert_equal(witaddress, self.nodes[0].addwitnessaddress(witaddress))

        spendable_txid.append(self.mine_and_test_listunspent(spendable_after_addwitnessaddress + spendable_anytime, 2))
        solvable_txid.append(self.mine_and_test_listunspent(solvable_after_addwitnessaddress + solvable_anytime, 1))
        self.mine_and_test_listunspent(unseen_anytime, 0)

        # Check that createrawtransaction/decoderawtransaction with non-v0 Bech32 works
        v1_addr = program_to_witness(1, [3,5])
        v1_tx = self.nodes[0].createrawtransaction([getutxo(spendable_txid[0])],{v1_addr: 1})
        v1_decoded = self.nodes[1].decoderawtransaction(v1_tx)
        assert_equal(v1_decoded['vout'][0]['scriptPubKey']['addresses'][0], v1_addr)
        assert_equal(v1_decoded['vout'][0]['scriptPubKey']['hex'], "51020305")
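        # Decoding the hex: 0x51 is OP_1 (witness version 1) and 0x02 pushes the two program bytes
        # 0x03 0x05, matching the program_to_witness(1, [3, 5]) address created above.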

        # Check that spendable outputs are really spendable
        self.create_and_mine_tx_from_txids(spendable_txid)

        # import all the private keys so solvable addresses become spendable
        self.nodes[0].importprivkey("cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb")
        self.nodes[0].importprivkey("cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97")
        self.nodes[0].importprivkey("91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV")
        self.nodes[0].importprivkey("cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd")
        self.nodes[0].importprivkey("cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66")
        self.nodes[0].importprivkey("cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K")
        self.create_and_mine_tx_from_txids(solvable_txid)

        # Test that importing native P2WPKH/P2WSH scripts works
        for use_p2wsh in [False, True]:
            if use_p2wsh:
                scriptPubKey = "00203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a"
                transaction = "01000000000100e1f505000000002200203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a00000000"
            else:
                scriptPubKey = "a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d87"
                transaction = "01000000000100e1f5050000000017a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d8700000000"

            self.nodes[1].importaddress(scriptPubKey, "", False)
            rawtxfund = self.nodes[1].fundrawtransaction(transaction)['hex']
            rawtxfund = self.nodes[1].signrawtransactionwithwallet(rawtxfund)["hex"]
            txid = self.nodes[1].sendrawtransaction(rawtxfund)

            assert_equal(self.nodes[1].gettransaction(txid, True)["txid"], txid)
            assert_equal(self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid)

            # Assert it is properly saved
            self.stop_node(1)
            self.start_node(1)
            assert_equal(self.nodes[1].gettransaction(txid, True)["txid"], txid)
            assert_equal(self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid)
Example #50
0
    def run_test(self):
        test_data = os.path.join(TESTSDIR, self.options.test_data)
        if self.options.gen_test_data:
            self.generate_test_data(test_data)
        else:
            self.load_test_data(test_data)

        self.sync_all()
        stats = self.get_stats()
        expected_stats_noindex = []
        for stat_row in stats:
            expected_stats_noindex.append({
                k: v
                for k, v in stat_row.items()
                if k not in self.STATS_NEED_TXINDEX
            })

        # Make sure all valid statistics are included but nothing else is
        expected_keys = self.expected_stats[0].keys()
        assert_equal(set(stats[0].keys()), set(expected_keys))

        assert_equal(stats[0]['height'], self.start_height)
        assert_equal(stats[self.max_stat_pos]['height'],
                     self.start_height + self.max_stat_pos)

        for i in range(self.max_stat_pos + 1):
            self.log.info('Checking block %d\n' % (i))
            assert_equal(stats[i], self.expected_stats[i])

            # Check selecting block by hash too
            blockhash = self.expected_stats[i]['blockhash']
            stats_by_hash = self.nodes[0].getblockstats(
                hash_or_height=blockhash)
            assert_equal(stats_by_hash, self.expected_stats[i])

            # Check with the node that has no txindex
            stats_no_txindex = self.nodes[1].getblockstats(
                hash_or_height=blockhash,
                stats=list(expected_stats_noindex[i].keys()))
            assert_equal(stats_no_txindex, expected_stats_noindex[i])

        # Make sure each stat can be queried on its own
        for stat in expected_keys:
            for i in range(self.max_stat_pos + 1):
                result = self.nodes[0].getblockstats(
                    hash_or_height=self.start_height + i, stats=[stat])
                assert_equal(list(result.keys()), [stat])
                if result[stat] != self.expected_stats[i][stat]:
                    self.log.info(
                        'result[%s] (%d) failed, %r != %r' %
                        (stat, i, result[stat], self.expected_stats[i][stat]))
                assert_equal(result[stat], self.expected_stats[i][stat])

        # Make sure only the selected statistics are included (more than one)
        some_stats = {'minfee', 'maxfee'}
        stats = self.nodes[0].getblockstats(hash_or_height=1,
                                            stats=list(some_stats))
        assert_equal(set(stats.keys()), some_stats)

        # Test invalid parameters raise the proper json exceptions
        tip = self.start_height + self.max_stat_pos
        assert_raises_rpc_error(-8,
                                'Target block height %d after current tip %d' %
                                (tip + 1, tip),
                                self.nodes[0].getblockstats,
                                hash_or_height=tip + 1)
        assert_raises_rpc_error(-8,
                                'Target block height %d is negative' % (-1),
                                self.nodes[0].getblockstats,
                                hash_or_height=-1)

        # Make sure invalid stats are rejected
        inv_sel_stat = 'asdfghjkl'
        inv_stats = [
            [inv_sel_stat],
            ['minfee', inv_sel_stat],
            [inv_sel_stat, 'minfee'],
            ['minfee', inv_sel_stat, 'maxfee'],
        ]
        for inv_stat in inv_stats:
            assert_raises_rpc_error(-8,
                                    'Invalid selected statistic %s' %
                                    inv_sel_stat,
                                    self.nodes[0].getblockstats,
                                    hash_or_height=1,
                                    stats=inv_stat)

        # Make sure we aren't always returning inv_sel_stat as the culprit stat
        assert_raises_rpc_error(-8,
                                'Invalid selected statistic aaa%s' %
                                inv_sel_stat,
                                self.nodes[0].getblockstats,
                                hash_or_height=1,
                                stats=['minfee',
                                       'aaa%s' % inv_sel_stat])

        assert_raises_rpc_error(
            -8,
            'One or more of the selected stats requires -txindex enabled',
            self.nodes[1].getblockstats,
            hash_or_height=self.start_height + self.max_stat_pos)

        # Mainchain's genesis block shouldn't be found on regtest
        assert_raises_rpc_error(
            -5,
            'Block not found',
            self.nodes[0].getblockstats,
            hash_or_height=
            '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f')
    def script_verification_error_test(self):
        """Create and sign a raw transaction with valid (vin 0), invalid (vin 1) and one missing (vin 2) input script.

        Expected results:

        3) The transaction has no complete set of signatures
        4) Two script verification errors occurred
        5) Script verification errors have certain properties ("txid", "vout", "scriptSig", "sequence", "error")
        6) The verification errors refer to the invalid (vin 1) and missing input (vin 2)"""
        privKeys = ['cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N']

        inputs = [
            # Valid pay-to-pubkey script
            {
                'txid':
                '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71',
                'vout': 0
            },
            # Invalid script
            {
                'txid':
                '5b8673686910442c644b1f4993d8f7753c7c8fcb5c87ee40d56eaeef25204547',
                'vout': 7
            },
            # Missing scriptPubKey
            {
                'txid':
                '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71',
                'vout': 1
            },
        ]

        scripts = [
            # Valid pay-to-pubkey script
            {
                'txid':
                '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71',
                'vout':
                0,
                'scriptPubKey':
                '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'
            },
            # Invalid script
            {
                'txid':
                '5b8673686910442c644b1f4993d8f7753c7c8fcb5c87ee40d56eaeef25204547',
                'vout': 7,
                'scriptPubKey': 'badbadbadbad'
            }
        ]

        outputs = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1}

        rawTx = self.nodes[0].createrawtransaction(inputs, outputs)

        # Make sure decoderawtransaction is at least marginally sane
        decodedRawTx = self.nodes[0].decoderawtransaction(rawTx)
        for i, inp in enumerate(inputs):
            assert_equal(decodedRawTx["vin"][i]["txid"], inp["txid"])
            assert_equal(decodedRawTx["vin"][i]["vout"], inp["vout"])

        # Make sure decoderawtransaction throws if there is extra data
        assert_raises_rpc_error(-22, "TX decode failed",
                                self.nodes[0].decoderawtransaction,
                                rawTx + "00")

        rawTxSigned = self.nodes[0].signrawtransactionwithkey(
            rawTx, privKeys, scripts)

        # 3) The transaction has no complete set of signatures
        assert not rawTxSigned['complete']

        # 4) Two script verification errors occurred
        assert 'errors' in rawTxSigned
        assert_equal(len(rawTxSigned['errors']), 2)

        # 5) Script verification errors have certain properties
        assert 'txid' in rawTxSigned['errors'][0]
        assert 'vout' in rawTxSigned['errors'][0]
        assert 'witness' in rawTxSigned['errors'][0]
        assert 'scriptSig' in rawTxSigned['errors'][0]
        assert 'sequence' in rawTxSigned['errors'][0]
        assert 'error' in rawTxSigned['errors'][0]

        # 6) The verification errors refer to the invalid (vin 1) and missing input (vin 2)
        assert_equal(rawTxSigned['errors'][0]['txid'], inputs[1]['txid'])
        assert_equal(rawTxSigned['errors'][0]['vout'], inputs[1]['vout'])
        assert_equal(rawTxSigned['errors'][1]['txid'], inputs[2]['txid'])
        assert_equal(rawTxSigned['errors'][1]['vout'], inputs[2]['vout'])
        assert not rawTxSigned['errors'][0]['witness']

        # Perform same test with signrawtransaction
        rawTxSigned2 = self.nodes[0].signrawtransaction(
            rawTx, scripts, privKeys)
        assert_equal(rawTxSigned, rawTxSigned2)

        # Now test signing failure for transaction with input witnesses
        p2wpkh_raw_tx = "01000000000102fff7f7881a8099afa6940d42d1e7f6362bec38171ea3edf433541db4e4ad969f00000000494830450221008b9d1dc26ba6a9cb62127b02742fa9d754cd3bebf337f7a55d114c8e5cdd30be022040529b194ba3f9281a99f2b1c0a19c0489bc22ede944ccf4ecbab4cc618ef3ed01eeffffffef51e1b804cc89d182d279655c3aa89e815b1b309fe287d9b2b55d57b90ec68a0100000000ffffffff02202cb206000000001976a9148280b37df378db99f66f85c95a783a76ac7a6d5988ac9093510d000000001976a9143bde42dbee7e4dbe6a21b2d50ce2f0167faa815988ac000247304402203609e17b84f6a7d30c80bfa610b5b4542f32a8a0d5447a12fb1366d7f01cc44a0220573a954c4518331561406f90300e8f3358f51928d43c212a8caed02de67eebee0121025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee635711000000"

        rawTxSigned = self.nodes[0].signrawtransactionwithwallet(p2wpkh_raw_tx)

        # 7) The transaction has no complete set of signatures
        assert not rawTxSigned['complete']

        # 8) Two script verification errors occurred
        assert 'errors' in rawTxSigned
        assert_equal(len(rawTxSigned['errors']), 2)

        # 9) Script verification errors have certain properties
        assert 'txid' in rawTxSigned['errors'][0]
        assert 'vout' in rawTxSigned['errors'][0]
        assert 'witness' in rawTxSigned['errors'][0]
        assert 'scriptSig' in rawTxSigned['errors'][0]
        assert 'sequence' in rawTxSigned['errors'][0]
        assert 'error' in rawTxSigned['errors'][0]

        # Non-empty witness checked here
        assert_equal(rawTxSigned['errors'][1]['witness'], [
            "304402203609e17b84f6a7d30c80bfa610b5b4542f32a8a0d5447a12fb1366d7f01cc44a0220573a954c4518331561406f90300e8f3358f51928d43c212a8caed02de67eebee01",
            "025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee6357"
        ])
        assert not rawTxSigned['errors'][0]['witness']

        # Perform same test with signrawtransaction
        rawTxSigned2 = self.nodes[0].signrawtransaction(p2wpkh_raw_tx)
        assert_equal(rawTxSigned, rawTxSigned2)
Example #52
0
    def run_test(self):
        # Check that there's no UTXO on the node
        node = self.nodes[0]
        assert_equal(len(node.listunspent()), 0)

        # Note each time we call generate, all generated coins go into
        # the same address, so we call twice to get two addresses w/50 each
        node.generatetoaddress(nblocks=1,
                               address=node.getnewaddress(label='coinbase'))
        node.generatetoaddress(nblocks=101,
                               address=node.getnewaddress(label='coinbase'))
        assert_equal(node.getbalance(), 100)

        # there should be 2 address groups
        # each with 1 address with a balance of 50 Bitcoins
        address_groups = node.listaddressgroupings()
        assert_equal(len(address_groups), 2)
        # the addresses aren't linked now, but will be after we send to the
        # common address
        linked_addresses = set()
        for address_group in address_groups:
            assert_equal(len(address_group), 1)
            assert_equal(len(address_group[0]), 3)
            assert_equal(address_group[0][1], 50)
            assert_equal(address_group[0][2], 'coinbase')
            linked_addresses.add(address_group[0][0])

        # send 50 from each address to a third address not in this wallet
        common_address = "msf4WtN1YQKXvNtvdFYt9JBnUD2FB41kjr"
        node.sendmany(
            amounts={common_address: 100},
            subtractfeefrom=[common_address],
            minconf=1,
        )
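        # Spending both 50-coin outputs in a single transaction is what links the two addresses; with
        # subtractfeefrom the full 100 (minus fee) leaves the wallet, so no change output is created.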
        # there should be 1 address group, with the previously
        # unlinked addresses now linked (they both have 0 balance)
        address_groups = node.listaddressgroupings()
        assert_equal(len(address_groups), 1)
        assert_equal(len(address_groups[0]), 2)
        assert_equal(set([a[0] for a in address_groups[0]]), linked_addresses)
        assert_equal([a[1] for a in address_groups[0]], [0, 0])

        node.generate(1)

        # we want to reset so that the "" label has what's expected.
        # otherwise we're off by exactly the fee amount as that's mined
        # and matures in the next 100 blocks
        amount_to_send = 1.0

        # Create labels and make sure subsequent label API calls
        # recognize the label/address associations.
        labels = [Label(name) for name in ("a", "b", "c", "d", "e")]
        for label in labels:
            address = node.getnewaddress(label.name)
            label.add_receive_address(address)
            label.verify(node)

        # Check all labels are returned by listlabels.
        assert_equal(node.listlabels(),
                     sorted(['coinbase'] + [label.name for label in labels]))

        # Send a transaction to each label.
        for label in labels:
            node.sendtoaddress(label.addresses[0], amount_to_send)
            label.verify(node)

        # Check the amounts received.
        node.generate(1)
        for label in labels:
            assert_equal(node.getreceivedbyaddress(label.addresses[0]),
                         amount_to_send)
            assert_equal(node.getreceivedbylabel(label.name), amount_to_send)

        for i, label in enumerate(labels):
            to_label = labels[(i + 1) % len(labels)]
            node.sendtoaddress(to_label.addresses[0], amount_to_send)
        node.generate(1)
        for label in labels:
            address = node.getnewaddress(label.name)
            label.add_receive_address(address)
            label.verify(node)
            assert_equal(node.getreceivedbylabel(label.name), 2)
            label.verify(node)
        node.generate(101)

        # Check that setlabel can assign a label to a new unused address.
        for label in labels:
            address = node.getnewaddress()
            node.setlabel(address, label.name)
            label.add_address(address)
            label.verify(node)
            assert_raises_rpc_error(-11, "No addresses with label",
                                    node.getaddressesbylabel, "")

        # Check that addmultisigaddress can assign labels.
        if not self.options.descriptors:
            for label in labels:
                addresses = []
                for _ in range(10):
                    addresses.append(node.getnewaddress())
                multisig_address = node.addmultisigaddress(
                    5, addresses, label.name)['address']
                label.add_address(multisig_address)
                label.purpose[multisig_address] = "send"
                label.verify(node)
            node.generate(101)

        # Check that setlabel can change the label of an address from a
        # different label.
        change_label(node, labels[0].addresses[0], labels[0], labels[1])

        # Check that setlabel can set the label of an address already
        # in the label. This is a no-op.
        change_label(node, labels[2].addresses[0], labels[2], labels[2])

        self.log.info('Check watchonly labels')
        node.createwallet(wallet_name='watch_only', disable_private_keys=True)
        wallet_watch_only = node.get_wallet_rpc('watch_only')
        BECH32_VALID = {
            '✔️_VER15_PROG40':
            'bcrt10qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqxkg7fn',
            '✔️_VER16_PROG03': 'bcrt1sqqqqq8uhdgr',
            '✔️_VER16_PROB02': 'bcrt1sqqqq4wstyw',
        }
        BECH32_INVALID = {
            '❌_VER15_PROG41':
            'bcrt1sqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqajlxj8',
            '❌_VER16_PROB01': 'bcrt1sqq5r4036',
        }
        for l in BECH32_VALID:
            ad = BECH32_VALID[l]
            wallet_watch_only.importaddress(label=l, rescan=False, address=ad)
            node.generatetoaddress(1, ad)
            assert_equal(wallet_watch_only.getaddressesbylabel(label=l),
                         {ad: {
                             'purpose': 'receive'
                         }})
            assert_equal(wallet_watch_only.getreceivedbylabel(label=l), 0)
        for l in BECH32_INVALID:
            ad = BECH32_INVALID[l]
            assert_raises_rpc_error(
                -5,
                "Address is not valid" if self.options.descriptors else
                "Invalid Particl address or script",
                lambda: wallet_watch_only.importaddress(
                    label=l, rescan=False, address=ad),
            )
Example #53
0
    def run_test(self):
        # Check that there are no UTXOs on any of the nodes
        assert_equal(len(self.nodes[0].listunspent()), 0)
        assert_equal(len(self.nodes[1].listunspent()), 0)
        assert_equal(len(self.nodes[2].listunspent()), 0)

        self.log.info("Mining blocks...")

        self.nodes[0].generate(1)

        walletinfo = self.nodes[0].getwalletinfo()
        assert_equal(walletinfo['immature_balance'], 250)
        assert_equal(walletinfo['balance'], 0)

        self.sync_all([self.nodes[0:3]])
        self.nodes[1].generate(101)
        self.sync_all([self.nodes[0:3]])

        assert_equal(self.nodes[0].getbalance(), 250)
        assert_equal(self.nodes[1].getbalance(), 250)
        assert_equal(self.nodes[2].getbalance(), 0)

        # Check that only first and second nodes have UTXOs
        utxos = self.nodes[0].listunspent()
        assert_equal(len(utxos), 1)
        assert_equal(len(self.nodes[1].listunspent()), 1)
        assert_equal(len(self.nodes[2].listunspent()), 0)

        walletinfo = self.nodes[0].getwalletinfo()
        assert_equal(walletinfo['immature_balance'], 0)

        # Exercise locking of unspent outputs
        unspent_0 = self.nodes[1].listunspent()[0]
        unspent_0 = {"txid": unspent_0["txid"], "vout": unspent_0["vout"]}
        self.nodes[1].lockunspent(False, [unspent_0])
        assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[1].sendtoaddress, self.nodes[1].getnewaddress(), 20)
        assert_equal([unspent_0], self.nodes[1].listlockunspent())
        self.nodes[1].lockunspent(True, [unspent_0])
        assert_equal(len(self.nodes[1].listlockunspent()), 0)

        # Send 21 PIV from 1 to 0 using sendtoaddress call.
        self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 21)
        self.nodes[1].generate(1)
        self.sync_all([self.nodes[0:3]])

        # Node0 should have two unspent outputs.
        # Create a couple of transactions to send them to node2, submit them through
        # node1, and make sure both node0 and node2 pick them up properly:
        node0utxos = self.nodes[0].listunspent(1)
        assert_equal(len(node0utxos), 2)

        # create both transactions
        fee_per_kbyte = Decimal('0.001')
        txns_to_send = []
        for utxo in node0utxos:
            inputs = []
            outputs = {}
            inputs.append({ "txid" : utxo["txid"], "vout" : utxo["vout"]})
            outputs[self.nodes[2].getnewaddress("from1")] = float(utxo["amount"]) - float(fee_per_kbyte)
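            # The fee is implicit: inputs minus outputs, i.e. fee_per_kbyte is left to the miner for each tx.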
            raw_tx = self.nodes[0].createrawtransaction(inputs, outputs)
            txns_to_send.append(self.nodes[0].signrawtransaction(raw_tx))

        # Have node 1 (miner) send the transactions
        self.nodes[1].sendrawtransaction(txns_to_send[0]["hex"], True)
        self.nodes[1].sendrawtransaction(txns_to_send[1]["hex"], True)

        # Have node1 mine a block to confirm transactions:
        self.nodes[1].generate(1)
        self.sync_all([self.nodes[0:3]])

        assert_equal(self.nodes[0].getbalance(), 0)
        node_2_expected_bal = Decimal('250') + Decimal('21') - 2 * fee_per_kbyte
        node_2_bal = self.nodes[2].getbalance()
        assert_equal(node_2_bal, node_2_expected_bal)
        assert_equal(self.nodes[2].getbalance("from1"), node_2_expected_bal)

        # Send 10 CCE with a normal sendtoaddress call
        address = self.nodes[0].getnewaddress("test")
        self.nodes[2].settxfee(float(fee_per_kbyte))
        txid = self.nodes[2].sendtoaddress(address, 10, "", "")
        fee = self.nodes[2].gettransaction(txid)["fee"]
        node_2_bal -= (Decimal('10') - fee)
        assert_equal(self.nodes[2].getbalance(), node_2_bal)
        self.nodes[2].generate(1)
        self.sync_all([self.nodes[0:3]])
        node_0_bal = self.nodes[0].getbalance()
        assert_equal(node_0_bal, Decimal('10'))

        # Sendmany 10 CCE
        txid = self.nodes[2].sendmany('from1', {address: 10}, 0, "")
        fee = self.nodes[2].gettransaction(txid)["fee"]
        self.nodes[2].generate(1)
        self.sync_all([self.nodes[0:3]])
        node_0_bal += Decimal('10')
        node_2_bal -= (Decimal('10') - fee)
        assert_equal(self.nodes[2].getbalance(), node_2_bal)
        assert_equal(self.nodes[0].getbalance(), node_0_bal)
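        # gettransaction reports the fee as a negative value, hence the negation when checking it against
        # the expected fee rate for the transaction's virtual size.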
        assert_fee_amount(-fee, self.get_vsize(self.nodes[2].getrawtransaction(txid)), fee_per_kbyte)

        # This will raise an exception since generate does not accept a string
        assert_raises_rpc_error(-1, "not an integer", self.nodes[0].generate, "2")

        # Import address and private key to check correct behavior of spendable unspents
        # 1. Send some coins to generate new UTXO
        address_to_import = self.nodes[2].getnewaddress()
        self.nodes[0].sendtoaddress(address_to_import, 1)
        self.nodes[0].generate(1)
        self.sync_all([self.nodes[0:3]])

        # 2. Import address from node2 to node1
        self.nodes[1].importaddress(address_to_import)

        # 3. Validate that the imported address is watch-only on node1
        assert(self.nodes[1].validateaddress(address_to_import)["iswatchonly"])

        # 4. Check that the unspents after import are not spendable
        listunspent = self.nodes[1].listunspent(1, 9999999, [], 3)
        assert_array_result(listunspent,
                           {"address": address_to_import},
                           {"spendable": False})

        # 5. Import private key of the previously imported address on node1
        priv_key = self.nodes[2].dumpprivkey(address_to_import)
        self.nodes[1].importprivkey(priv_key)

        # 6. Check that the unspents are now spendable on node1
        assert_array_result(self.nodes[1].listunspent(),
                           {"address": address_to_import},
                           {"spendable": True})

        # Check if wallet or blockchain maintenance changes the balance
        self.sync_all([self.nodes[0:3]])
        blocks = self.nodes[0].generate(2)
        self.sync_all([self.nodes[0:3]])
        balance_nodes = [self.nodes[i].getbalance() for i in range(3)]
        block_count = self.nodes[0].getblockcount()

        maintenance = [
            '-rescan',
            '-reindex',
        ]
        for m in maintenance:
            self.log.info("check " + m)
            self.stop_nodes()
            self.start_node(0, [m])
            self.start_node(1, [m])
            self.start_node(2, [m])
            if m == '-reindex':
                # reindex exits RPC warmup "early", before the chain is fully reindexed; wait for the block count to catch up
                wait_until(lambda: [block_count] * 3 == [self.nodes[i].getblockcount() for i in range(3)])
            assert_equal(balance_nodes, [self.nodes[i].getbalance() for i in range(3)])

        # Exercise listsinceblock with the last two blocks
        coinbase_tx_1 = self.nodes[0].listsinceblock(blocks[0])
        assert_equal(coinbase_tx_1["lastblock"], blocks[1])
        assert_equal(len(coinbase_tx_1["transactions"]), 1)
        assert_equal(coinbase_tx_1["transactions"][0]["blockhash"], blocks[1])
        assert_equal(len(self.nodes[0].listsinceblock(blocks[1])["transactions"]), 0)
Example #54
0
    def run_test(self):
        self.nodes[1].generate(COINBASE_MATURITY)
        sync_blocks(self.nodes)
        balance = self.nodes[0].getbalance()
        txA = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10"))
        txB = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10"))
        txC = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10"))
        sync_mempools(self.nodes)
        self.nodes[1].generate(1)

        # Cannot abandon a non-wallet transaction
        assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', lambda: self.nodes[0].abandontransaction(txid='ff' * 32))
        # Cannot abandon a confirmed transaction
        assert_raises_rpc_error(-5, 'Transaction not eligible for abandonment', lambda: self.nodes[0].abandontransaction(txid=txA))

        sync_blocks(self.nodes)
        newbalance = self.nodes[0].getbalance()
        assert(balance - newbalance < Decimal("0.01")) #no more than fees lost
        balance = newbalance

        # Disconnect nodes so node0's transactions don't get into node1's mempool
        disconnect_nodes(self.nodes[0], 1)

        # Identify the 10btc outputs
        nA = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txA, 1)["vout"]) if vout["value"] == Decimal("10"))
        nB = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txB, 1)["vout"]) if vout["value"] == Decimal("10"))
        nC = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txC, 1)["vout"]) if vout["value"] == Decimal("10"))

        inputs =[]
        # spend 10btc outputs from txA and txB
        inputs.append({"txid":txA, "vout":nA})
        inputs.append({"txid":txB, "vout":nB})
        outputs = {}

        outputs[self.nodes[0].getnewaddress()] = Decimal("14.99998")
        outputs[self.nodes[1].getnewaddress()] = Decimal("5")
        signed = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs, outputs))
        txAB1 = self.nodes[0].sendrawtransaction(signed["hex"])

        # Identify the 14.99998btc output
        nAB = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txAB1, 1)["vout"]) if vout["value"] == Decimal("14.99998"))

        #Create a child tx spending AB1 and C
        inputs = []
        inputs.append({"txid":txAB1, "vout":nAB})
        inputs.append({"txid":txC, "vout":nC})
        outputs = {}
        outputs[self.nodes[0].getnewaddress()] = Decimal("24.9996")
        signed2 = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs, outputs))
        txABC2 = self.nodes[0].sendrawtransaction(signed2["hex"])

        # Create a child tx spending ABC2
        signed3_change = Decimal("24.999")
        inputs = [ {"txid":txABC2, "vout":0} ]
        outputs = { self.nodes[0].getnewaddress(): signed3_change }
        signed3 = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs, outputs))
        # note tx is never directly referenced, only abandoned as a child of the above
        self.nodes[0].sendrawtransaction(signed3["hex"])

        # In mempool txs from self should increase balance from change
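        # Accounting: the chain spends the three 10-coin outputs (30 total); 5 goes to node1 and 0.001 is
        # paid in fees across the three txs, while signed3_change (24.999) comes back to this wallet.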
        newbalance = self.nodes[0].getbalance()
        assert_equal(newbalance, balance - Decimal("30") + signed3_change)
        balance = newbalance

        # Restart the node with a higher min relay fee so the parent tx is no longer in mempool
        # TODO: redo with eviction
        self.stop_node(0)
        self.start_node(0, extra_args=["-minrelaytxfee=0.0001"])

        # Verify txs no longer in either node's mempool
        assert_equal(len(self.nodes[0].getrawmempool()), 0)
        assert_equal(len(self.nodes[1].getrawmempool()), 0)

        # Not in mempool txs from self should only reduce balance
        # inputs are still spent, but change not received
        newbalance = self.nodes[0].getbalance()
        assert_equal(newbalance, balance - signed3_change)
        # Unconfirmed received funds that are not in mempool, also shouldn't show
        # up in unconfirmed balance
        unconfbalance = self.nodes[0].getunconfirmedbalance() + self.nodes[0].getbalance()
        assert_equal(unconfbalance, newbalance)
        # Also shouldn't show up in listunspent
        assert(txABC2 not in [utxo["txid"] for utxo in self.nodes[0].listunspent(0)])
        balance = newbalance

        # Abandon original transaction and verify inputs are available again
        # including that the child tx was also abandoned
        self.nodes[0].abandontransaction(txAB1)
        newbalance = self.nodes[0].getbalance()
        assert_equal(newbalance, balance + Decimal("30"))
        balance = newbalance

        # Verify that even with a low min relay fee, the tx is not reaccepted from wallet on startup once abandoned
        self.stop_node(0)
        self.start_node(0, extra_args=["-minrelaytxfee=0.00001"])
        assert_equal(len(self.nodes[0].getrawmempool()), 0)
        assert_equal(self.nodes[0].getbalance(), balance)

        # But if it is received again then it is unabandoned
        # And since now in mempool, the change is available
        # But its child tx remains abandoned
        self.nodes[0].sendrawtransaction(signed["hex"])
        newbalance = self.nodes[0].getbalance()
        assert_equal(newbalance, balance - Decimal("20") + Decimal("14.99998"))
        balance = newbalance

        # Send child tx again so it is unabandoned
        self.nodes[0].sendrawtransaction(signed2["hex"])
        newbalance = self.nodes[0].getbalance()
        assert_equal(newbalance, balance - Decimal("10") - Decimal("14.99998") + Decimal("24.9996"))
        balance = newbalance

        # Remove using high relay fee again
        self.stop_node(0)
        self.start_node(0, extra_args=["-minrelaytxfee=0.0001"])
        assert_equal(len(self.nodes[0].getrawmempool()), 0)
        newbalance = self.nodes[0].getbalance()
        assert_equal(newbalance, balance - Decimal("24.9996"))
        balance = newbalance

        # Create a double spend of AB1 by spending again from only A's 10 output
        # Mine double spend from node 1
        inputs =[]
        inputs.append({"txid":txA, "vout":nA})
        outputs = {}
        outputs[self.nodes[1].getnewaddress()] = Decimal("9.99")
        tx = self.nodes[0].createrawtransaction(inputs, outputs)
        signed = self.nodes[0].signrawtransactionwithwallet(tx)
        self.nodes[1].sendrawtransaction(signed["hex"])
        self.nodes[1].generate(1)

        connect_nodes(self.nodes[0], 1)
        sync_blocks(self.nodes)

        # Verify that B and C's 10 BTC outputs are available for spending again because AB1 is now conflicted
        newbalance = self.nodes[0].getbalance()
        assert_equal(newbalance, balance + Decimal("20"))
        balance = newbalance

        # There is currently a minor bug around this and so this test doesn't work.  See Issue #7315
        # Invalidate the block with the double spend and B's 10 BTC output should no longer be available
        # Don't think C's should either
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        newbalance = self.nodes[0].getbalance()
        #assert_equal(newbalance, balance - Decimal("10"))
        self.log.info("If balance has not declined after invalidateblock then out of mempool wallet tx which is no longer")
        self.log.info("conflicted has not resumed causing its inputs to be seen as spent.  See Issue #7315")
        self.log.info(str(balance) + " -> " + str(newbalance) + " ?")
Example #55
0
def test_locked_wallet_fails(rbf_node, dest_address):
    rbfid = spend_one_input(rbf_node, dest_address)
    rbf_node.walletlock()
    assert_raises_rpc_error(
        -13, "Please enter the wallet passphrase with walletpassphrase first.",
        rbf_node.bumpfee, rbfid)
Example #56
0
    def _test_getchaintxstats(self):
        self.log.info("Test getchaintxstats")

        # Test `getchaintxstats` invalid extra parameters
        assert_raises_rpc_error(-1, 'getchaintxstats', self.nodes[0].getchaintxstats, 0, '', 0)

        # Test `getchaintxstats` invalid `nblocks`
        assert_raises_rpc_error(-1, "JSON value is not an integer as expected", self.nodes[0].getchaintxstats, '')
        assert_raises_rpc_error(-8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, -1)
        assert_raises_rpc_error(-8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, self.nodes[0].getblockcount())

        # Test `getchaintxstats` invalid `blockhash`
        assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].getchaintxstats, blockhash=0)
        assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getchaintxstats, blockhash='0')
        blockhash = self.nodes[0].getblockhash(200)
        self.nodes[0].invalidateblock(blockhash)
        assert_raises_rpc_error(-8, "Block is not in main chain", self.nodes[0].getchaintxstats, blockhash=blockhash)
        self.nodes[0].reconsiderblock(blockhash)

        chaintxstats = self.nodes[0].getchaintxstats(1)
        # 200 txs plus genesis tx
        assert_equal(chaintxstats['txcount'], 201)
        # tx rate should be 1 per 10 minutes, or 1/600
        # we have to round because of binary math
        assert_equal(round(chaintxstats['txrate'] * 600, 10), Decimal(1))

        b1_hash = self.nodes[0].getblockhash(1)
        b1 = self.nodes[0].getblock(b1_hash)
        b200_hash = self.nodes[0].getblockhash(200)
        b200 = self.nodes[0].getblock(b200_hash)
        time_diff = b200['mediantime'] - b1['mediantime']

        chaintxstats = self.nodes[0].getchaintxstats()
        assert_equal(chaintxstats['time'], b200['time'])
        assert_equal(chaintxstats['txcount'], 201)
        assert_equal(chaintxstats['window_final_block_hash'], b200_hash)
        assert_equal(chaintxstats['window_block_count'], 199)
        assert_equal(chaintxstats['window_tx_count'], 199)
        assert_equal(chaintxstats['window_interval'], time_diff)
        assert_equal(round(chaintxstats['txrate'] * time_diff, 10), Decimal(199))

        chaintxstats = self.nodes[0].getchaintxstats(blockhash=b1_hash)
        assert_equal(chaintxstats['time'], b1['time'])
        assert_equal(chaintxstats['txcount'], 2)
        assert_equal(chaintxstats['window_final_block_hash'], b1_hash)
        assert_equal(chaintxstats['window_block_count'], 0)
        assert('window_tx_count' not in chaintxstats)
        assert('window_interval' not in chaintxstats)
        assert('txrate' not in chaintxstats)
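
As a side note, the txrate assertions above follow directly from the window fields; a minimal illustrative helper (the name and its use are assumptions, not part of the test):

from decimal import Decimal

def expected_txrate(stats):
    # txrate is window_tx_count divided by window_interval (in seconds); it is only
    # defined when the window is non-empty, matching the assertions for b1 above.
    return Decimal(stats['window_tx_count']) / stats['window_interval']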
Example #57
0
    def run_test(self):
        # Generate block to get out of IBD
        self.nodes[0].generate(1)
        self.sync_blocks()

        # save the number of coinbase reward addresses so far
        num_cb_reward_addresses = len(self.nodes[1].listreceivedbyaddress(
            minconf=0, include_empty=True, include_watchonly=True))

        self.log.info("listreceivedbyaddress Test")

        # Send from node 0 to 1
        addr = self.nodes[1].getnewaddress()
        txid = self.nodes[0].sendtoaddress(addr, 0.1)
        self.sync_all()

        # Check that the tx is not listed by listreceivedbyaddress because it has 0 confirmations
        assert_array_result(self.nodes[1].listreceivedbyaddress(),
                            {"address": addr}, {}, True)
        # Bury the tx under 10 blocks so it will be returned by listreceivedbyaddress
        self.nodes[1].generate(10)
        self.sync_all()
        assert_array_result(self.nodes[1].listreceivedbyaddress(),
                            {"address": addr}, {
                                "address": addr,
                                "label": "",
                                "amount": Decimal("0.1"),
                                "confirmations": 10,
                                "txids": [
                                    txid,
                                ]
                            })
        # With min confidence < 10
        assert_array_result(self.nodes[1].listreceivedbyaddress(5),
                            {"address": addr}, {
                                "address": addr,
                                "label": "",
                                "amount": Decimal("0.1"),
                                "confirmations": 10,
                                "txids": [
                                    txid,
                                ]
                            })
        # With min confidence > 10, should not find Tx
        assert_array_result(self.nodes[1].listreceivedbyaddress(11),
                            {"address": addr}, {}, True)

        # Address that has received nothing yet (reported via include_empty)
        empty_addr = self.nodes[1].getnewaddress()
        assert_array_result(self.nodes[1].listreceivedbyaddress(0, True),
                            {"address": empty_addr}, {
                                "address": empty_addr,
                                "label": "",
                                "amount": 0,
                                "confirmations": 0,
                                "txids": []
                            })

        # Test Address filtering
        # Only on addr
        expected = {
            "address": addr,
            "label": "",
            "amount": Decimal("0.1"),
            "confirmations": 10,
            "txids": [
                txid,
            ]
        }
        res = self.nodes[1].listreceivedbyaddress(minconf=0,
                                                  include_empty=True,
                                                  include_watchonly=True,
                                                  address_filter=addr)
        assert_array_result(res, {"address": addr}, expected)
        assert_equal(len(res), 1)
        # Test for regression on CLI calls with address string (#14173)
        cli_res = self.nodes[1].cli.listreceivedbyaddress(0, True, True, addr)
        assert_array_result(cli_res, {"address": addr}, expected)
        assert_equal(len(cli_res), 1)
        # Error on invalid address
        assert_raises_rpc_error(-4,
                                "address_filter parameter was invalid",
                                self.nodes[1].listreceivedbyaddress,
                                minconf=0,
                                include_empty=True,
                                include_watchonly=True,
                                address_filter="bamboozling")
        # Another address receives money
        res = self.nodes[1].listreceivedbyaddress(0, True, True)
        assert_equal(len(res),
                     2 + num_cb_reward_addresses)  # Right now 2 entries
        other_addr = self.nodes[1].getnewaddress()
        txid2 = self.nodes[0].sendtoaddress(other_addr, 0.1)
        self.nodes[0].generate(1)
        self.sync_all()
        # Same test as above should still pass
        expected = {
            "address": addr,
            "label": "",
            "amount": Decimal("0.1"),
            "confirmations": 11,
            "txids": [
                txid,
            ]
        }
        res = self.nodes[1].listreceivedbyaddress(0, True, True, addr)
        assert_array_result(res, {"address": addr}, expected)
        assert_equal(len(res), 1)
        # Same test as above but with other_addr should still pass
        expected = {
            "address": other_addr,
            "label": "",
            "amount": Decimal("0.1"),
            "confirmations": 1,
            "txids": [
                txid2,
            ]
        }
        res = self.nodes[1].listreceivedbyaddress(0, True, True, other_addr)
        assert_array_result(res, {"address": other_addr}, expected)
        assert_equal(len(res), 1)
        # Without the filter, all entries should be listed
        res = self.nodes[1].listreceivedbyaddress(0, True, True)
        assert_equal(len(res), 3 + num_cb_reward_addresses)  # Now 3 entries

        # Not on a random addr
        other_addr = self.nodes[0].getnewaddress()  # note: on node[0]! just a random addr
        res = self.nodes[1].listreceivedbyaddress(0, True, True, other_addr)
        assert_equal(len(res), 0)

        self.log.info("getreceivedbyaddress Test")

        # Send from node 0 to 1
        addr = self.nodes[1].getnewaddress()
        txid = self.nodes[0].sendtoaddress(addr, 0.1)
        self.sync_all()

        # Check balance is 0 because of 0 confirmations
        balance = self.nodes[1].getreceivedbyaddress(addr)
        assert_equal(balance, Decimal("0.0"))

        # Check balance is 0.1
        balance = self.nodes[1].getreceivedbyaddress(addr, 0)
        assert_equal(balance, Decimal("0.1"))

        # Bury the tx under 10 blocks so it will be returned by the default getreceivedbyaddress
        self.nodes[1].generate(10)
        self.sync_all()
        balance = self.nodes[1].getreceivedbyaddress(addr)
        assert_equal(balance, Decimal("0.1"))

        # Calling getreceivedbyaddress for an address the wallet doesn't own should return an error
        assert_raises_rpc_error(-4, "Address not found in wallet",
                                self.nodes[0].getreceivedbyaddress, addr)

        self.log.info("listreceivedbylabel + getreceivedbylabel Test")

        # set pre-state
        label = ''
        address = self.nodes[1].getnewaddress()
        assert_equal(self.nodes[1].getaddressinfo(address)['label'], label)
        received_by_label_json = [
            r for r in self.nodes[1].listreceivedbylabel()
            if r["label"] == label
        ][0]
        balance_by_label = self.nodes[1].getreceivedbylabel(label)

        txid = self.nodes[0].sendtoaddress(addr, 0.1)
        self.sync_all()

        # listreceivedbylabel should still return the original received_by_label_json because the new payment has 0 confirmations
        assert_array_result(self.nodes[1].listreceivedbylabel(),
                            {"label": label}, received_by_label_json)

        # getreceivedbylabel should return the same balance because of 0 confirmations
        balance = self.nodes[1].getreceivedbylabel(label)
        assert_equal(balance, balance_by_label)

        self.nodes[1].generate(10)
        self.sync_all()
        # listreceivedbylabel should return updated received list
        assert_array_result(
            self.nodes[1].listreceivedbylabel(), {"label": label}, {
                "label": received_by_label_json["label"],
                "amount": (received_by_label_json["amount"] + Decimal("0.1"))
            })

        # getreceivedbylabel should return updated receive total
        balance = self.nodes[1].getreceivedbylabel(label)
        assert_equal(balance, balance_by_label + Decimal("0.1"))

        # Create a new label named "mynewlabel" that has a 0 balance
        address = self.nodes[1].getnewaddress()
        self.nodes[1].setlabel(address, "mynewlabel")
        received_by_label_json = [
            r for r in self.nodes[1].listreceivedbylabel(0, True)
            if r["label"] == "mynewlabel"
        ][0]

        # Test include_empty of listreceivedbylabel
        assert_equal(received_by_label_json["amount"], Decimal("0.0"))

        # Test getreceivedbylabel for 0 amount labels
        balance = self.nodes[1].getreceivedbylabel("mynewlabel")
        assert_equal(balance, Decimal("0.0"))
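
To round out the label checks, a small hedged sketch of the same minconf behaviour for a fresh label (the 'probe' label, the amount, and this extra send are illustrative, not part of the original test):

        # Hypothetical follow-on check: an unconfirmed payment is counted with minconf=0
        # but not with the default minconf=1.
        probe_addr = self.nodes[1].getnewaddress()
        self.nodes[1].setlabel(probe_addr, "probe")
        self.nodes[0].sendtoaddress(probe_addr, 0.2)
        self.sync_all()
        assert_equal(self.nodes[1].getreceivedbylabel("probe", 0), Decimal("0.2"))
        assert_equal(self.nodes[1].getreceivedbylabel("probe"), Decimal("0"))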
Example #58
0
    def run_test(self):
        node = self.nodes[0]
        node.generate(1) # Leave IBD for sethdseed

        self.nodes[0].createwallet(wallet_name='w0')
        w0 = node.get_wallet_rpc('w0')
        address1 = w0.getnewaddress()

        self.log.info("Test disableprivatekeys creation.")
        self.nodes[0].createwallet(wallet_name='w1', disable_private_keys=True)
        w1 = node.get_wallet_rpc('w1')
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w1.getnewaddress)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w1.getrawchangeaddress)
        w1.importpubkey(w0.getaddressinfo(address1)['pubkey'])

        self.log.info('Test that private keys cannot be imported')
        addr = w0.getnewaddress('', 'legacy')
        privkey = w0.dumpprivkey(addr)
        assert_raises_rpc_error(-4, 'Cannot import private keys to a wallet with private keys disabled', w1.importprivkey, privkey)
        result = w1.importmulti([{'scriptPubKey': {'address': addr}, 'timestamp': 'now', 'keys': [privkey]}])
        assert not result[0]['success']
        assert 'warning' not in result[0]
        assert_equal(result[0]['error']['code'], -4)
        assert_equal(result[0]['error']['message'], 'Cannot import private keys to a wallet with private keys disabled')

        self.log.info("Test blank creation with private keys disabled.")
        self.nodes[0].createwallet(wallet_name='w2', disable_private_keys=True, blank=True)
        w2 = node.get_wallet_rpc('w2')
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w2.getnewaddress)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w2.getrawchangeaddress)
        w2.importpubkey(w0.getaddressinfo(address1)['pubkey'])

        self.log.info("Test blank creation with private keys enabled.")
        self.nodes[0].createwallet(wallet_name='w3', disable_private_keys=False, blank=True)
        w3 = node.get_wallet_rpc('w3')
        assert_equal(w3.getwalletinfo()['keypoolsize'], 0)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w3.getnewaddress)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w3.getrawchangeaddress)
        # Import private key
        w3.importprivkey(w0.dumpprivkey(address1))
        # Imported private keys are currently ignored by the keypool
        assert_equal(w3.getwalletinfo()['keypoolsize'], 0)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w3.getnewaddress)
        # Set the seed
        w3.sethdseed()
        assert_equal(w3.getwalletinfo()['keypoolsize'], 1)
        w3.getnewaddress()
        w3.getrawchangeaddress()

        self.log.info("Test blank creation with privkeys enabled and then encryption")
        self.nodes[0].createwallet(wallet_name='w4', disable_private_keys=False, blank=True)
        w4 = node.get_wallet_rpc('w4')
        assert_equal(w4.getwalletinfo()['keypoolsize'], 0)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w4.getnewaddress)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w4.getrawchangeaddress)
        # Encrypt the wallet. Nothing should change about the keypool
        w4.encryptwallet('pass')
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w4.getnewaddress)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w4.getrawchangeaddress)
        # Now set a seed and it should work. Wallet should also be encrypted
        w4.walletpassphrase('pass', 2)
        w4.sethdseed()
        w4.getnewaddress()
        w4.getrawchangeaddress()

        self.log.info("Test blank creation with privkeys disabled and then encryption")
        self.nodes[0].createwallet(wallet_name='w5', disable_private_keys=True, blank=True)
        w5 = node.get_wallet_rpc('w5')
        assert_equal(w5.getwalletinfo()['keypoolsize'], 0)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w5.getnewaddress)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w5.getrawchangeaddress)
        # Encrypt the wallet
        assert_raises_rpc_error(-16, "Error: wallet does not contain private keys, nothing to encrypt.", w5.encryptwallet, 'pass')
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w5.getnewaddress)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w5.getrawchangeaddress)

        self.log.info('New blank and encrypted wallets can be created')
        self.nodes[0].createwallet(wallet_name='wblank', disable_private_keys=False, blank=True, passphrase='thisisapassphrase')
        wblank = node.get_wallet_rpc('wblank')
        assert_raises_rpc_error(-13, "Error: Please enter the wallet passphrase with walletpassphrase first.", wblank.signmessage, "needanargument", "test")
        wblank.walletpassphrase('thisisapassphrase', 10)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys", wblank.getnewaddress)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys", wblank.getrawchangeaddress)

        self.log.info('Test creating a new encrypted wallet.')
        # A born-encrypted wallet is created (it has keys)
        self.nodes[0].createwallet(wallet_name='w6', disable_private_keys=False, blank=False, passphrase='thisisapassphrase')
        w6 = node.get_wallet_rpc('w6')
        assert_raises_rpc_error(-13, "Error: Please enter the wallet passphrase with walletpassphrase first.", w6.signmessage, "needanargument", "test")
        w6.walletpassphrase('thisisapassphrase', 10)
        w6.signmessage(w6.getnewaddress('', 'legacy'), "test")
        w6.keypoolrefill(1)
        # There should be only 1 key in each keypool (external and internal)
        walletinfo = w6.getwalletinfo()
        assert_equal(walletinfo['keypoolsize'], 1)
        assert_equal(walletinfo['keypoolsize_hd_internal'], 1)
        # Allow empty passphrase, but there should be a warning
        resp = self.nodes[0].createwallet(wallet_name='w7', disable_private_keys=False, blank=False, passphrase='')
        assert_equal(resp['warning'], 'Empty string given as passphrase, wallet will not be encrypted.')
        w7 = node.get_wallet_rpc('w7')
        assert_raises_rpc_error(-15, 'Error: running with an unencrypted wallet, but walletpassphrase was called.', w7.walletpassphrase, '', 10)

        self.log.info('Test making a wallet with avoid reuse flag')
        self.nodes[0].createwallet('w8', False, False, '', True)  # Use positional arguments to check for a bug where avoid_reuse could not be set for wallets, without requiring them to be encrypted
        w8 = node.get_wallet_rpc('w8')
        assert_raises_rpc_error(-15, 'Error: running with an unencrypted wallet, but walletpassphrase was called.', w8.walletpassphrase, '', 10)
        assert_equal(w8.getwalletinfo()["avoid_reuse"], True)

        self.log.info('Using a passphrase with private keys disabled returns error')
        assert_raises_rpc_error(-4, 'Passphrase provided but private keys are disabled. A passphrase is only used to encrypt private keys, so cannot be used for wallets with private keys disabled.', self.nodes[0].createwallet, wallet_name='w9', disable_private_keys=True, passphrase='thisisapassphrase')
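
As a follow-up, a minimal sketch of persisting one of the wallets created above across an unload/load cycle (purely illustrative, not part of the original test):

        # Hypothetical wrap-up: the avoid_reuse flag set on w8 should survive unloading
        # and reloading the wallet by name.
        node.unloadwallet('w8')
        node.loadwallet('w8')
        w8 = node.get_wallet_rpc('w8')
        assert_equal(w8.getwalletinfo()['avoid_reuse'], True)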
Example #59
0
    def test_prioritised_transactions(self):
        # Ensure that fee deltas used via prioritisetransaction are
        # correctly used by replacement logic

        # 1. Check that feeperkb uses modified fees
        tx0_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN))

        tx1a = CTransaction()
        tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1a.vout = [
            CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT),
            CTxOut(int(0.1 * COIN))
        ]
        tx1a_hex = txToHex(tx1a)
        tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, 0)

        # Higher fee, but the actual fee per KB is much lower.
        tx1b = CTransaction()
        tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1b.vout = [
            CTxOut(int(0.001 * COIN), CScript([b'a' * 740000])),
            CTxOut(int(1.1 * COIN - 0.001 * COIN))
        ]
        tx1b_hex = txToHex(tx1b)

        # Verify tx1b cannot replace tx1a.
        assert_raises_rpc_error(-26, "insufficient fee",
                                self.nodes[0].sendrawtransaction, tx1b_hex, 0)

        # Use prioritisetransaction to set tx1a's fee to 0.
        self.nodes[0].prioritisetransaction(txid=tx1a_txid,
                                            fee_delta=int(-0.1 * COIN))

        # Now tx1b should be able to replace tx1a
        tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, 0)

        assert tx1b_txid in self.nodes[0].getrawmempool()

        # 2. Check that absolute fee checks use modified fee.
        tx1_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN))

        tx2a = CTransaction()
        tx2a.vin = [CTxIn(tx1_outpoint, nSequence=0)]
        tx2a.vout = [
            CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT),
            CTxOut(int(0.1 * COIN))
        ]
        tx2a_hex = txToHex(tx2a)
        self.nodes[0].sendrawtransaction(tx2a_hex, 0)

        # Lower fee, but we'll prioritise it
        tx2b = CTransaction()
        tx2b.vin = [CTxIn(tx1_outpoint, nSequence=0)]
        tx2b.vout = [
            CTxOut(int(1.01 * COIN), DUMMY_P2WPKH_SCRIPT),
            CTxOut(int(1.1 * COIN - 1.01 * COIN))
        ]
        tx2b.rehash()
        tx2b_hex = txToHex(tx2b)

        # Verify tx2b cannot replace tx2a.
        assert_raises_rpc_error(-26, "insufficient fee",
                                self.nodes[0].sendrawtransaction, tx2b_hex, 0)

        # Now prioritise tx2b to have a higher modified fee
        self.nodes[0].prioritisetransaction(txid=tx2b.hash,
                                            fee_delta=int(0.1 * COIN))

        # tx2b should now be accepted
        tx2b_txid = self.nodes[0].sendrawtransaction(tx2b_hex, 0)

        assert tx2b_txid in self.nodes[0].getrawmempool()
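
One could also inspect the applied delta directly; a hedged sketch, assuming getmempoolentry reports base and modified fees under a 'fees' object:

        # Hypothetical inspection, not part of the original test: the prioritised
        # replacement's modified fee should exceed its base fee by the applied delta.
        entry = self.nodes[0].getmempoolentry(tx2b_txid)
        assert entry['fees']['modified'] > entry['fees']['base']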
Example #60
0
    def run_test(self):
        node = self.nodes[0]

        self.log.info('getmininginfo')
        mining_info = node.getmininginfo()
        assert_equal(mining_info['blocks'], 200)
        assert_equal(mining_info['chain'], 'regtest')
        assert_equal(mining_info['currentblocktx'], 0)
        assert_equal(mining_info['currentblockweight'], 0)
        assert_equal(mining_info['difficulty'],
                     Decimal('4.656542373906925E-10'))
        assert_equal(mining_info['networkhashps'],
                     Decimal('0.03333333333333333'))
        assert_equal(mining_info['pooledtx'], 0)

        # Mine a block to leave initial block download
        node.generate(1)
        tmpl = node.getblocktemplate()
        self.log.info("getblocktemplate: Test capability advertised")
        assert 'proposal' in tmpl['capabilities']
        assert 'coinbasetxn' not in tmpl

        coinbase_tx = create_coinbase(height=int(tmpl["height"]) + 1)
        # sequence numbers must not be max for nLockTime to have effect
        coinbase_tx.vin[0].nSequence = 2**32 - 2
        coinbase_tx.rehash()

        block = CBlock()
        block.nVersion = tmpl["version"]
        block.hashPrevBlock = int(tmpl["previousblockhash"], 16)
        block.nTime = tmpl["curtime"]
        block.nBits = int(tmpl["bits"], 16)
        block.nNonce = 0
        block.vtx = [coinbase_tx]
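        # Note: the merkle root is not set here; assert_template (defined elsewhere in this
        # test) is presumably responsible for recomputing it before submitting the proposal.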

        self.log.info("getblocktemplate: Test valid block")
        assert_template(node, block, None)

        self.log.info("submitblock: Test block decode failure")
        assert_raises_rpc_error(-22, "Block decode failed", node.submitblock,
                                b2x(block.serialize()[:-15]))

        self.log.info(
            "getblocktemplate: Test bad input hash for coinbase transaction")
        bad_block = copy.deepcopy(block)
        bad_block.vtx[0].vin[0].prevout.hash += 1
        bad_block.vtx[0].rehash()
        assert_template(node, bad_block, 'bad-cb-missing')

        self.log.info("submitblock: Test invalid coinbase transaction")
        assert_raises_rpc_error(-22, "Block does not start with a coinbase",
                                node.submitblock, b2x(bad_block.serialize()))

        self.log.info("getblocktemplate: Test truncated final transaction")
        assert_raises_rpc_error(-22, "Block decode failed",
                                node.getblocktemplate, {
                                    'data': b2x(block.serialize()[:-1]),
                                    'mode': 'proposal'
                                })

        self.log.info("getblocktemplate: Test duplicate transaction")
        bad_block = copy.deepcopy(block)
        bad_block.vtx.append(bad_block.vtx[0])
        assert_template(node, bad_block, 'bad-txns-duplicate')

        self.log.info("getblocktemplate: Test invalid transaction")
        bad_block = copy.deepcopy(block)
        bad_tx = copy.deepcopy(bad_block.vtx[0])
        bad_tx.vin[0].prevout.hash = 255
        bad_tx.rehash()
        bad_block.vtx.append(bad_tx)
        assert_template(node, bad_block, 'bad-txns-inputs-missingorspent')

        self.log.info("getblocktemplate: Test nonfinal transaction")
        bad_block = copy.deepcopy(block)
        bad_block.vtx[0].nLockTime = 2**32 - 1
        bad_block.vtx[0].rehash()
        assert_template(node, bad_block, 'bad-txns-nonfinal')

        self.log.info("getblocktemplate: Test bad tx count")
        # The tx count is immediately after the block header
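        # (a serialized block header is always 80 bytes, so the tx-count varint sits at offset 80)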
        TX_COUNT_OFFSET = 80
        bad_block_sn = bytearray(block.serialize())
        assert_equal(bad_block_sn[TX_COUNT_OFFSET], 1)
        bad_block_sn[TX_COUNT_OFFSET] += 1
        assert_raises_rpc_error(-22, "Block decode failed",
                                node.getblocktemplate, {
                                    'data': b2x(bad_block_sn),
                                    'mode': 'proposal'
                                })

        self.log.info("getblocktemplate: Test bad bits")
        bad_block = copy.deepcopy(block)
        bad_block.nBits = 469762303  # impossible in the real world
        assert_template(node, bad_block, 'bad-diffbits')

        self.log.info("getblocktemplate: Test bad merkle root")
        bad_block = copy.deepcopy(block)
        bad_block.hashMerkleRoot += 1
        assert_template(node, bad_block, 'bad-txnmrklroot', False)

        self.log.info("getblocktemplate: Test bad timestamps")
        bad_block = copy.deepcopy(block)
        bad_block.nTime = 2**31 - 1
        assert_template(node, bad_block, 'time-too-new')
        bad_block.nTime = 0
        assert_template(node, bad_block, 'time-too-old')

        self.log.info("getblocktemplate: Test not best block")
        bad_block = copy.deepcopy(block)
        bad_block.hashPrevBlock = 123
        assert_template(node, bad_block, 'inconclusive-not-best-prevblk')