def get_new_address(self):
    key = ECKey()
    key.generate()
    pubkey = key.get_pubkey().get_bytes()
    address = key_to_p2pkh(pubkey)
    self.priv_keys.append(bytes_to_wif(key.get_bytes()))
    return address
Example 2
    def witness_script_test(self):
        self.log.info("Test signing transaction to P2SH-P2WSH addresses without wallet")
        # Create a new P2SH-P2WSH 1-of-1 multisig address:
        eckey = ECKey()
        eckey.generate()
        embedded_privkey = bytes_to_wif(eckey.get_bytes())
        embedded_pubkey = eckey.get_pubkey().get_bytes().hex()
        p2sh_p2wsh_address = self.nodes[1].createmultisig(1, [embedded_pubkey], "p2sh-segwit")
        # send transaction to P2SH-P2WSH 1-of-1 multisig address
        self.nodes[0].generate(101)
        self.nodes[0].sendtoaddress(p2sh_p2wsh_address["address"], 49.999)
        self.nodes[0].generate(1)
        self.sync_all()
        # Get the UTXO info from scantxoutset
        unspent_output = self.nodes[1].scantxoutset('start', [p2sh_p2wsh_address['descriptor']])['unspents'][0]
        spk = script_to_p2sh_p2wsh_script(p2sh_p2wsh_address['redeemScript']).hex()
        unspent_output['witnessScript'] = p2sh_p2wsh_address['redeemScript']
        unspent_output['redeemScript'] = script_to_p2wsh_script(unspent_output['witnessScript']).hex()
        assert_equal(spk, unspent_output['scriptPubKey'])
        # Now create and sign a transaction spending that output on node[0], which doesn't know the scripts or keys
        spending_tx = self.nodes[0].createrawtransaction([unspent_output], {self.nodes[1].get_wallet_rpc(self.default_wallet_name).getnewaddress(): Decimal("49.998")})
        spending_tx_signed = self.nodes[0].signrawtransactionwithkey(spending_tx, [embedded_privkey], [unspent_output])
        # Check the signing completed successfully
        assert 'complete' in spending_tx_signed
        assert_equal(spending_tx_signed['complete'], True)

        # Now test with P2PKH and P2PK scripts as the witnessScript
        for tx_type in ['P2PKH', 'P2PK']:  # these tests are order-independent
            self.verify_txn_with_witness_script(tx_type)
Example 3
def getnewdestination(address_type='bech32m'):
    """Generate a random destination of the specified type and return the
       corresponding public key, scriptPubKey and address. Supported types are
       'legacy', 'p2sh-segwit', 'bech32' and 'bech32m'. Can be used when a random
       destination is needed, but no compiled wallet is available (e.g. as
       replacement to the getnewaddress/getaddressinfo RPCs)."""
    key = ECKey()
    key.generate()
    pubkey = key.get_pubkey().get_bytes()
    if address_type == 'legacy':
        scriptpubkey = key_to_p2pkh_script(pubkey)
        address = key_to_p2pkh(pubkey)
    elif address_type == 'p2sh-segwit':
        scriptpubkey = key_to_p2sh_p2wpkh_script(pubkey)
        address = key_to_p2sh_p2wpkh(pubkey)
    elif address_type == 'bech32':
        scriptpubkey = key_to_p2wpkh_script(pubkey)
        address = key_to_p2wpkh(pubkey)
    elif address_type == 'bech32m':
        tap = taproot_construct(compute_xonly_pubkey(key.get_bytes())[0])
        pubkey = tap.output_pubkey
        scriptpubkey = tap.scriptPubKey
        address = output_key_to_p2tr(pubkey)
    else:
        assert False
    return pubkey, scriptpubkey, address
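A minimal usage sketch (not part of the excerpt above; `node` is assumed to be a funded test node): fund the fresh destination and locate the UTXO with a raw() descriptor scan, so no wallet is needed on the receiving side.

# Hypothetical usage of getnewdestination(): send to the address, then find
# the UTXO via scantxoutset using the returned scriptPubKey.
pubkey, scriptpubkey, address = getnewdestination('bech32')
txid = node.sendtoaddress(address, 1)
node.generate(1)
utxos = node.scantxoutset('start', ["raw({})".format(scriptpubkey.hex())])['unspents']
assert any(u['txid'] == txid for u in utxos)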
    def test_request_invalid_once(self, context):
        node = self.nodes[0]
        privkey = ECKey()
        privkey.generate()

        # Build an invalid proof (no stake)
        no_stake_hex = node.buildavalancheproof(
            42, 2000000000, bytes_to_wif(privkey.get_bytes()), [])
        no_stake = FromHex(LegacyAvalancheProof(), no_stake_hex)
        assert_raises_rpc_error(-8, "The proof is invalid: no-stake",
                                node.verifyavalancheproof, no_stake_hex)

        # Send the proof
        msg = msg_avaproof()
        msg.proof = no_stake
        node.p2ps[0].send_message(msg)

        # Check we get banned
        node.p2ps[0].wait_for_disconnect()

        # Now that the node knows the proof is invalid, it should not be
        # requested anymore
        node.p2ps[1].send_message(
            msg_inv([CInv(t=context.inv_type, h=no_stake.proofid)]))

        # Give enough time for the node to eventually request the proof
        node.setmocktime(
            int(time.time()) + context.constants.getdata_interval + 1)
        node.p2ps[1].sync_with_ping()

        assert all(p.getdata_count == 0 for p in node.p2ps[1:])
Example 5
def get_keys(self):
    self.pub = []
    self.priv = []
    node0, node1, node2 = self.nodes
    for _ in range(self.nkeys):
        k = ECKey()
        k.generate()
        self.pub.append(k.get_pubkey().get_bytes().hex())
        self.priv.append(bytes_to_wif(k.get_bytes(), k.is_compressed))
    self.final = node2.getnewaddress()
Example 6
        def getProof(stake):
            privkey = ECKey()
            privkey.generate()
            pubkey = privkey.get_pubkey()

            proof_sequence = 11
            proof_expiration = 12
            proof = node.buildavalancheproof(proof_sequence, proof_expiration,
                                             bytes_to_wif(privkey.get_bytes()),
                                             [stake])
            return (pubkey.get_bytes().hex(), proof)
Example 7
def get_keys(self):
    self.pub = []
    self.priv = []
    node0, node1, node2 = self.nodes
    for _ in range(self.nkeys):
        k = ECKey()
        k.generate()
        self.pub.append(k.get_pubkey().get_bytes().hex())
        self.priv.append(bytes_to_wif(k.get_bytes(), k.is_compressed))
    if self.is_bdb_compiled():
        self.final = node2.getnewaddress()
    else:
        self.final = getnewdestination('bech32')[2]
    def gen_proof(self, node):
        blockhashes = node.generate(10)

        privkey = ECKey()
        privkey.generate()
        pubkey = privkey.get_pubkey()

        stakes = create_coinbase_stakes(node, blockhashes,
                                        node.get_deterministic_priv_key().key)
        proof_hex = node.buildavalancheproof(42, 2000000000,
                                             pubkey.get_bytes().hex(), stakes)

        return bytes_to_wif(privkey.get_bytes()), FromHex(
            AvalancheProof(), proof_hex)
Example 9
def get_generate_key():
    """Generate a fresh key

    Returns a named tuple of privkey, pubkey and all address and scripts."""
    eckey = ECKey()
    eckey.generate()
    privkey = bytes_to_wif(eckey.get_bytes())
    pubkey = eckey.get_pubkey().get_bytes().hex()
    pkh = hash160(hex_str_to_bytes(pubkey))
    return Key(privkey=privkey,
               pubkey=pubkey,
               p2pkh_script=CScript(
                   [OP_DUP, OP_HASH160, pkh, OP_EQUALVERIFY, OP_CHECKSIG]).hex(),
               p2pkh_addr=key_to_p2pkh(pubkey))
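Since the P2PKH script is assembled by hand here, a quick sanity sketch (assuming the `key_to_p2pkh_script` helper used in the later example is importable) shows it matches the standard template:

# Hypothetical check: the hand-rolled OP_DUP/OP_HASH160/.../OP_CHECKSIG script
# equals the output of the framework helper for the same public key.
key = get_generate_key()
assert key.p2pkh_script == key_to_p2pkh_script(key.pubkey).hex()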
Example 10
    def test_getpeerinfo(self):
        self.log.info("Test getpeerinfo")
        # Create a few getpeerinfo last_block/last_transaction/last_proof
        # values.
        if self.is_wallet_compiled():
            self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1000000)
        tip = self.nodes[1].generate(1)[0]
        self.sync_all()

        stake = create_coinbase_stakes(
            self.nodes[1], [tip],
            self.nodes[1].get_deterministic_priv_key().key)
        privkey = ECKey()
        privkey.generate()
        proof = self.nodes[1].buildavalancheproof(
            42, 2000000000, bytes_to_wif(privkey.get_bytes()), stake)
        self.nodes[1].sendavalancheproof(proof)
        self.sync_proofs()

        time_now = int(time.time())
        peer_info = [x.getpeerinfo() for x in self.nodes]
        # Verify last_block, last_transaction and last_proof keys/values.
        for node, peer, field in product(
                range(self.num_nodes), range(2),
            ['last_block', 'last_transaction', 'last_proof']):
            assert field in peer_info[node][peer].keys()
            if peer_info[node][peer][field] != 0:
                assert_approx(peer_info[node][peer][field], time_now, vspan=60)
        # check both sides of bidirectional connection between nodes
        # the address bound to on one side will be the source address for the
        # other node
        assert_equal(peer_info[0][0]['addrbind'], peer_info[1][0]['addr'])
        assert_equal(peer_info[1][0]['addrbind'], peer_info[0][0]['addr'])
        assert_equal(peer_info[0][0]['minfeefilter'], Decimal("5.00"))
        assert_equal(peer_info[1][0]['minfeefilter'], Decimal("10.00"))
        # check the `servicesnames` field
        for info in peer_info:
            assert_net_servicesnames(int(info[0]["services"], 0x10),
                                     info[0]["servicesnames"])

        assert_equal(peer_info[0][0]['connection_type'], 'inbound')
        assert_equal(peer_info[0][1]['connection_type'], 'manual')

        assert_equal(peer_info[1][0]['connection_type'], 'manual')
        assert_equal(peer_info[1][1]['connection_type'], 'inbound')
Example 11
def get_generate_key():
    """Generate a fresh key

    Returns a named tuple of privkey, pubkey and all address and scripts."""
    eckey = ECKey()
    eckey.generate()
    privkey = bytes_to_wif(eckey.get_bytes())
    pubkey = eckey.get_pubkey().get_bytes().hex()
    return Key(privkey=privkey,
               pubkey=pubkey,
               p2pkh_script=key_to_p2pkh_script(pubkey).hex(),
               p2pkh_addr=key_to_p2pkh(pubkey),
               p2wpkh_script=key_to_p2wpkh_script(pubkey).hex(),
               p2wpkh_addr=key_to_p2wpkh(pubkey),
               p2sh_p2wpkh_script=script_to_p2sh_script(
                   key_to_p2wpkh_script(pubkey)).hex(),
               p2sh_p2wpkh_redeem_script=key_to_p2wpkh_script(pubkey).hex(),
               p2sh_p2wpkh_addr=key_to_p2sh_p2wpkh(pubkey))
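A hedged usage sketch (assuming `node` is a test node with a legacy wallet): import the generated private key and confirm the derived P2WPKH address is treated as owned.

# Hypothetical usage of get_generate_key(): import the key, then verify ownership.
key = get_generate_key()
node.importprivkey(key.privkey)
info = node.getaddressinfo(key.p2wpkh_addr)
assert info['ismine']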
Example 12
 def verify_txn_with_witness_script(self, tx_type):
     self.log.info(
         "Test with a {} script as the witnessScript".format(tx_type))
     eckey = ECKey()
     eckey.generate()
     embedded_privkey = bytes_to_wif(eckey.get_bytes())
     embedded_pubkey = eckey.get_pubkey().get_bytes().hex()
     witness_script = {
         'P2PKH': key_to_p2pkh_script(embedded_pubkey).hex(),
         'P2PK': CScript([hex_str_to_bytes(embedded_pubkey),
                          OP_CHECKSIG]).hex()
     }.get(tx_type, "Invalid tx_type")
     redeem_script = CScript([OP_0,
                              sha256(check_script(witness_script))]).hex()
     addr = script_to_p2sh(redeem_script, prefix=196)
     script_pub_key = self.nodes[1].validateaddress(addr)['scriptPubKey']
     # Fund that address
     txid = self.nodes[0].sendtoaddress(addr, 10)
     vout = find_vout_for_address(self.nodes[0], txid, addr)
     self.nodes[0].generate(1)
     # Now create and sign a transaction spending that output on node[0], which doesn't know the scripts or keys
     spending_tx = self.nodes[0].createrawtransaction([{
         'txid': txid,
         'vout': vout
     }], [{
         self.nodes[1].getnewaddress(): Decimal("9.999")
     }, {
         "fee": Decimal("0.001")
     }])
     spending_tx_signed = self.nodes[0].signrawtransactionwithkey(
         spending_tx, [embedded_privkey], [{
             'txid': txid,
             'vout': vout,
             'scriptPubKey': script_pub_key,
             'redeemScript': redeem_script,
             'witnessScript': witness_script,
             'amount': 10
         }])
     # Check the signing completed successfully
     assert 'complete' in spending_tx_signed
     assert_equal(spending_tx_signed['complete'], True)
     self.nodes[0].sendrawtransaction(spending_tx_signed['hex'])
def create_fund_and_activation_specific_spending_tx(spend, pre_fork_only):
    # Creates 2 transactions:
    # 1) txfund: create outputs to be used by txspend. Must be valid pre-fork.
    # 2) txspend: spending transaction that is specific to the activation
    #    being used and can be pre-fork-only or post-fork-only, depending on the
    #    function parameter.

    # This specific implementation uses the replay protection mechanism to
    # create transactions that are only valid before or after the fork.

    # Generate a key pair to test
    private_key = ECKey()
    private_key.generate()
    public_key = private_key.get_pubkey().get_bytes()

    # Fund transaction
    script = CScript([public_key, OP_CHECKSIG])
    txfund = create_tx_with_script(spend.tx,
                                   spend.n,
                                   b'',
                                   amount=int(SUBSIDY * COIN),
                                   script_pub_key=script)
    txfund.rehash()

    # Activation specific spending tx
    txspend = CTransaction()
    txspend.vout.append(CTxOut(int(SUBSIDY * COIN) - 1000, CScript([OP_TRUE])))
    txspend.vin.append(CTxIn(COutPoint(txfund.txid, 0), b''))

    # Sign the transaction
    # Use forkvalues that create pre-fork-only or post-fork-only
    # transactions.
    forkvalue = 0 if pre_fork_only else 0xffdead
    sighashtype = (forkvalue << 8) | SIGHASH_ALL | SIGHASH_FORKID
    sighash = SignatureHashForkId(script, txspend, 0, sighashtype,
                                  int(SUBSIDY * COIN))
    sig = private_key.sign_ecdsa(sighash) + \
        bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))
    txspend.vin[0].scriptSig = CScript([sig])
    txspend.rehash()

    return txfund, txspend
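To make the replay-protection trick above explicit, here is a small illustration of how the fork value is folded into the sighash type; this only restates the expression used in the snippet, with constant values as defined in the test framework.

SIGHASH_ALL = 0x01     # as defined in the test framework's script module
SIGHASH_FORKID = 0x40

def fork_sighashtype(forkvalue):
    # The fork value sits above the low byte; the low byte keeps the base flags.
    return (forkvalue << 8) | SIGHASH_ALL | SIGHASH_FORKID

assert fork_sighashtype(0) == 0x41               # pre-fork-only signature
assert fork_sighashtype(0xffdead) == 0xffdead41  # post-fork-only signature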
Example 14
def getnewdestination(address_type='bech32'):
    """Generate a random destination of the specified type and return the
       corresponding public key, scriptPubKey and address. Supported types are
       'legacy', 'p2sh-segwit' and 'bech32'. Can be used when a random
       destination is needed, but no compiled wallet is available (e.g. as
       replacement to the getnewaddress/getaddressinfo RPCs)."""
    key = ECKey()
    key.generate()
    pubkey = key.get_pubkey().get_bytes()
    if address_type == 'legacy':
        scriptpubkey = key_to_p2pkh_script(pubkey)
        address = key_to_p2pkh(pubkey)
    elif address_type == 'p2sh-segwit':
        scriptpubkey = key_to_p2sh_p2wpkh_script(pubkey)
        address = key_to_p2sh_p2wpkh(pubkey)
    elif address_type == 'bech32':
        scriptpubkey = key_to_p2wpkh_script(pubkey)
        address = key_to_p2wpkh(pubkey)
    # TODO: also support bech32m (need to generate x-only-pubkey)
    else:
        assert False
    return pubkey, scriptpubkey, address
    def test_receive_proof(self):
        self.log.info("Test a peer is created on proof reception")

        node = self.nodes[0]
        _, proof = self.gen_proof(node)

        peer = node.add_p2p_connection(P2PInterface())

        msg = msg_avaproof()
        msg.proof = proof
        peer.send_message(msg)

        wait_until(lambda: proof.proofid in get_proof_ids(node))

        self.log.info("Test receiving a proof with missing utxo is orphaned")

        privkey = ECKey()
        privkey.generate()
        orphan_hex = node.buildavalancheproof(
            42, 2000000000,
            privkey.get_pubkey().get_bytes().hex(),
            [{
                'txid': '0' * 64,
                'vout': 0,
                'amount': 10e6,
                'height': 42,
                'iscoinbase': False,
                'privatekey': bytes_to_wif(privkey.get_bytes()),
            }])

        orphan = FromHex(AvalancheProof(), orphan_hex)
        orphan_proofid = "{:064x}".format(orphan.proofid)

        msg = msg_avaproof()
        msg.proof = orphan
        peer.send_message(msg)

        wait_for_proof(node, orphan_proofid, expect_orphan=True)
    def test_orphan_download(self, context):
        node = self.nodes[0]
        privkey = ECKey()
        privkey.generate()
        privkey_wif = bytes_to_wif(privkey.get_bytes())

        # Build a proof with missing utxos so it will be orphaned
        orphan = node.buildavalancheproof(42, 2000000000,
                                          bytes_to_wif(privkey.get_bytes()),
                                          [{
                                              'txid': '0' * 64,
                                              'vout': 0,
                                              'amount': 10e6,
                                              'height': 42,
                                              'iscoinbase': False,
                                              'privatekey': privkey_wif,
                                          }])
        proofid = FromHex(LegacyAvalancheProof(), orphan).proofid
        proofid_hex = "{:064x}".format(proofid)

        self.restart_node(0,
                          extra_args=self.extra_args[0] + [
                              "-avaproof={}".format(orphan),
                              "-avamasterkey={}".format(privkey_wif),
                          ])
        node.generate(1)
        wait_for_proof(node, proofid_hex, expect_orphan=True)

        peer = node.add_p2p_connection(context.p2p_conn())
        peer.send_message(msg_inv([CInv(t=context.inv_type, h=proofid)]))

        # Give enough time for the node to eventually request the proof.
        node.setmocktime(
            int(time.time()) + context.constants.getdata_interval + 1)
        peer.sync_with_ping()

        assert_equal(peer.getdata_count, 0)
Example 17
def generate_wif_key():
    # Makes a WIF privkey for imports
    k = ECKey()
    k.generate()
    return bytes_to_wif(k.get_bytes(), k.is_compressed)
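A minimal usage note (hypothetical; `node` is assumed to be a test node with wallet support): the returned WIF string can be passed straight to importprivkey.

# Hypothetical usage of generate_wif_key(): import a throwaway key for a test.
wif = generate_wif_key()
node.importprivkey(wif)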
Example 18
def random_p2wpkh():
    """Generate a random P2WPKH scriptPubKey. Can be used when a random destination is needed,
    but no compiled wallet is available (e.g. as replacement to the getnewaddress RPC)."""
    key = ECKey()
    key.generate()
    return key_to_p2wpkh_script(key.get_pubkey().get_bytes())
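A small sketch of how the returned script might be used when hand-building a transaction (assumes CTransaction, CTxOut and COIN from test_framework.messages):

# Hypothetical usage of random_p2wpkh(): pay 0.1 coins to the random script.
from test_framework.messages import COIN, CTransaction, CTxOut

tx = CTransaction()
tx.vout.append(CTxOut(int(0.1 * COIN), random_p2wpkh()))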
Example 19
    def run_test(self):
        node = self.nodes[0]
        node.add_p2p_connection(P2PDataStore())
        node.setmocktime(REPLAY_PROTECTION_START_TIME)

        self.genesis_hash = int(node.getbestblockhash(), 16)
        self.block_heights[self.genesis_hash] = 0
        spendable_outputs = []

        # save the current tip so it can be spent by a later block
        def save_spendable_output():
            spendable_outputs.append(self.tip)

        # get an output that we previously marked as spendable
        def get_spendable_output():
            return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)

        # move the tip back to a previous block
        def tip(number):
            self.tip = self.blocks[number]

        # adds transactions to the block and updates state
        def update_block(block_number, new_transactions):
            block = self.blocks[block_number]
            block.vtx.extend(new_transactions)
            old_sha256 = block.sha256
            make_conform_to_ctor(block)
            block.hashMerkleRoot = block.calc_merkle_root()
            block.solve()
            # Update the internal state just like in next_block
            self.tip = block
            if block.sha256 != old_sha256:
                self.block_heights[
                    block.sha256] = self.block_heights[old_sha256]
                del self.block_heights[old_sha256]
            self.blocks[block_number] = block
            return block

        # shorthand
        block = self.next_block

        # Create a new block
        block(0)
        save_spendable_output()
        node.p2p.send_blocks_and_test([self.tip], node)

        # Now we need that block to mature so we can spend the coinbase.
        maturity_blocks = []
        for i in range(99):
            block(5000 + i)
            maturity_blocks.append(self.tip)
            save_spendable_output()
        node.p2p.send_blocks_and_test(maturity_blocks, node)

        # collect spendable outputs now to avoid cluttering the code later on
        out = []
        for i in range(100):
            out.append(get_spendable_output())

        # Generate a key pair to test P2SH sigops count
        private_key = ECKey()
        private_key.generate()
        public_key = private_key.get_pubkey().get_bytes()

        # This is a little handier to use than the version in blocktools.py
        def create_fund_and_spend_tx(spend, forkvalue=0):
            # Fund transaction
            script = CScript([public_key, OP_CHECKSIG])
            txfund = create_tx_with_script(spend.tx,
                                           spend.n,
                                           b'',
                                           amount=50 * COIN - 1000,
                                           script_pub_key=script)
            txfund.rehash()

            # Spend transaction
            txspend = CTransaction()
            txspend.vout.append(CTxOut(50 * COIN - 2000, CScript([OP_TRUE])))
            txspend.vin.append(CTxIn(COutPoint(txfund.sha256, 0), b''))

            # Sign the transaction
            sighashtype = (forkvalue << 8) | SIGHASH_ALL | SIGHASH_FORKID
            sighash = SignatureHashForkId(script, txspend, 0, sighashtype,
                                          50 * COIN - 1000)
            sig = private_key.sign_ecdsa(sighash) + \
                bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))
            txspend.vin[0].scriptSig = CScript([sig])
            txspend.rehash()

            return [txfund, txspend]

        def send_transaction_to_mempool(tx):
            tx_id = node.sendrawtransaction(ToHex(tx))
            assert tx_id in set(node.getrawmempool())
            return tx_id

        # Before the fork, no replay protection required to get in the mempool.
        txns = create_fund_and_spend_tx(out[0])
        send_transaction_to_mempool(txns[0])
        send_transaction_to_mempool(txns[1])

        # And txns get mined in a block properly.
        block(1)
        update_block(1, txns)
        node.p2p.send_blocks_and_test([self.tip], node)

        # Replay protected transactions are rejected.
        replay_txns = create_fund_and_spend_tx(out[1], 0xffdead)
        send_transaction_to_mempool(replay_txns[0])
        assert_raises_rpc_error(-26, RPC_INVALID_SIGNATURE_ERROR,
                                node.sendrawtransaction, ToHex(replay_txns[1]))

        # And blocks containing them are rejected as well.
        block(2)
        update_block(2, replay_txns)
        node.p2p.send_blocks_and_test([self.tip],
                                      node,
                                      success=False,
                                      reject_reason='blk-bad-inputs')

        # Rewind bad block
        tip(1)

        # Create a block that would activate the replay protection.
        bfork = block(5555)
        bfork.nTime = REPLAY_PROTECTION_START_TIME - 1
        update_block(5555, [])
        node.p2p.send_blocks_and_test([self.tip], node)

        activation_blocks = []
        for i in range(5):
            block(5100 + i)
            activation_blocks.append(self.tip)
        node.p2p.send_blocks_and_test(activation_blocks, node)

        # Check we are just before the activation time
        assert_equal(node.getblockchaininfo()['mediantime'],
                     REPLAY_PROTECTION_START_TIME - 1)

        # We are just before the fork; replay protected txns are still rejected
        assert_raises_rpc_error(-26, RPC_INVALID_SIGNATURE_ERROR,
                                node.sendrawtransaction, ToHex(replay_txns[1]))

        block(3)
        update_block(3, replay_txns)
        node.p2p.send_blocks_and_test([self.tip],
                                      node,
                                      success=False,
                                      reject_reason='blk-bad-inputs')

        # Rewind bad block
        tip(5104)

        # Send some non replay protected txns in the mempool to check
        # they get cleaned at activation.
        txns = create_fund_and_spend_tx(out[2])
        send_transaction_to_mempool(txns[0])
        tx_id = send_transaction_to_mempool(txns[1])

        # Activate the replay protection
        block(5556)
        node.p2p.send_blocks_and_test([self.tip], node)

        # Check we just activated the replay protection
        assert_equal(node.getblockchaininfo()['mediantime'],
                     REPLAY_PROTECTION_START_TIME)

        # Non replay protected transactions are not valid anymore,
        # so they should be removed from the mempool.
        assert tx_id not in set(node.getrawmempool())

        # Good old transactions are now invalid.
        send_transaction_to_mempool(txns[0])
        assert_raises_rpc_error(-26, RPC_INVALID_SIGNATURE_ERROR,
                                node.sendrawtransaction, ToHex(txns[1]))

        # They also cannot be mined
        block(4)
        update_block(4, txns)
        node.p2p.send_blocks_and_test([self.tip],
                                      node,
                                      success=False,
                                      reject_reason='blk-bad-inputs')

        # Rewind bad block
        tip(5556)

        # The replay protected transaction is now valid
        replay_tx0_id = send_transaction_to_mempool(replay_txns[0])
        replay_tx1_id = send_transaction_to_mempool(replay_txns[1])

        # Make sure the transactions are ready to be mined.
        tmpl = node.getblocktemplate()

        found_id0 = False
        found_id1 = False

        for txn in tmpl['transactions']:
            txid = txn['txid']
            if txid == replay_tx0_id:
                found_id0 = True
            elif txid == replay_tx1_id:
                found_id1 = True

        assert found_id0 and found_id1

        # And the mempool is still in good shape.
        assert replay_tx0_id in set(node.getrawmempool())
        assert replay_tx1_id in set(node.getrawmempool())

        # They can also be mined
        block(5)
        update_block(5, replay_txns)
        node.p2p.send_blocks_and_test([self.tip], node)

        # Ok, now we check that a reorg works properly across the activation.
        postforkblockid = node.getbestblockhash()
        node.invalidateblock(postforkblockid)
        assert replay_tx0_id in set(node.getrawmempool())
        assert replay_tx1_id in set(node.getrawmempool())

        # Deactivating replay protection.
        forkblockid = node.getbestblockhash()
        node.invalidateblock(forkblockid)
        # The funding tx is not evicted from the mempool, since it's valid on
        # both sides of the fork
        assert replay_tx0_id in set(node.getrawmempool())
        assert replay_tx1_id not in set(node.getrawmempool())

        # Check that we also do it properly on deeper reorg.
        node.reconsiderblock(forkblockid)
        node.reconsiderblock(postforkblockid)
        node.invalidateblock(forkblockid)
        assert replay_tx0_id in set(node.getrawmempool())
        assert replay_tx1_id not in set(node.getrawmempool())
    def run_test(self):
        self.log.info("Mining blocks...")
        self.nodes[0].generate(101)

        self.sync_all()

        # address
        address1 = self.nodes[0].getnewaddress()
        # pubkey
        address2 = self.nodes[0].getnewaddress()
        # privkey
        eckey = ECKey()
        eckey.generate()
        address3_privkey = bytes_to_wif(eckey.get_bytes())
        address3 = key_to_p2wpkh(eckey.get_pubkey().get_bytes())
        self.nodes[0].importprivkey(address3_privkey)

        # Check only one address
        address_info = self.nodes[0].getaddressinfo(address1)
        assert_equal(address_info['ismine'], True)

        self.sync_all()

        # Node 1 sync test
        assert_equal(self.nodes[1].getblockcount(), 101)

        # Address Test - before import
        address_info = self.nodes[1].getaddressinfo(address1)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)

        address_info = self.nodes[1].getaddressinfo(address2)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)

        address_info = self.nodes[1].getaddressinfo(address3)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)

        # Send funds to self
        txnid1 = self.nodes[0].sendtoaddress(address1, 0.1)
        self.nodes[0].generate(1)
        rawtxn1 = self.nodes[0].gettransaction(txnid1)['hex']
        proof1 = self.nodes[0].gettxoutproof([txnid1])

        txnid2 = self.nodes[0].sendtoaddress(address2, 0.05)
        self.nodes[0].generate(1)
        rawtxn2 = self.nodes[0].gettransaction(txnid2)['hex']
        proof2 = self.nodes[0].gettxoutproof([txnid2])

        txnid3 = self.nodes[0].sendtoaddress(address3, 0.025)
        self.nodes[0].generate(1)
        rawtxn3 = self.nodes[0].gettransaction(txnid3)['hex']
        proof3 = self.nodes[0].gettxoutproof([txnid3])

        self.sync_all()

        # Import with no affiliated address
        assert_raises_rpc_error(-5, "No addresses",
                                self.nodes[1].importprunedfunds, rawtxn1,
                                proof1)

        balance1 = self.nodes[1].getbalance()
        assert_equal(balance1, Decimal(0))

        # Import with affiliated address with no rescan
        self.nodes[1].createwallet('wwatch', disable_private_keys=True)
        wwatch = self.nodes[1].get_wallet_rpc('wwatch')
        wwatch.importaddress(address=address2, rescan=False)
        wwatch.importprunedfunds(rawtransaction=rawtxn2, txoutproof=proof2)
        assert [
            tx for tx in wwatch.listtransactions(include_watchonly=True)
            if tx['txid'] == txnid2
        ]

        # Import with private key with no rescan
        w1 = self.nodes[1].get_wallet_rpc(self.default_wallet_name)
        w1.importprivkey(privkey=address3_privkey, rescan=False)
        w1.importprunedfunds(rawtxn3, proof3)
        assert [tx for tx in w1.listtransactions() if tx['txid'] == txnid3]
        balance3 = w1.getbalance()
        assert_equal(balance3, Decimal('0.025'))

        # Addresses Test - after import
        address_info = w1.getaddressinfo(address1)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)
        address_info = wwatch.getaddressinfo(address2)
        if self.options.descriptors:
            assert_equal(address_info['iswatchonly'], False)
            assert_equal(address_info['ismine'], True)
        else:
            assert_equal(address_info['iswatchonly'], True)
            assert_equal(address_info['ismine'], False)
        address_info = w1.getaddressinfo(address3)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], True)

        # Remove transactions
        assert_raises_rpc_error(-8, "Transaction does not exist in wallet.",
                                w1.removeprunedfunds, txnid1)
        assert not [
            tx for tx in w1.listtransactions(include_watchonly=True)
            if tx['txid'] == txnid1
        ]

        wwatch.removeprunedfunds(txnid2)
        assert not [
            tx for tx in wwatch.listtransactions(include_watchonly=True)
            if tx['txid'] == txnid2
        ]

        w1.removeprunedfunds(txnid3)
        assert not [
            tx for tx in w1.listtransactions(include_watchonly=True)
            if tx['txid'] == txnid3
        ]
Example 21
    def test_double_spend(self):
        '''
        This tests the case where the same UTXO is spent twice on two separate
        blocks as part of a reorg.

             ab0
          /       \
        aa1 [tx1]   bb1 [tx2]
         |           |
        aa2         bb2
         |           |
        aa3         bb3
                     |
                    bb4

        Problematic case:

        1. User 1 receives XEP in tx1 from utxo1 in block aa1.
        2. User 2 receives XEP in tx2 from utxo1 (same) in block bb1
        3. User 1 sees 2 confirmations at block aa3.
        4. Reorg into bb chain.
        5. User 1 asks `listsinceblock aa3` and does not see that tx1 is now
           invalidated.

        Currently the solution to this is to detect that a reorg'd block is
        asked for in listsinceblock, and to iterate back over existing blocks up
        until the fork point, and to include all transactions that relate to the
        node wallet.
        '''
        self.log.info("Test double spend")

        self.sync_all()

        # share utxo between nodes[1] and nodes[2]
        eckey = ECKey()
        eckey.generate()
        privkey = bytes_to_wif(eckey.get_bytes())
        address = key_to_p2wpkh(eckey.get_pubkey().get_bytes())
        self.nodes[2].sendtoaddress(address, 10)
        self.nodes[2].generate(6)
        self.sync_all()
        self.nodes[2].importprivkey(privkey)
        utxos = self.nodes[2].listunspent()
        utxo = [u for u in utxos if u["address"] == address][0]
        self.nodes[1].importprivkey(privkey)

        # Split network into two
        self.split_network()

        # send from nodes[1] using utxo to nodes[0]
        change = '%.8f' % (float(utxo['amount']) - 1.0003)
        recipient_dict = {
            self.nodes[0].getnewaddress(): 1,
            self.nodes[1].getnewaddress(): change,
        }
        utxo_dicts = [{
            'txid': utxo['txid'],
            'vout': utxo['vout'],
        }]
        txid1 = self.nodes[1].sendrawtransaction(
            self.nodes[1].signrawtransactionwithwallet(
                self.nodes[1].createrawtransaction(utxo_dicts,
                                                   recipient_dict))['hex'])

        # send from nodes[2] using utxo to nodes[3]
        recipient_dict2 = {
            self.nodes[3].getnewaddress(): 1,
            self.nodes[2].getnewaddress(): change,
        }
        self.nodes[2].sendrawtransaction(
            self.nodes[2].signrawtransactionwithwallet(
                self.nodes[2].createrawtransaction(utxo_dicts,
                                                   recipient_dict2))['hex'])

        # generate on both sides
        lastblockhash = self.nodes[1].generate(3)[2]
        self.nodes[2].generate(4)

        self.join_network()

        self.sync_all()

        # gettransaction should work for txid1
        assert self.nodes[0].gettransaction(
            txid1)['txid'] == txid1, "gettransaction failed to find txid1"

        # listsinceblock(lastblockhash) should now include txid1, as seen from nodes[0]
        lsbres = self.nodes[0].listsinceblock(lastblockhash)
        assert any(tx['txid'] == txid1 for tx in lsbres['removed'])

        # but it should not include 'removed' if include_removed=false
        lsbres2 = self.nodes[0].listsinceblock(blockhash=lastblockhash,
                                               include_removed=False)
        assert 'removed' not in lsbres2
def gen_privkey():
    pk = ECKey()
    pk.generate()
    return pk
    def run_test(self):
        # Turn off node 1 while node 0 mines blocks to generate stakes,
        # so that we can later try starting node 1 with an orphan proof.
        self.stop_node(1)

        node = self.nodes[0]

        addrkey0 = node.get_deterministic_priv_key()
        blockhashes = node.generatetoaddress(100, addrkey0.address)

        self.log.info(
            "Make build a valid proof and restart the node to use it")
        privkey = ECKey()
        privkey.set(
            bytes.fromhex(
                "12b004fff7f4b69ef8650e767f18f11ede158148b425660723b9f9a66e61f747"
            ), True)
        wif_privkey = bytes_to_wif(privkey.get_bytes())

        def get_hex_pubkey(privkey):
            return privkey.get_pubkey().get_bytes().hex()

        proof_master = get_hex_pubkey(privkey)
        proof_sequence = 11
        proof_expiration = 12
        stakes = create_coinbase_stakes(node, [blockhashes[0]], addrkey0.key)
        proof = node.buildavalancheproof(proof_sequence, proof_expiration,
                                         wif_privkey, stakes)

        self.log.info("Test decodeavalancheproof RPC")
        proofobj = FromHex(LegacyAvalancheProof(), proof)
        decodedproof = node.decodeavalancheproof(proof)
        limited_id_hex = f"{proofobj.limited_proofid:0{64}x}"
        assert_equal(decodedproof["sequence"], proof_sequence)
        assert_equal(decodedproof["expiration"], proof_expiration)
        assert_equal(decodedproof["master"], proof_master)
        assert_equal(decodedproof["payoutscript"]["hex"], "")
        assert "signature" not in decodedproof.keys()
        assert_equal(decodedproof["proofid"], f"{proofobj.proofid:0{64}x}")
        assert_equal(decodedproof["limitedid"], limited_id_hex)
        assert_equal(decodedproof["stakes"][0]["txid"], stakes[0]["txid"])
        assert_equal(decodedproof["stakes"][0]["vout"], stakes[0]["vout"])
        assert_equal(decodedproof["stakes"][0]["height"], stakes[0]["height"])
        assert_equal(decodedproof["stakes"][0]["iscoinbase"],
                     stakes[0]["iscoinbase"])
        assert_equal(decodedproof["stakes"][0]["signature"],
                     base64.b64encode(proofobj.stakes[0].sig).decode("ascii"))

        # Invalid hex (odd number of hex digits)
        assert_raises_rpc_error(-22, "Proof must be an hexadecimal string",
                                node.decodeavalancheproof, proof[:-1])
        # Valid hex but invalid proof
        assert_raises_rpc_error(-22, "Proof has invalid format",
                                node.decodeavalancheproof, proof[:-2])

        self.log.info(
            "Testing decodeavalancheproof with legacyavaproof disabled")
        self.restart_node(0, self.extra_args[0] + ["-legacyavaproof=0"])

        regular_proof = node.buildavalancheproof(proof_sequence,
                                                 proof_expiration, wif_privkey,
                                                 stakes,
                                                 ADDRESS_ECREG_UNSPENDABLE)
        decoded_regular_proof = node.decodeavalancheproof(regular_proof)

        assert_equal(decoded_regular_proof["sequence"],
                     decodedproof["sequence"])
        assert_equal(decoded_regular_proof["expiration"],
                     decodedproof["expiration"])
        assert_equal(decoded_regular_proof["master"], decodedproof["master"])
        assert_equal(
            decoded_regular_proof["payoutscript"], {
                "asm":
                "OP_DUP OP_HASH160 0000000000000000000000000000000000000000 OP_EQUALVERIFY OP_CHECKSIG",
                "hex": "76a914000000000000000000000000000000000000000088ac",
                "reqSigs": 1,
                "type": "pubkeyhash",
                "addresses": [ADDRESS_ECREG_UNSPENDABLE],
            })

        regular_proof_obj = FromHex(AvalancheProof(), regular_proof)
        assert_equal(
            decoded_regular_proof["signature"],
            base64.b64encode(regular_proof_obj.signature).decode("ascii"))
        assert_equal(decoded_regular_proof["proofid"],
                     f"{regular_proof_obj.proofid:0{64}x}")
        assert_equal(decoded_regular_proof["limitedid"],
                     f"{regular_proof_obj.limited_proofid:0{64}x}")

        assert_equal(decoded_regular_proof["stakes"][0]["txid"],
                     decodedproof["stakes"][0]["txid"])
        assert_equal(decoded_regular_proof["stakes"][0]["vout"],
                     decodedproof["stakes"][0]["vout"])
        assert_equal(decoded_regular_proof["stakes"][0]["height"],
                     decodedproof["stakes"][0]["height"])
        assert_equal(decoded_regular_proof["stakes"][0]["iscoinbase"],
                     decodedproof["stakes"][0]["iscoinbase"])
        assert_equal(
            decoded_regular_proof["stakes"][0]["signature"],
            base64.b64encode(regular_proof_obj.stakes[0].sig).decode("ascii"))

        # Restart the node with this proof
        self.restart_node(
            0, self.extra_args[0] + [
                "-avaproof={}".format(proof),
                "-avamasterkey=cND2ZvtabDbJ1gucx9GWH6XT9kgTAqfb6cotPt5Q5CyxVDhid2EN",
            ])

        self.log.info("The proof is registered at first chaintip update")
        assert_equal(len(node.getavalanchepeerinfo()), 0)
        node.generate(1)
        self.wait_until(lambda: len(node.getavalanchepeerinfo()) == 1,
                        timeout=5)

        # This case will occur for users building proofs with a third party
        # tool and then starting a new node that is not yet aware of the
        # transactions used for stakes.
        self.log.info("Start a node with an orphan proof")

        self.start_node(
            1, self.extra_args[0] + [
                "-avaproof={}".format(proof),
                "-avamasterkey=cND2ZvtabDbJ1gucx9GWH6XT9kgTAqfb6cotPt5Q5CyxVDhid2EN",
            ])
        # Mine a block to trigger an attempt at registering the proof
        self.nodes[1].generate(1)
        wait_for_proof(self.nodes[1],
                       f"{proofobj.proofid:0{64}x}",
                       expect_orphan=True)

        self.log.info("Connect to an up-to-date node to unorphan the proof")
        self.connect_nodes(1, node.index)
        self.sync_all()
        wait_for_proof(self.nodes[1],
                       f"{proofobj.proofid:0{64}x}",
                       expect_orphan=False)

        self.log.info("Generate delegations for the proof")

        # Stack up a few delegation levels
        def gen_privkey():
            pk = ECKey()
            pk.generate()
            return pk

        delegator_privkey = privkey
        delegation = None
        for _ in range(10):
            delegated_privkey = gen_privkey()
            delegation = node.delegateavalancheproof(
                limited_id_hex,
                bytes_to_wif(delegator_privkey.get_bytes()),
                get_hex_pubkey(delegated_privkey),
                delegation,
            )
            delegator_privkey = delegated_privkey

        random_privkey = gen_privkey()
        random_pubkey = get_hex_pubkey(random_privkey)

        # Invalid proof
        no_stake = node.buildavalancheproof(proof_sequence, proof_expiration,
                                            wif_privkey, [])

        # Invalid privkey
        assert_raises_rpc_error(
            -5,
            "The private key is invalid",
            node.delegateavalancheproof,
            limited_id_hex,
            bytes_to_wif(bytes(32)),
            random_pubkey,
        )

        # Invalid delegation
        bad_dg = AvalancheDelegation()
        assert_raises_rpc_error(
            -8,
            "The delegation does not match the proof",
            node.delegateavalancheproof,
            limited_id_hex,
            bytes_to_wif(privkey.get_bytes()),
            random_pubkey,
            bad_dg.serialize().hex(),
        )

        # Still invalid, but with a matching proofid
        bad_dg.limited_proofid = proofobj.limited_proofid
        bad_dg.proof_master = proofobj.master
        bad_dg.levels = [AvalancheDelegationLevel()]
        assert_raises_rpc_error(
            -8,
            "The delegation is invalid",
            node.delegateavalancheproof,
            limited_id_hex,
            bytes_to_wif(privkey.get_bytes()),
            random_pubkey,
            bad_dg.serialize().hex(),
        )

        # Wrong privkey: matches the proof but not the delegation
        assert_raises_rpc_error(
            -5,
            "The private key does not match the delegation",
            node.delegateavalancheproof,
            limited_id_hex,
            bytes_to_wif(privkey.get_bytes()),
            random_pubkey,
            delegation,
        )

        # Delegation not hex
        assert_raises_rpc_error(
            -22,
            "Delegation must be an hexadecimal string.",
            node.delegateavalancheproof,
            limited_id_hex,
            bytes_to_wif(privkey.get_bytes()),
            random_pubkey,
            "f00",
        )
        # Delegation is hex but ill-formed
        assert_raises_rpc_error(
            -22,
            "Delegation has invalid format",
            node.delegateavalancheproof,
            limited_id_hex,
            bytes_to_wif(privkey.get_bytes()),
            random_pubkey,
            "dead",
        )

        # Test invalid proofs
        dust = node.buildavalancheproof(
            proof_sequence, proof_expiration, wif_privkey,
            create_coinbase_stakes(node, [blockhashes[0]],
                                   addrkey0.key,
                                   amount="0"))

        dust2 = node.buildavalancheproof(
            proof_sequence, proof_expiration, wif_privkey,
            create_coinbase_stakes(
                node, [blockhashes[0]],
                addrkey0.key,
                amount=f"{PROOF_DUST_THRESHOLD * 0.9999:.2f}"))

        missing_stake = node.buildavalancheproof(
            proof_sequence, proof_expiration, wif_privkey,
            [{
                'txid': '0' * 64,
                'vout': 0,
                'amount': 10000000,
                'height': 42,
                'iscoinbase': False,
                'privatekey': addrkey0.key,
            }])

        duplicate_stake = (
            "0b000000000000000c0000000000000021030b4c866585dd868"
            "a9d62348a9cd008d6a312937048fff31670e7e920cfc7a74402"
            "05c5f72f5d6da3085583e75ee79340eb4eff208c89988e7ed0e"
            "fb30b87298fa30000000000f2052a0100000003000000210227"
            "d85ba011276cf25b51df6a188b75e604b38770a462b2d0e9fb2"
            "fc839ef5d3f86076def2e8bc3c40671c1a0eb505da5857a950a"
            "0cf4625a80018cdd75ac62e61273ff8142f747de67e73f6368c"
            "8648942b0ef6c065d72a81ad7438a23c11cca05c5f72f5d6da3"
            "085583e75ee79340eb4eff208c89988e7ed0efb30b87298fa30"
            "000000000f2052a0100000003000000210227d85ba011276cf2"
            "5b51df6a188b75e604b38770a462b2d0e9fb2fc839ef5d3f860"
            "76def2e8bc3c40671c1a0eb505da5857a950a0cf4625a80018c"
            "dd75ac62e61273ff8142f747de67e73f6368c8648942b0ef6c0"
            "65d72a81ad7438a23c11cca")

        bad_sig = (
            "0b000000000000000c0000000000000021030b4c866585dd868a9d62348"
            "a9cd008d6a312937048fff31670e7e920cfc7a7440105c5f72f5d6da3085"
            "583e75ee79340eb4eff208c89988e7ed0efb30b87298fa30000000000f20"
            "52a0100000003000000210227d85ba011276cf25b51df6a188b75e604b3"
            "8770a462b2d0e9fb2fc839ef5d3faf07f001dd38e9b4a43d07d5d449cc0"
            "f7d2888d96b82962b3ce516d1083c0e031773487fc3c4f2e38acd1db974"
            "1321b91a79b82d1c2cfd47793261e4ba003cf5")

        wrong_order = (
            "c964aa6fde575e4ce8404581c7be874e21023beefdde700a6bc0203"
            "6335b4df141c8bc67bb05a971f5ac2745fd683797dde30305d427b7"
            "06705a5d4b6a368a231d6db62abacf8c29bc32b61e7f65a0a6976aa"
            "8b86b687bc0260e821e4f0200b9d3bf6d2102449fb5237efe8f647d"
            "32e8b64f06c22d1d40368eaca2a71ffc6a13ecc8bce68052365271b"
            "6c71189f5cd7e3b694b77b579080f0b35bae567b96590ab6aa3019b"
            "018ff9f061f52f1426bdb195d4b6d4dff5114cee90e33dabf0c588e"
            "badf7774418f54247f6390791706af36fac782302479898b5273f9e"
            "51a92cb1fb5af43deeb6c8c269403d30ffcb380300134398c42103e"
            "49f9df52de2dea81cf7838b82521b69f2ea360f1c4eed9e6c89b7d0"
            "f9e645efa08e97ea0c60e1f0a064fbf08989c084707082727e85dcb"
            "9f79bb503f76ee6c8dad42a07ef15c89b3750a5631d604b21fafff0"
            "f4de354ade95c2f28160ae549af0d4ce48c4ca9d0714b1fa5192027"
            "0f8575e0af610f07b4e602a018ecdbb649b64fff614c0026e9fc8e0"
            "030092533d422103aac52f4cfca700e7e9824298e0184755112e32f"
            "359c832f5f6ad2ef62a2c024af812d6d7f2ecc6223a774e19bce1fb"
            "20d94d6b01ea693638f55c74fdaa5358fa9239d03e4caf3d817e8f7"
            "48ccad55a27b9d365db06ad5a0b779ac385f3dc8710")

        self.log.info(
            "Check the verifyavalancheproof and sendavalancheproof RPCs")

        if self.is_wallet_compiled():
            self.log.info(
                "Check a proof with the maximum number of UTXO is valid")
            new_blocks = node.generate(AVALANCHE_MAX_PROOF_STAKES // 10 + 1)
            # confirm the coinbase UTXOs
            node.generate(101)
            too_many_stakes = create_stakes(node, new_blocks,
                                            AVALANCHE_MAX_PROOF_STAKES + 1)
            maximum_stakes = too_many_stakes[:-1]

            good_proof = node.buildavalancheproof(proof_sequence,
                                                  proof_expiration,
                                                  wif_privkey, maximum_stakes)

            too_many_utxos = node.buildavalancheproof(proof_sequence,
                                                      proof_expiration,
                                                      wif_privkey,
                                                      too_many_stakes)

            assert node.verifyavalancheproof(good_proof)

        for rpc in [node.verifyavalancheproof, node.sendavalancheproof]:
            assert_raises_rpc_error(-22, "Proof must be an hexadecimal string",
                                    rpc, "f00")
            assert_raises_rpc_error(-22, "Proof has invalid format", rpc,
                                    "f00d")

            def check_rpc_failure(proof, message):
                assert_raises_rpc_error(-8, "The proof is invalid: " + message,
                                        rpc, proof)

            check_rpc_failure(no_stake, "no-stake")
            check_rpc_failure(dust, "amount-below-dust-threshold")
            check_rpc_failure(duplicate_stake, "duplicated-stake")
            check_rpc_failure(missing_stake, "utxo-missing-or-spent")
            check_rpc_failure(bad_sig, "invalid-stake-signature")
            check_rpc_failure(wrong_order, "wrong-stake-ordering")
            if self.is_wallet_compiled():
                check_rpc_failure(too_many_utxos, "too-many-utxos")

        conflicting_utxo = node.buildavalancheproof(proof_sequence + 1,
                                                    proof_expiration,
                                                    wif_privkey, stakes)
        assert_raises_rpc_error(
            -8, "The proof has conflicting utxo with an existing proof",
            node.sendavalancheproof, conflicting_utxo)

        # Clear the proof pool
        self.restart_node(0)

        # Good proof
        assert node.verifyavalancheproof(proof)

        peer = node.add_p2p_connection(P2PInterface())

        proofid = FromHex(LegacyAvalancheProof(), proof).proofid
        node.sendavalancheproof(proof)
        assert proofid in get_proof_ids(node)

        def inv_found():
            with p2p_lock:
                return peer.last_message.get(
                    "inv") and peer.last_message["inv"].inv[-1].hash == proofid

        self.wait_until(inv_found)

        self.log.info("Check the getrawproof RPC")

        raw_proof = node.getrawavalancheproof("{:064x}".format(proofid))
        assert_equal(raw_proof['proof'], proof)
        assert_equal(raw_proof['orphan'], False)

        assert_raises_rpc_error(-8, "Proof not found",
                                node.getrawavalancheproof, '0' * 64)

        # Orphan the proof by sending the stake
        raw_tx = node.createrawtransaction([{
            "txid": stakes[-1]["txid"],
            "vout": 0
        }], {
            ADDRESS_ECREG_UNSPENDABLE:
            stakes[-1]["amount"] - Decimal('10000')
        })
        signed_tx = node.signrawtransactionwithkey(raw_tx, [addrkey0.key])
        node.sendrawtransaction(signed_tx["hex"])
        node.generate(1)
        self.wait_until(lambda: proofid not in get_proof_ids(node))

        raw_proof = node.getrawavalancheproof("{:064x}".format(proofid))
        assert_equal(raw_proof['proof'], proof)
        assert_equal(raw_proof['orphan'], True)

        self.log.info("Bad proof should be rejected at startup")

        self.stop_node(0)

        node.assert_start_raises_init_error(
            self.extra_args[0] + [
                "-avasessionkey=0",
            ],
            expected_msg="Error: The avalanche session key is invalid.",
        )

        node.assert_start_raises_init_error(
            self.extra_args[0] + [
                "-avaproof={}".format(proof),
            ],
            expected_msg=
            "Error: The avalanche master key is missing for the avalanche proof.",
        )

        node.assert_start_raises_init_error(
            self.extra_args[0] + [
                "-avaproof={}".format(proof),
                "-avamasterkey=0",
            ],
            expected_msg="Error: The avalanche master key is invalid.",
        )

        def check_proof_init_error(proof, message):
            node.assert_start_raises_init_error(
                self.extra_args[0] + [
                    "-avaproof={}".format(proof),
                    "-avamasterkey=cND2ZvtabDbJ1gucx9GWH6XT9kgTAqfb6cotPt5Q5CyxVDhid2EN",
                ],
                expected_msg="Error: " + message,
            )

        check_proof_init_error(no_stake, "The avalanche proof has no stake.")
        check_proof_init_error(dust, "The avalanche proof stake is too low.")
        check_proof_init_error(dust2, "The avalanche proof stake is too low.")
        check_proof_init_error(duplicate_stake,
                               "The avalanche proof has duplicated stake.")
        check_proof_init_error(
            bad_sig, "The avalanche proof has invalid stake signatures.")
        if self.is_wallet_compiled():
            # The too-many-utxos case creates a proof so large that it cannot
            # fit on the command line
            append_config(node.datadir, ["avaproof={}".format(too_many_utxos)])
            node.assert_start_raises_init_error(
                self.extra_args[0] + [
                    "-avamasterkey=cND2ZvtabDbJ1gucx9GWH6XT9kgTAqfb6cotPt5Q5CyxVDhid2EN",
                ],
                expected_msg="Error: The avalanche proof has too many utxos.",
                match=ErrorMatch.PARTIAL_REGEX,
            )

        # Master private key mismatch
        random_privkey = ECKey()
        random_privkey.generate()
        node.assert_start_raises_init_error(
            self.extra_args[0] + [
                "-avaproof={}".format(proof),
                "-avamasterkey={}".format(
                    bytes_to_wif(random_privkey.get_bytes())),
            ],
            expected_msg=
            "Error: The master key does not match the proof public key.",
        )

        self.log.info("Bad delegation should be rejected at startup")

        def check_delegation_init_error(delegation, message):
            node.assert_start_raises_init_error(
                self.extra_args[0] + [
                    "-avadelegation={}".format(delegation),
                    "-avaproof={}".format(proof),
                    "-avamasterkey={}".format(
                        bytes_to_wif(delegated_privkey.get_bytes())),
                ],
                expected_msg="Error: " + message,
            )

        check_delegation_init_error(
            AvalancheDelegation().serialize().hex(),
            "The delegation does not match the proof.")

        bad_level_sig = FromHex(AvalancheDelegation(), delegation)
        # Tweak some key to cause the signature to mismatch
        bad_level_sig.levels[-2].pubkey = bytes.fromhex(proof_master)
        check_delegation_init_error(
            bad_level_sig.serialize().hex(),
            "The avalanche delegation has invalid signatures.")

        node.assert_start_raises_init_error(
            self.extra_args[0] + [
                "-avadelegation={}".format(delegation),
                "-avaproof={}".format(proof),
                "-avamasterkey={}".format(
                    bytes_to_wif(random_privkey.get_bytes())),
            ],
            expected_msg=
            "Error: The master key does not match the delegation public key.",
        )
Example 24
class FullBlockTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True
        # This is a consensus block test, we don't care about tx policy
        self.extra_args = [['-noparkdeepreorg',
                            '-maxreorgdepth=-1', '-acceptnonstdtxn=1']]

    def run_test(self):
        node = self.nodes[0]  # convenience reference to the node

        self.bootstrap_p2p()  # Add one p2p connection to the node

        self.block_heights = {}
        self.coinbase_key = ECKey()
        self.coinbase_key.generate()
        self.coinbase_pubkey = self.coinbase_key.get_pubkey().get_bytes()
        self.tip = None
        self.blocks = {}
        self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
        self.block_heights[self.genesis_hash] = 0
        self.spendable_outputs = []

        # Create a new block
        b0 = self.next_block(0)
        self.save_spendable_output()
        self.sync_blocks([b0])

        # Allow the block to mature
        blocks = []
        for i in range(99):
            blocks.append(self.next_block(5000 + i))
            self.save_spendable_output()
        self.sync_blocks(blocks)
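        # (Added note, not in the original test) together with b0 this gives the first
        # coinbase 100 confirmations, the coinbase maturity depth required before its
        # output can be spent in the blocks below.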

        # collect spendable outputs now to avoid cluttering the code later on
        out = []
        for i in range(33):
            out.append(self.get_spendable_output())

        # Start by building a couple of blocks on top (which output is spent is
        # in parentheses):
        #     genesis -> b1 (0) -> b2 (1)
        b1 = self.next_block(1, spend=out[0])
        self.save_spendable_output()

        b2 = self.next_block(2, spend=out[1])
        self.save_spendable_output()

        self.sync_blocks([b1, b2])

        # Fork like this:
        #
        #     genesis -> b1 (0) -> b2 (1)
        #                      \-> b3 (1)
        #
        # Nothing should happen at this point. We saw b2 first so it takes
        # priority.
        self.log.info("Don't reorg to a chain of the same length")
        self.move_tip(1)
        b3 = self.next_block(3, spend=out[1])
        txout_b3 = b3.vtx[1]
        self.sync_blocks([b3], False)

        # Now we add another block to make the alternative chain longer.
        #
        #     genesis -> b1 (0) -> b2 (1)
        #                      \-> b3 (1) -> b4 (2)
        self.log.info("Reorg to a longer chain")
        b4 = self.next_block(4, spend=out[2])
        self.sync_blocks([b4])

        # ... and back to the first chain.
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                      \-> b3 (1) -> b4 (2)
        self.move_tip(2)
        b5 = self.next_block(5, spend=out[2])
        self.save_spendable_output()
        self.sync_blocks([b5], False)

        self.log.info("Reorg back to the original chain")
        b6 = self.next_block(6, spend=out[3])
        self.sync_blocks([b6], True)

        # Try to create a fork that double-spends
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                                          \-> b7 (2) -> b8 (4)
        #                      \-> b3 (1) -> b4 (2)
        self.log.info(
            "Reject a chain with a double spend, even if it is longer")
        self.move_tip(5)
        b7 = self.next_block(7, spend=out[2])
        self.sync_blocks([b7], False)

        b8 = self.next_block(8, spend=out[4])
        self.sync_blocks([b8], False, reconnect=True)

        # Try to create a block that has too much fee
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                                                    \-> b9 (4)
        #                      \-> b3 (1) -> b4 (2)
        self.log.info(
            "Reject a block where the miner creates too much coinbase reward")
        self.move_tip(6)
        b9 = self.next_block(9, spend=out[4], additional_coinbase_value=1)
        self.sync_blocks([b9], success=False,
                         reject_reason='bad-cb-amount', reconnect=True)

        # Create a fork that ends in a block with too much fee (the one that causes the reorg)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b10 (3) -> b11 (4)
        #                      \-> b3 (1) -> b4 (2)
        self.log.info(
            "Reject a chain where the miner creates too much coinbase reward, even if the chain is longer")
        self.move_tip(5)
        b10 = self.next_block(10, spend=out[3])
        self.sync_blocks([b10], False)

        b11 = self.next_block(11, spend=out[4], additional_coinbase_value=1)
        self.sync_blocks([b11], success=False,
                         reject_reason='bad-cb-amount', reconnect=True)

        # Try again, but with a valid fork first
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b14 (5)
        #                      \-> b3 (1) -> b4 (2)
        self.log.info(
            "Reject a chain where the miner creates too much coinbase reward, even if the chain is longer (on a forked chain)")
        self.move_tip(5)
        b12 = self.next_block(12, spend=out[3])
        self.save_spendable_output()
        b13 = self.next_block(13, spend=out[4])
        self.save_spendable_output()
        b14 = self.next_block(14, spend=out[5], additional_coinbase_value=1)
        self.sync_blocks([b12, b13, b14], success=False,
                         reject_reason='bad-cb-amount', reconnect=True)

        # New tip should be b13.
        assert_equal(node.getbestblockhash(), b13.hash)

        self.log.info("Skipped sigops tests")
        # tests were moved to feature_block_sigops.py
        self.move_tip(13)
        b15 = self.next_block(15)
        self.save_spendable_output()
        self.sync_blocks([b15], True)

        # Attempt to spend a transaction created on a different fork
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b17 (b3.vtx[1])
        #                      \-> b3 (1) -> b4 (2)
        self.log.info("Reject a block with a spend from a re-org'ed out tx")
        self.move_tip(15)
        b17 = self.next_block(17, spend=txout_b3)
        self.sync_blocks([b17], success=False,
                         reject_reason='bad-txns-inputs-missingorspent', reconnect=True)

        # Attempt to spend a transaction created on a different fork (on a fork this time)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5)
        #                                                                \-> b18 (b3.vtx[1]) -> b19 (6)
        #                      \-> b3 (1) -> b4 (2)
        self.log.info(
            "Reject a block with a spend from a re-org'ed out tx (on a forked chain)")
        self.move_tip(13)
        b18 = self.next_block(18, spend=txout_b3)
        self.sync_blocks([b18], False)

        b19 = self.next_block(19, spend=out[6])
        self.sync_blocks([b19], success=False,
                         reject_reason='bad-txns-inputs-missingorspent', reconnect=True)

        # Attempt to spend a coinbase at depth too low
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b20 (7)
        #                      \-> b3 (1) -> b4 (2)
        self.log.info("Reject a block spending an immature coinbase.")
        self.move_tip(15)
        b20 = self.next_block(20, spend=out[7])
        self.sync_blocks([b20], success=False,
                         reject_reason='bad-txns-premature-spend-of-coinbase')

        # Attempt to spend a coinbase at depth too low (on a fork this time)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5)
        #                                                                \-> b21 (6) -> b22 (5)
        #                      \-> b3 (1) -> b4 (2)
        self.log.info(
            "Reject a block spending an immature coinbase (on a forked chain)")
        self.move_tip(13)
        b21 = self.next_block(21, spend=out[6])
        self.sync_blocks([b21], False)

        b22 = self.next_block(22, spend=out[5])
        self.sync_blocks([b22], success=False,
                         reject_reason='bad-txns-premature-spend-of-coinbase')

        # Create a block on either side of LEGACY_MAX_BLOCK_SIZE and make sure it's accepted/rejected
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6)
        #                                                                           \-> b24 (6) -> b25 (7)
        #                      \-> b3 (1) -> b4 (2)
        self.log.info("Accept a block of size LEGACY_MAX_BLOCK_SIZE")
        self.move_tip(15)
        b23 = self.next_block(23, spend=out[6])
        tx = CTransaction()
        script_length = LEGACY_MAX_BLOCK_SIZE - len(b23.serialize()) - 69
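        # (Added note, not in the original test) the 69 appears to account for the fixed
        # transaction overhead: 59 bytes of version/input/output/locktime fields, a
        # 5-byte CompactSize prefix for the large scriptPubKey, and the 5-byte
        # OP_PUSHDATA4 prefix CScript adds for the zero-filled push, so the padded block
        # serializes to exactly LEGACY_MAX_BLOCK_SIZE.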
        script_output = CScript([b'\x00' * script_length])
        tx.vout.append(CTxOut(0, script_output))
        tx.vin.append(CTxIn(COutPoint(b23.vtx[1].sha256, 0)))
        b23 = self.update_block(23, [tx])
        # Make sure the math above worked out to produce a max-sized block
        assert_equal(len(b23.serialize()), LEGACY_MAX_BLOCK_SIZE)
        self.sync_blocks([b23], True)
        self.save_spendable_output()

        # Create blocks with a coinbase input script size out of range
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7)
        #                                                                           \-> ... (6) -> ... (7)
        #                      \-> b3 (1) -> b4 (2)
        self.log.info(
            "Reject a block with coinbase input script size out of range")
        self.move_tip(15)
        b26 = self.next_block(26, spend=out[6])
        b26.vtx[0].vin[0].scriptSig = b'\x00'
        b26.vtx[0].rehash()
        # update_block causes the merkle root to get updated, even with no new
        # transactions, and updates the required state.
        b26 = self.update_block(26, [])
        self.sync_blocks([b26], success=False,
                         reject_reason='bad-cb-length', reconnect=True)

        # Extend the b26 chain to make sure bitcoind isn't accepting b26
        b27 = self.next_block(27, spend=out[7])
        self.sync_blocks([b27], False)

        # Now try a too-large-coinbase script
        self.move_tip(15)
        b28 = self.next_block(28, spend=out[6])
        b28.vtx[0].vin[0].scriptSig = b'\x00' * 101
        b28.vtx[0].rehash()
        b28 = self.update_block(28, [])
        self.sync_blocks([b28], success=False,
                         reject_reason='bad-cb-length', reconnect=True)

        # Extend the b28 chain to make sure bitcoind isn't accepting b28
        b29 = self.next_block(29, spend=out[7])
        self.sync_blocks([b29], False)

        # b30 has a max-sized coinbase scriptSig.
        self.move_tip(23)
        b30 = self.next_block(30)
        b30.vtx[0].vin[0].scriptSig = b'\x00' * 100
        b30.vtx[0].rehash()
        b30 = self.update_block(30, [])
        self.sync_blocks([b30], True)
        self.save_spendable_output()

        self.log.info("Skipped sigops tests")
        # tests were moved to feature_block_sigops.py
        b31 = self.next_block(31)
        self.save_spendable_output()
        b33 = self.next_block(33)
        self.save_spendable_output()
        b35 = self.next_block(35)
        self.save_spendable_output()
        self.sync_blocks([b31, b33, b35], True)

        # Check spending of a transaction in a block which failed to connect
        #
        # b6  (3)
        # b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10)
        #                                                                                     \-> b37 (11)
        #                                                                                     \-> b38 (11/37)
        #

        # save 37's spendable output, but then double-spend out11 to invalidate
        # the block
        self.log.info(
            "Reject a block spending transaction from a block which failed to connect")
        self.move_tip(35)
        b37 = self.next_block(37, spend=out[11])
        txout_b37 = b37.vtx[1]
        tx = self.create_and_sign_transaction(out[11], 0)
        b37 = self.update_block(37, [tx])
        self.sync_blocks([b37], success=False,
                         reject_reason='bad-txns-inputs-missingorspent', reconnect=True)

        # attempt to spend b37's first non-coinbase tx, at which point b37 was
        # still considered valid
        self.move_tip(35)
        b38 = self.next_block(38, spend=txout_b37)
        self.sync_blocks([b38], success=False,
                         reject_reason='bad-txns-inputs-missingorspent', reconnect=True)

        self.log.info("Skipped sigops tests")
        # tests were moved to feature_block_sigops.py
        self.move_tip(35)
        b39 = self.next_block(39)
        self.save_spendable_output()
        b41 = self.next_block(41)
        self.sync_blocks([b39, b41], True)

        # Fork off of b39 to create a constant base again
        #
        # b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13)
        #                                                                  \-> b41 (12)
        #
        self.move_tip(39)
        b42 = self.next_block(42, spend=out[12])
        self.save_spendable_output()

        b43 = self.next_block(43, spend=out[13])
        self.save_spendable_output()
        self.sync_blocks([b42, b43], True)

        # Test a number of really invalid scenarios
        #
        #  -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b44 (14)
        #                                                                                   \-> ??? (15)

        # The next few blocks are going to be created "by hand" since they'll do funky things, such as having
        # the first transaction be non-coinbase, etc.  The purpose of b44 is to
        # make sure this works.
        self.log.info("Build block 44 manually")
        height = self.block_heights[self.tip.sha256] + 1
        coinbase = create_coinbase(height, self.coinbase_pubkey)
        b44 = CBlock()
        b44.nTime = self.tip.nTime + 1
        b44.hashPrevBlock = self.tip.sha256
        b44.nBits = 0x207fffff
        b44.vtx.append(coinbase)
        b44.hashMerkleRoot = b44.calc_merkle_root()
        b44.solve()
        self.tip = b44
        self.block_heights[b44.sha256] = height
        self.blocks[44] = b44
        self.sync_blocks([b44], True)

        self.log.info("Reject a block with a non-coinbase as the first tx")
        non_coinbase = self.create_tx(out[15], 0, 1)
        b45 = CBlock()
        b45.nTime = self.tip.nTime + 1
        b45.hashPrevBlock = self.tip.sha256
        b45.nBits = 0x207fffff
        b45.vtx.append(non_coinbase)
        b45.hashMerkleRoot = b45.calc_merkle_root()
        b45.calc_sha256()
        b45.solve()
        self.block_heights[b45.sha256] = self.block_heights[
            self.tip.sha256] + 1
        self.tip = b45
        self.blocks[45] = b45
        self.sync_blocks([b45], success=False,
                         reject_reason='bad-cb-missing', reconnect=True)

        self.log.info("Reject a block with no transactions")
        self.move_tip(44)
        b46 = CBlock()
        b46.nTime = b44.nTime + 1
        b46.hashPrevBlock = b44.sha256
        b46.nBits = 0x207fffff
        b46.vtx = []
        b46.hashMerkleRoot = 0
        b46.solve()
        self.block_heights[b46.sha256] = self.block_heights[b44.sha256] + 1
        self.tip = b46
        assert 46 not in self.blocks
        self.blocks[46] = b46
        self.sync_blocks([b46], success=False,
                         reject_reason='bad-cb-missing', reconnect=True)

        self.log.info("Reject a block with invalid work")
        self.move_tip(44)
        b47 = self.next_block(47, solve=False)
        target = uint256_from_compact(b47.nBits)
        while b47.sha256 < target:
            b47.nNonce += 1
            b47.rehash()
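        # The loop above grinds nNonce until the block hash is at or above the target,
        # i.e. until the header no longer satisfies proof of work.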
        self.sync_blocks([b47], False, request_block=False)

        self.log.info("Reject a block with a timestamp >2 hours in the future")
        self.move_tip(44)
        b48 = self.next_block(48, solve=False)
        b48.nTime = int(time.time()) + 60 * 60 * 3
        b48.solve()
        self.sync_blocks([b48], False, request_block=False)

        self.log.info("Reject a block with invalid merkle hash")
        self.move_tip(44)
        b49 = self.next_block(49)
        b49.hashMerkleRoot += 1
        b49.solve()
        self.sync_blocks([b49], success=False,
                         reject_reason='bad-txnmrklroot', reconnect=True)

        self.log.info("Reject a block with incorrect POW limit")
        self.move_tip(44)
        b50 = self.next_block(50)
        b50.nBits = b50.nBits - 1
        b50.solve()
        self.sync_blocks([b50], False, request_block=False, reconnect=True)

        self.log.info("Reject a block with two coinbase transactions")
        self.move_tip(44)
        b51 = self.next_block(51)
        cb2 = create_coinbase(51, self.coinbase_pubkey)
        b51 = self.update_block(51, [cb2])
        self.sync_blocks([b51], success=False,
                         reject_reason='bad-tx-coinbase', reconnect=True)

        self.log.info("Reject a block with duplicate transactions")
        self.move_tip(44)
        b52 = self.next_block(52, spend=out[15])
        b52 = self.update_block(52, [b52.vtx[1]])
        self.sync_blocks([b52], success=False,
                         reject_reason='tx-duplicate', reconnect=True)

        # Test block timestamps
        #  -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15)
        #                                                                                   \-> b54 (15)
        #
        self.move_tip(43)
        b53 = self.next_block(53, spend=out[14])
        self.sync_blocks([b53], False)
        self.save_spendable_output()

        self.log.info("Reject a block with timestamp before MedianTimePast")
        b54 = self.next_block(54, spend=out[15])
        b54.nTime = b35.nTime - 1
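        # (Added note, not in the original test) a block's nTime must be strictly greater
        # than the median timestamp of the previous 11 blocks (MedianTimePast); b54's
        # time is chosen to land at or below that median, while b55 below reuses
        # b35.nTime and is accepted.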
        b54.solve()
        self.sync_blocks([b54], False, request_block=False)

        # valid timestamp
        self.move_tip(53)
        b55 = self.next_block(55, spend=out[15])
        b55.nTime = b35.nTime
        self.update_block(55, [])
        self.sync_blocks([b55], True)
        self.save_spendable_output()

        # Test Merkle tree malleability
        #
        # -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57p2 (16)
        #                                                \-> b57   (16)
        #                                                \-> b56p2 (16)
        #                                                \-> b56   (16)
        #
        # Merkle tree malleability (CVE-2012-2459): repeating sequences of transactions in a block without
        #                           affecting the merkle root of a block, while still invalidating it.
        #                           See:  src/consensus/merkle.h
        #
        #  b57 has three txns:  coinbase, tx, tx1.  The merkle root computation will duplicate tx.
        #  Result:  OK
        #
        #  b56 copies b57 but duplicates tx1 and does not recalculate the block hash.  So it has a valid merkle
        #  root but duplicate transactions.
        #  Result:  Fails
        #
        #  b57p2 has six transactions in its merkle tree:
        #       - coinbase, tx, tx1, tx2, tx3, tx4
        #  Merkle root calculation will duplicate as necessary.
        #  Result:  OK.
        #
        #  b56p2 copies b57p2 but adds both tx3 and tx4.  The purpose of the test is to make sure the code catches
        #  duplicate txns that are not next to one another with the "bad-txns-duplicate" error (which indicates
        #  that the error was caught early, avoiding a DOS vulnerability.)
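        # Illustrative sketch (added, not part of the original test): the merkle root
        # computation implicitly duplicates the last hash of any odd-length level, which
        # is exactly the behaviour CVE-2012-2459 exploits. A toy version, using hashlib
        # directly so it does not depend on the test framework (never called here):
        def toy_merkle_root(txid_hashes):
            import hashlib

            def dsha256(data):
                return hashlib.sha256(hashlib.sha256(data).digest()).digest()

            level = list(txid_hashes)
            while len(level) > 1:
                if len(level) % 2:
                    # the implicit duplication that makes [cb, tx, tx1] and
                    # [cb, tx, tx1, tx1] hash to the same root
                    level.append(level[-1])
                level = [dsha256(left + right)
                         for left, right in zip(level[::2], level[1::2])]
            return level[0]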

        # b57 - a good block with 2 txs, don't submit until end
        self.move_tip(55)
        b57 = self.next_block(57)
        tx = self.create_and_sign_transaction(out[16], 1)
        tx1 = self.create_tx(tx, 0, 1)
        b57 = self.update_block(57, [tx, tx1])

        # b56 - copy b57, add a duplicate tx
        self.log.info(
            "Reject a block with a duplicate transaction in the Merkle Tree (but with a valid Merkle Root)")
        self.move_tip(55)
        b56 = copy.deepcopy(b57)
        self.blocks[56] = b56
        assert_equal(len(b56.vtx), 3)
        b56 = self.update_block(56, [b57.vtx[2]])
        assert_equal(b56.hash, b57.hash)
        self.sync_blocks([b56], success=False,
                         reject_reason='bad-txns-duplicate', reconnect=True)

        # b57p2 - a good block with 6 tx'es, don't submit until end
        self.move_tip(55)
        b57p2 = self.next_block("57p2")
        tx = self.create_and_sign_transaction(out[16], 1)
        tx1 = self.create_tx(tx, 0, 1)
        tx2 = self.create_tx(tx1, 0, 1)
        tx3 = self.create_tx(tx2, 0, 1)
        tx4 = self.create_tx(tx3, 0, 1)
        b57p2 = self.update_block("57p2", [tx, tx1, tx2, tx3, tx4])

        # b56p2 - copy b57p2, duplicate two non-consecutive tx's
        self.log.info(
            "Reject a block with two duplicate transactions in the Merkle Tree (but with a valid Merkle Root)")
        self.move_tip(55)
        b56p2 = copy.deepcopy(b57p2)
        self.blocks["b56p2"] = b56p2
        assert_equal(len(b56p2.vtx), 6)
        b56p2 = self.update_block("b56p2", b56p2.vtx[4:6], reorder=False)
        assert_equal(b56p2.hash, b57p2.hash)
        self.sync_blocks([b56p2], success=False,
                         reject_reason='bad-txns-duplicate', reconnect=True)

        self.move_tip("57p2")
        self.sync_blocks([b57p2], True)

        self.move_tip(57)
        # The tip is not updated because 57p2 was seen first
        self.sync_blocks([b57], False)
        self.save_spendable_output()

        # Test a few invalid tx types
        #
        # -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17)
        #                                                                                    \-> ??? (17)
        #

        # tx with prevout.n out of range
        self.log.info(
            "Reject a block with a transaction with prevout.n out of range")
        self.move_tip(57)
        b58 = self.next_block(58, spend=out[17])
        tx = CTransaction()
        assert(len(out[17].vout) < 42)
        tx.vin.append(
            CTxIn(COutPoint(out[17].sha256, 42), CScript([OP_TRUE]), 0xffffffff))
        tx.vout.append(CTxOut(0, b""))
        pad_tx(tx)
        tx.calc_sha256()
        b58 = self.update_block(58, [tx])
        self.sync_blocks([b58], success=False,
                         reject_reason='bad-txns-inputs-missingorspent', reconnect=True)

        # tx with output value > input value
        self.log.info(
            "Reject a block with a transaction with outputs > inputs")
        self.move_tip(57)
        b59 = self.next_block(59)
        tx = self.create_and_sign_transaction(out[17], 51 * COIN)
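        # (Added note, not in the original test) out[17] is one of the ~50 COIN coinbase
        # outputs collected earlier, so a 51 * COIN output exceeds the available input
        # value and triggers bad-txns-in-belowout.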
        b59 = self.update_block(59, [tx])
        self.sync_blocks([b59], success=False,
                         reject_reason='bad-txns-in-belowout', reconnect=True)

        # reset to good chain
        self.move_tip(57)
        b60 = self.next_block(60, spend=out[17])
        self.sync_blocks([b60], True)
        self.save_spendable_output()

        # Test BIP30
        #
        # -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17)
        #                                                                                    \-> b61 (18)
        #
        # Blocks are not allowed to contain a transaction whose id matches that of an earlier,
        # not-fully-spent transaction in the same chain. To test, make identical coinbases;
        # the second one should be rejected.
        #
        self.log.info(
            "Reject a block with a transaction with a duplicate hash of a previous transaction (BIP30)")
        self.move_tip(60)
        b61 = self.next_block(61, spend=out[18])
        # Equalize the coinbases
        b61.vtx[0].vin[0].scriptSig = b60.vtx[0].vin[0].scriptSig
        b61.vtx[0].rehash()
        b61 = self.update_block(61, [])
        assert_equal(b60.vtx[0].serialize(), b61.vtx[0].serialize())
        self.sync_blocks([b61], success=False,
                         reject_reason='bad-txns-BIP30', reconnect=True)

        # Test tx.isFinal is properly rejected (not an exhaustive tx.isFinal test, that should be in data-driven transaction tests)
        #
        #   -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17)
        #                                                                                     \-> b62 (18)
        #
        self.log.info(
            "Reject a block with a transaction with a nonfinal locktime")
        self.move_tip(60)
        b62 = self.next_block(62)
        tx = CTransaction()
        tx.nLockTime = 0xffffffff  # this locktime is non-final
        # don't set nSequence
        tx.vin.append(CTxIn(COutPoint(out[18].sha256, 0)))
        tx.vout.append(CTxOut(0, CScript([OP_TRUE])))
        assert tx.vin[0].nSequence < 0xffffffff
        tx.calc_sha256()
        b62 = self.update_block(62, [tx])
        self.sync_blocks([b62], success=False,
                         reject_reason='bad-txns-nonfinal')

        # Test a non-final coinbase is also rejected
        #
        #   -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17)
        #                                                                                     \-> b63 (-)
        #
        self.log.info(
            "Reject a block with a coinbase transaction with a nonfinal locktime")
        self.move_tip(60)
        b63 = self.next_block(63)
        b63.vtx[0].nLockTime = 0xffffffff
        b63.vtx[0].vin[0].nSequence = 0xDEADBEEF
        b63.vtx[0].rehash()
        b63 = self.update_block(63, [])
        self.sync_blocks([b63], success=False,
                         reject_reason='bad-txns-nonfinal')

        #  This checks that a block with a bloated VARINT between the block_header and the array of tx such that
        #  the block is > LEGACY_MAX_BLOCK_SIZE with the bloated varint, but <= LEGACY_MAX_BLOCK_SIZE without the bloated varint,
        #  does not cause a subsequent, identical block with canonical encoding to be rejected.  The test does not
        #  care whether the bloated block is accepted or rejected; it only cares that the second block is accepted.
        #
        #  What matters is that the receiving node should not reject the bloated block, and then reject the canonical
        #  block on the basis that it's the same as an already-rejected block (which would be a consensus failure.)
        #
        #  -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18)
        #                                                                                        \
        #                                                                                         b64a (18)
        #  b64a is a bloated block (non-canonical varint)
        #  b64 is a good block (same as b64a but w/ canonical varint)
        #
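        #  (Added note, not in the original test) CBrokenBlock presumably serializes its
        #  transaction count with the widest CompactSize form (0xff followed by 8 bytes)
        #  instead of the canonical single byte, which is 8 bytes larger and explains the
        #  LEGACY_MAX_BLOCK_SIZE + 8 assertion below.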
        self.log.info(
            "Accept a valid block even if a bloated version of the block has previously been sent")
        self.move_tip(60)
        regular_block = self.next_block("64a", spend=out[18])

        # make it a "broken_block," with non-canonical serialization
        b64a = CBrokenBlock(regular_block)
        b64a.initialize(regular_block)
        self.blocks["64a"] = b64a
        self.tip = b64a
        tx = CTransaction()

        # use canonical serialization to calculate size
        script_length = LEGACY_MAX_BLOCK_SIZE - \
            len(b64a.normal_serialize()) - 69
        script_output = CScript([b'\x00' * script_length])
        tx.vout.append(CTxOut(0, script_output))
        tx.vin.append(CTxIn(COutPoint(b64a.vtx[1].sha256, 0)))
        b64a = self.update_block("64a", [tx])
        assert_equal(len(b64a.serialize()), LEGACY_MAX_BLOCK_SIZE + 8)
        self.sync_blocks([b64a], success=False,
                         reject_reason='non-canonical ReadCompactSize()')

        # bitcoind doesn't disconnect us for sending a bloated block, but if we subsequently
        # resend the header message, it won't send us the getdata message again. Just
        # disconnect and reconnect and then call sync_blocks.
        # TODO: improve this test to be less dependent on P2P DOS behaviour.
        node.disconnect_p2ps()
        self.reconnect_p2p()

        self.move_tip(60)
        b64 = CBlock(b64a)
        b64.vtx = copy.deepcopy(b64a.vtx)
        assert_equal(b64.hash, b64a.hash)
        assert_equal(len(b64.serialize()), LEGACY_MAX_BLOCK_SIZE)
        self.blocks[64] = b64
        b64 = self.update_block(64, [])
        self.sync_blocks([b64], True)
        self.save_spendable_output()

        # Spend an output created in the block itself
        #
        # -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19)
        #
        self.log.info(
            "Accept a block with a transaction spending an output created in the same block")
        self.move_tip(64)
        b65 = self.next_block(65)
        tx1 = self.create_and_sign_transaction(out[19], out[19].vout[0].nValue)
        tx2 = self.create_and_sign_transaction(tx1, 0)
        b65 = self.update_block(65, [tx1, tx2])
        self.sync_blocks([b65], True)
        self.save_spendable_output()

        # Attempt to double-spend a transaction created in a block
        #
        # -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19)
        #                                                                                    \-> b67 (20)
        #
        #
        self.log.info(
            "Reject a block with a transaction double spending a transaction created in the same block")
        self.move_tip(65)
        b67 = self.next_block(67)
        tx1 = self.create_and_sign_transaction(out[20], out[20].vout[0].nValue)
        tx2 = self.create_and_sign_transaction(tx1, 1)
        tx3 = self.create_and_sign_transaction(tx1, 2)
        b67 = self.update_block(67, [tx1, tx2, tx3])
        self.sync_blocks([b67], success=False,
                         reject_reason='bad-txns-inputs-missingorspent', reconnect=True)

        # More tests of block subsidy
        #
        # -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20)
        #                                                                                    \-> b68 (20)
        #
        # b68 - coinbase with an extra 10 satoshis,
        #       creates a tx that has 9 satoshis from out[20] go to fees
        #       this fails because the coinbase is trying to claim 1 satoshi too much in fees
        #
        # b69 - coinbase with extra 10 satoshis, and a tx that gives a 10 satoshi fee
        #       this succeeds
        #
        self.log.info(
            "Reject a block trying to claim too much subsidy in the coinbase transaction")
        self.move_tip(65)
        b68 = self.next_block(68, additional_coinbase_value=10)
        tx = self.create_and_sign_transaction(
            out[20], out[20].vout[0].nValue - 9)
        b68 = self.update_block(68, [tx])
        self.sync_blocks([b68], success=False,
                         reject_reason='bad-cb-amount', reconnect=True)

        self.log.info(
            "Accept a block claiming the correct subsidy in the coinbase transaction")
        self.move_tip(65)
        b69 = self.next_block(69, additional_coinbase_value=10)
        tx = self.create_and_sign_transaction(
            out[20], out[20].vout[0].nValue - 10)
        self.update_block(69, [tx])
        self.sync_blocks([b69], True)
        self.save_spendable_output()

        # Test spending the outpoint of a non-existent transaction
        #
        # -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20)
        #                                                                                    \-> b70 (21)
        #
        self.log.info(
            "Reject a block containing a transaction spending from a non-existent input")
        self.move_tip(69)
        b70 = self.next_block(70, spend=out[21])
        bogus_tx = CTransaction()
        bogus_tx.sha256 = uint256_from_str(
            b"23c70ed7c0506e9178fc1a987f40a33946d4ad4c962b5ae3a52546da53af0c5c")
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(bogus_tx.sha256, 0), b"", 0xffffffff))
        tx.vout.append(CTxOut(1, b""))
        pad_tx(tx)
        b70 = self.update_block(70, [tx])
        self.sync_blocks([b70], success=False,
                         reject_reason='bad-txns-inputs-missingorspent', reconnect=True)

        # Test accepting an invalid block which has the same hash as a valid one (via merkle tree tricks)
        #
        #  -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20) -> b72 (21)
        #                                                                                      \-> b71 (21)
        #
        # b72 is a good block.
        # b71 is a copy of b72, but re-adds one of its transactions.  However,
        # it has the same hash as b72.
        self.log.info(
            "Reject a block containing a duplicate transaction but with the same Merkle root (Merkle tree malleability")
        self.move_tip(69)
        b72 = self.next_block(72)
        tx1 = self.create_and_sign_transaction(out[21], 2)
        tx2 = self.create_and_sign_transaction(tx1, 1)
        b72 = self.update_block(72, [tx1, tx2])  # now tip is 72
        b71 = copy.deepcopy(b72)
        # add duplicate last transaction
        b71.vtx.append(b72.vtx[-1])
        # b71 builds off b69
        self.block_heights[b71.sha256] = self.block_heights[b69.sha256] + 1
        self.blocks[71] = b71

        assert_equal(len(b71.vtx), 4)
        assert_equal(len(b72.vtx), 3)
        assert_equal(b72.sha256, b71.sha256)

        self.move_tip(71)
        self.sync_blocks([b71], success=False,
                         reject_reason='bad-txns-duplicate', reconnect=True)

        self.move_tip(72)
        self.sync_blocks([b72], True)
        self.save_spendable_output()

        self.log.info("Skipped sigops tests")
        # tests were moved to feature_block_sigops.py
        b75 = self.next_block(75)
        self.save_spendable_output()
        b76 = self.next_block(76)
        self.save_spendable_output()
        self.sync_blocks([b75, b76], True)

        # Test transaction resurrection
        #
        # -> b77 (24) -> b78 (25) -> b79 (26)
        #            \-> b80 (25) -> b81 (26) -> b82 (27)
        #
        #    b78 creates a tx, which is spent in b79. After b82, both should be in mempool
        #
        #    The tx'es must be unsigned and pass the node's mempool policy.  They are unsigned for the
        #    rather obscure reason that the Python signature code does not distinguish between
        #    Low-S and High-S values (whereas the bitcoin code has custom code which does so);
        #    as a result, the odds are only 50% that the Python code will produce the right
        #    value and the transaction will be accepted into the mempool. Until we modify the
        #    test framework to support low-S signing, we are out of luck.
        #
        #    To get around this issue, we construct transactions which are not signed and which
        #    spend to OP_TRUE.  If the standard-ness rules change, this test would need to be
        #    updated.  (Perhaps to spend to a P2SH OP_TRUE script)
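        #    (Added note, not in the original test) for context: for any ECDSA signature
        #    (r, s), the mirrored (r, N - s) is equally valid, where N is the secp256k1
        #    group order; node policy only accepts the "low-S" form, roughly
        #        s = s if s <= N // 2 else N - s
        #    and the Python signer used by this framework does not guarantee that form.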
        self.log.info("Test transaction resurrection during a re-org")
        self.move_tip(76)
        b77 = self.next_block(77)
        tx77 = self.create_and_sign_transaction(out[24], 10 * COIN)
        b77 = self.update_block(77, [tx77])
        self.sync_blocks([b77], True)
        self.save_spendable_output()

        b78 = self.next_block(78)
        tx78 = self.create_tx(tx77, 0, 9 * COIN)
        b78 = self.update_block(78, [tx78])
        self.sync_blocks([b78], True)

        b79 = self.next_block(79)
        tx79 = self.create_tx(tx78, 0, 8 * COIN)
        b79 = self.update_block(79, [tx79])
        self.sync_blocks([b79], True)

        # mempool should be empty
        assert_equal(len(self.nodes[0].getrawmempool()), 0)

        self.move_tip(77)
        b80 = self.next_block(80, spend=out[25])
        self.sync_blocks([b80], False, request_block=False)
        self.save_spendable_output()

        b81 = self.next_block(81, spend=out[26])
        # other chain is same length
        self.sync_blocks([b81], False, request_block=False)
        self.save_spendable_output()

        b82 = self.next_block(82, spend=out[27])
        # now this chain is longer, triggers re-org
        self.sync_blocks([b82], True)
        self.save_spendable_output()

        # now check that tx78 and tx79 have been put back into the peer's
        # mempool
        mempool = self.nodes[0].getrawmempool()
        assert_equal(len(mempool), 2)
        assert tx78.hash in mempool
        assert tx79.hash in mempool

        # Test invalid opcodes in dead execution paths.
        #
        #  -> b81 (26) -> b82 (27) -> b83 (28)
        #
        self.log.info(
            "Accept a block with invalid opcodes in dead execution paths")
        b83 = self.next_block(83)
        op_codes = [OP_IF, INVALIDOPCODE, OP_ELSE, OP_TRUE, OP_ENDIF]
        script = CScript(op_codes)
        tx1 = self.create_and_sign_transaction(
            out[28], out[28].vout[0].nValue, script)

        tx2 = self.create_and_sign_transaction(tx1, 0, CScript([OP_TRUE]))
        tx2.vin[0].scriptSig = CScript([OP_FALSE])
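        # (Added note, not in the original test) OP_FALSE in the scriptSig makes the
        # OP_IF branch holding INVALIDOPCODE dead code, so only the OP_ELSE / OP_TRUE
        # path is ever executed and the block remains valid.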
        tx2.rehash()

        b83 = self.update_block(83, [tx1, tx2])
        self.sync_blocks([b83], True)
        self.save_spendable_output()

        # Reorg on/off blocks that have OP_RETURN in them (and try to spend them)
        #
        #  -> b81 (26) -> b82 (27) -> b83 (28) -> b84 (29) -> b87 (30) -> b88 (31)
        #                                    \-> b85 (29) -> b86 (30)            \-> b89a (32)
        #
        self.log.info("Test re-orging blocks with OP_RETURN in them")
        b84 = self.next_block(84)
        tx1 = self.create_tx(out[29], 0, 0, CScript([OP_RETURN]))
        vout_offset = len(tx1.vout)
        tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
        tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
        tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
        tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
        tx1.calc_sha256()
        self.sign_tx(tx1, out[29])
        tx1.rehash()
        tx2 = self.create_tx(tx1, vout_offset, 0, CScript([OP_RETURN]))
        tx2.vout.append(CTxOut(0, CScript([OP_RETURN])))
        tx3 = self.create_tx(tx1, vout_offset + 1, 0, CScript([OP_RETURN]))
        tx3.vout.append(CTxOut(0, CScript([OP_TRUE])))
        tx4 = self.create_tx(tx1, vout_offset + 2, 0, CScript([OP_TRUE]))
        tx4.vout.append(CTxOut(0, CScript([OP_RETURN])))
        tx5 = self.create_tx(tx1, vout_offset + 3, 0, CScript([OP_RETURN]))

        b84 = self.update_block(84, [tx1, tx2, tx3, tx4, tx5])
        self.sync_blocks([b84], True)
        self.save_spendable_output()

        self.move_tip(83)
        b85 = self.next_block(85, spend=out[29])
        self.sync_blocks([b85], False)  # other chain is same length

        b86 = self.next_block(86, spend=out[30])
        self.sync_blocks([b86], True)

        self.move_tip(84)
        b87 = self.next_block(87, spend=out[30])
        self.sync_blocks([b87], False)  # other chain is same length
        self.save_spendable_output()

        b88 = self.next_block(88, spend=out[31])
        self.sync_blocks([b88], True)
        self.save_spendable_output()

        # trying to spend the OP_RETURN output is rejected
        b89a = self.next_block("89a", spend=out[32])
        tx = self.create_tx(tx1, 0, 0, CScript([OP_TRUE]))
        b89a = self.update_block("89a", [tx])
        self.sync_blocks([b89a], success=False,
                         reject_reason='bad-txns-inputs-missingorspent', reconnect=True)

        self.log.info(
            "Test a re-org of one week's worth of blocks (1088 blocks)")

        self.move_tip(88)
        LARGE_REORG_SIZE = 1088
        blocks = []
        spend = out[32]
        for i in range(89, LARGE_REORG_SIZE + 89):
            b = self.next_block(i, spend)
            tx = CTransaction()
            script_length = LEGACY_MAX_BLOCK_SIZE - len(b.serialize()) - 69
            script_output = CScript([b'\x00' * script_length])
            tx.vout.append(CTxOut(0, script_output))
            tx.vin.append(CTxIn(COutPoint(b.vtx[1].sha256, 0)))
            b = self.update_block(i, [tx])
            assert_equal(len(b.serialize()), LEGACY_MAX_BLOCK_SIZE)
            blocks.append(b)
            self.save_spendable_output()
            spend = self.get_spendable_output()

        self.sync_blocks(blocks, True, timeout=960)
        chain1_tip = i

        # now create alt chain of same length
        self.move_tip(88)
        blocks2 = []
        for i in range(89, LARGE_REORG_SIZE + 89):
            blocks2.append(self.next_block("alt" + str(i)))
        self.sync_blocks(blocks2, False, request_block=False)

        # extend alt chain to trigger re-org
        block = self.next_block("alt" + str(chain1_tip + 1))
        self.sync_blocks([block], True, timeout=960)

        # ... and re-org back to the first chain
        self.move_tip(chain1_tip)
        block = self.next_block(chain1_tip + 1)
        self.sync_blocks([block], False, request_block=False)
        block = self.next_block(chain1_tip + 2)
        self.sync_blocks([block], True, timeout=960)

    # Helper methods
    ################

    def add_transactions_to_block(self, block, tx_list):
        [tx.rehash() for tx in tx_list]
        block.vtx.extend(tx_list)

    # this is a little handier to use than the version in blocktools.py
    def create_tx(self, spend_tx, n, value, script=CScript([OP_TRUE])):
        return create_tx_with_script(
            spend_tx, n, amount=value, script_pub_key=script)

    # sign a transaction, using the key we know about
    # this signs input 0 in tx, which is assumed to be spending output n in
    # spend_tx
    def sign_tx(self, tx, spend_tx):
        scriptPubKey = bytearray(spend_tx.vout[0].scriptPubKey)
        if (scriptPubKey[0] == OP_TRUE):  # an anyone-can-spend
            tx.vin[0].scriptSig = CScript()
            return
        sighash = SignatureHashForkId(
            spend_tx.vout[0].scriptPubKey, tx, 0, SIGHASH_ALL | SIGHASH_FORKID, spend_tx.vout[0].nValue)
        tx.vin[0].scriptSig = CScript(
            [self.coinbase_key.sign_ecdsa(sighash) + bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))])

    def create_and_sign_transaction(
            self, spend_tx, value, script=CScript([OP_TRUE])):
        tx = self.create_tx(spend_tx, 0, value, script)
        self.sign_tx(tx, spend_tx)
        tx.rehash()
        return tx

    def next_block(self, number, spend=None, additional_coinbase_value=0,
                   script=CScript([OP_TRUE]), solve=True):
        if self.tip is None:
            base_block_hash = self.genesis_hash
            block_time = int(time.time()) + 1
        else:
            base_block_hash = self.tip.sha256
            block_time = self.tip.nTime + 1
        # First create the coinbase
        height = self.block_heights[base_block_hash] + 1
        coinbase = create_coinbase(height, self.coinbase_pubkey)
        coinbase.vout[0].nValue += additional_coinbase_value
        coinbase.rehash()
        if spend is None:
            block = create_block(base_block_hash, coinbase, block_time)
        else:
            # all but one satoshi to fees
            coinbase.vout[0].nValue += spend.vout[0].nValue - 1
            coinbase.rehash()
            block = create_block(base_block_hash, coinbase, block_time)
            # spend 1 satoshi
            tx = self.create_tx(spend, 0, 1, script)
            self.sign_tx(tx, spend)
            self.add_transactions_to_block(block, [tx])
            block.hashMerkleRoot = block.calc_merkle_root()
        if solve:
            block.solve()
        self.tip = block
        self.block_heights[block.sha256] = height
        assert number not in self.blocks
        self.blocks[number] = block
        return block

    # save the current tip so it can be spent by a later block
    def save_spendable_output(self):
        self.log.debug(f"saving spendable output {self.tip.vtx[0]}")
        self.spendable_outputs.append(self.tip)

    # get an output that we previously marked as spendable
    def get_spendable_output(self):
        self.log.debug(f"getting spendable output {self.spendable_outputs[0].vtx[0]}")
        return self.spendable_outputs.pop(0).vtx[0]

    # move the tip back to a previous block
    def move_tip(self, number):
        self.tip = self.blocks[number]

    # adds transactions to the block and updates state
    def update_block(self, block_number, new_transactions, reorder=True):
        block = self.blocks[block_number]
        self.add_transactions_to_block(block, new_transactions)
        old_sha256 = block.sha256
        if reorder:
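            # (Added note, not in the original test) make_conform_to_ctor presumably
            # re-sorts the non-coinbase transactions into canonical (txid-sorted) order,
            # as required by the CTOR consensus rule, before the merkle root is recomputed.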
            make_conform_to_ctor(block)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()
        # Update the internal state just like in next_block
        self.tip = block
        if block.sha256 != old_sha256:
            self.block_heights[block.sha256] = self.block_heights[old_sha256]
            del self.block_heights[old_sha256]
        self.blocks[block_number] = block
        return block

    def bootstrap_p2p(self):
        """Add a P2P connection to the node.

        Helper to connect and wait for version handshake."""
        self.nodes[0].add_p2p_connection(P2PDataStore())
        # We need to wait for the initial getheaders from the peer before we
        # start populating our blockstore. If we don't, then we may run ahead
        # to the next subtest before we receive the getheaders. We'd then send
        # an INV for the next block and receive two getheaders - one for the
        # IBD and one for the INV. We'd respond to both and could get
        # unexpectedly disconnected if the DoS score for that error is 50.
        self.nodes[0].p2p.wait_for_getheaders(timeout=5)

    def reconnect_p2p(self):
        """Tear down and bootstrap the P2P connection to the node.

        The node gets disconnected several times in this test. This helper
        method reconnects the p2p and restarts the network thread."""
        self.nodes[0].disconnect_p2ps()
        self.bootstrap_p2p()

    def sync_blocks(self, blocks, success=True, reject_reason=None,
                    request_block=True, reconnect=False, timeout=60):
        """Sends blocks to test node. Syncs and verifies that tip has advanced to most recent block.

        Call with success = False if the tip shouldn't advance to the most recent block."""
        self.nodes[0].p2p.send_blocks_and_test(blocks, self.nodes[0], success=success,
                                               reject_reason=reject_reason, request_block=request_block, timeout=timeout, expect_disconnect=reconnect)

        if reconnect:
            self.reconnect_p2p()
Example 25
    def run_test(self):
        node = self.nodes[0]

        addrkey0 = node.get_deterministic_priv_key()
        blockhashes = node.generatetoaddress(2, addrkey0.address)
        stakes = create_coinbase_stakes(node, [blockhashes[0]], addrkey0.key)

        privkey = ECKey()
        privkey.generate()

        proof_master = privkey.get_pubkey().get_bytes().hex()
        proof_sequence = 42
        proof_expiration = 2000000000
        proof = node.buildavalancheproof(proof_sequence, proof_expiration,
                                         proof_master, stakes)

        nodeid = add_interface_node(node)

        def check_addavalanchenode_error(error_code,
                                         error_message,
                                         nodeid=nodeid,
                                         proof=proof,
                                         pubkey=proof_master,
                                         delegation=None):
            assert_raises_rpc_error(
                error_code,
                error_message,
                node.addavalanchenode,
                nodeid,
                pubkey,
                proof,
                delegation,
            )

        self.log.info("Invalid proof")
        check_addavalanchenode_error(-22,
                                     "Proof must be an hexadecimal string",
                                     proof="not a proof")
        check_addavalanchenode_error(-22,
                                     "Proof has invalid format",
                                     proof="f000")
        no_stake = node.buildavalancheproof(proof_sequence, proof_expiration,
                                            proof_master, [])
        check_addavalanchenode_error(-8,
                                     "The proof is invalid: no-stake",
                                     proof=no_stake)

        self.log.info("Node doesn't exist")
        check_addavalanchenode_error(-8,
                                     f"The node does not exist: {nodeid + 1}",
                                     nodeid=nodeid + 1)

        self.log.info("Invalid delegation")
        dg_privkey = ECKey()
        dg_privkey.generate()
        dg_pubkey = dg_privkey.get_pubkey().get_bytes()
        check_addavalanchenode_error(
            -22,
            "Delegation must be an hexadecimal string",
            pubkey=dg_pubkey.hex(),
            delegation="not a delegation")
        check_addavalanchenode_error(-22,
                                     "Delegation has invalid format",
                                     pubkey=dg_pubkey.hex(),
                                     delegation="f000")

        self.log.info("Delegation mismatch with the proof")
        delegation_wrong_proofid = AvalancheDelegation()
        check_addavalanchenode_error(
            -8,
            "The delegation does not match the proof",
            pubkey=dg_pubkey.hex(),
            delegation=delegation_wrong_proofid.serialize().hex())

        proofobj = FromHex(AvalancheProof(), proof)
        delegation = AvalancheDelegation(
            limited_proofid=proofobj.limited_proofid,
            proof_master=proofobj.master,
        )

        self.log.info("Delegation with bad signature")
        bad_level = AvalancheDelegationLevel(pubkey=dg_pubkey, )
        delegation.levels.append(bad_level)
        check_addavalanchenode_error(-8,
                                     "The delegation is invalid",
                                     pubkey=dg_pubkey.hex(),
                                     delegation=delegation.serialize().hex())

        delegation.levels = []
        level = AvalancheDelegationLevel(pubkey=dg_pubkey,
                                         sig=privkey.sign_schnorr(
                                             hash256(delegation.getid() +
                                                     ser_string(dg_pubkey))))
        delegation.levels.append(level)
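        # (Added note, not in the original test) each level is signed by the key above it
        # in the chain: here the proof master key signs hash256(delegation id ||
        # serialized delegated pubkey), yielding a valid one-level delegation to dg_pubkey.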

        self.log.info("Key mismatch with the proof")
        check_addavalanchenode_error(
            -5,
            "The public key does not match the proof",
            pubkey=dg_pubkey.hex(),
        )

        self.log.info("Key mismatch with the delegation")
        random_privkey = ECKey()
        random_privkey.generate()
        random_pubkey = random_privkey.get_pubkey()
        check_addavalanchenode_error(
            -5,
            "The public key does not match the delegation",
            pubkey=random_pubkey.get_bytes().hex(),
            delegation=delegation.serialize().hex(),
        )

        self.log.info("Happy path")
        assert node.addavalanchenode(nodeid, proof_master, proof)
        # Adding several times is OK
        assert node.addavalanchenode(nodeid, proof_master, proof)

        # Use a hardcoded proof. This will help detect proof format changes.
        # Generated using:
        # stakes = create_coinbase_stakes(node, [blockhashes[1]], addrkey0.key)
        # hardcoded_proof = node.buildavalancheproof(
        #    proof_sequence, proof_expiration, random_pubkey, stakes)
        hardcoded_pubkey = "037d20fcfe118296bb53f0a8f87c864e7b9831c4fcd7c6a0bb9a58e0e0f53d5cbc"
        hardcoded_proof = (
            "2a00000000000000009435770000000021037d20fcfe118296bb53f0a8f87c864e"
            "7b9831c4fcd7c6a0bb9a58e0e0f53d5cbc01683ef49024cf25bb55775b327f5e68"
            "c79da3a7824dc03df5623c96f4a60158f90000000000f902950000000095010000"
            "210227d85ba011276cf25b51df6a188b75e604b38770a462b2d0e9fb2fc839ef5d"
            "3f612834ef0e2545d6359e9f34967c2bb69cb88fe246fed716d998f3f62eba1ef6"
            "6a547606a7ac14c1b5697f4acc20853b3f99954f4f7b6e9bf8a085616d3adfc7")
        assert node.addavalanchenode(nodeid, hardcoded_pubkey, hardcoded_proof)

        self.log.info("Add a node with a valid delegation")
        assert node.addavalanchenode(
            nodeid,
            dg_pubkey.hex(),
            proof,
            delegation.serialize().hex(),
        )

        self.log.info("Several nodes can share a proof")
        nodeid2 = add_interface_node(node)
        assert node.addavalanchenode(nodeid2, proof_master, proof)
Example 26
    def run_test(self):
        self.log.info("Setup wallets...")
        # w0 is a wallet with coinbase rewards
        w0 = self.nodes[0].get_wallet_rpc(self.default_wallet_name)
        # w1 is a regular wallet
        self.nodes[1].createwallet(wallet_name="w1")
        w1 = self.nodes[1].get_wallet_rpc("w1")
        # w2 contains the private keys for w3
        self.nodes[1].createwallet(wallet_name="w2", blank=True)
        w2 = self.nodes[1].get_wallet_rpc("w2")
        xpriv = "tprv8ZgxMBicQKsPfHCsTwkiM1KT56RXbGGTqvc2hgqzycpwbHqqpcajQeMRZoBD35kW4RtyCemu6j34Ku5DEspmgjKdt2qe4SvRch5Kk8B8A2v"
        xpub = "tpubD6NzVbkrYhZ4YkEfMbRJkQyZe7wTkbTNRECozCtJPtdLRn6cT1QKb8yHjwAPcAr26eHBFYs5iLiFFnCbwPRsncCKUKCfubHDMGKzMVcN1Jg"
        if self.options.descriptors:
            w2.importdescriptors([{
                "desc": descsum_create("wpkh(" + xpriv + "/0/0/*)"),
                "timestamp": "now",
                "range": [0, 100],
                "active": True
            },{
                "desc": descsum_create("wpkh(" + xpriv + "/0/1/*)"),
                "timestamp": "now",
                "range": [0, 100],
                "active": True,
                "internal": True
            }])
        else:
            w2.sethdseed(True)

        # w3 is a watch-only wallet, based on w2
        self.nodes[1].createwallet(wallet_name="w3", disable_private_keys=True)
        w3 = self.nodes[1].get_wallet_rpc("w3")
        if self.options.descriptors:
            # Match the privkeys in w2 for descriptors
            res = w3.importdescriptors([{
                "desc": descsum_create("wpkh(" + xpub + "/0/0/*)"),
                "timestamp": "now",
                "range": [0, 100],
                "keypool": True,
                "active": True,
                "watchonly": True
            },{
                "desc": descsum_create("wpkh(" + xpub + "/0/1/*)"),
                "timestamp": "now",
                "range": [0, 100],
                "keypool": True,
                "active": True,
                "internal": True,
                "watchonly": True
            }])
            assert_equal(res, [{"success": True}, {"success": True}])

        for _ in range(3):
            a2_receive = w2.getnewaddress()
            if not self.options.descriptors:
                # Because legacy wallets use exclusively hardened derivation, we can't do a ranged import like we do for descriptors
                a2_change = w2.getrawchangeaddress() # doesn't actually use change derivation
                res = w3.importmulti([{
                    "desc": w2.getaddressinfo(a2_receive)["desc"],
                    "timestamp": "now",
                    "keypool": True,
                    "watchonly": True
                },{
                    "desc": w2.getaddressinfo(a2_change)["desc"],
                    "timestamp": "now",
                    "keypool": True,
                    "internal": True,
                    "watchonly": True
                }])
                assert_equal(res, [{"success": True}, {"success": True}])

        w0.sendtoaddress(a2_receive, 10) # fund w3
        self.generate(self.nodes[0], 1)
        self.sync_blocks()

        if not self.options.descriptors:
            # w4 has private keys enabled, but only contains watch-only keys (from w2)
            # This is legacy wallet behavior only as descriptor wallets don't allow watchonly and non-watchonly things in the same wallet.
            self.nodes[1].createwallet(wallet_name="w4", disable_private_keys=False)
            w4 = self.nodes[1].get_wallet_rpc("w4")
            for _ in range(3):
                a2_receive = w2.getnewaddress()
                res = w4.importmulti([{
                    "desc": w2.getaddressinfo(a2_receive)["desc"],
                    "timestamp": "now",
                    "keypool": False,
                    "watchonly": True
                }])
                assert_equal(res, [{"success": True}])

            w0.sendtoaddress(a2_receive, 10) # fund w4
            self.generate(self.nodes[0], 1)
            self.sync_blocks()

        self.log.info("Send to address...")
        self.test_send(from_wallet=w0, to_wallet=w1, amount=1)
        self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=True)

        self.log.info("Don't broadcast...")
        res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=False)
        assert res["hex"]

        self.log.info("Return PSBT...")
        res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, psbt=True)
        assert res["psbt"]

        self.log.info("Create transaction that spends to address, but don't broadcast...")
        self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=False)
        # conf_target & estimate_mode can be set as argument or option
        res1 = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_conf_target=1, arg_estimate_mode="economical", add_to_wallet=False)
        res2 = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=1, estimate_mode="economical", add_to_wallet=False)
        assert_equal(self.nodes[1].decodepsbt(res1["psbt"])["fee"],
                     self.nodes[1].decodepsbt(res2["psbt"])["fee"])
        # but not at the same time
        for mode in ["unset", "economical", "conservative"]:
            self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_conf_target=1, arg_estimate_mode="economical",
                conf_target=1, estimate_mode=mode, add_to_wallet=False,
                expect_error=(-8, "Pass conf_target and estimate_mode either as arguments or in the options object, but not both"))

        self.log.info("Create PSBT from watch-only wallet w3, sign with w2...")
        res = self.test_send(from_wallet=w3, to_wallet=w1, amount=1)
        res = w2.walletprocesspsbt(res["psbt"])
        assert res["complete"]

        if not self.options.descriptors:
            # Descriptor wallets do not allow mixed watch-only and non-watch-only things in the same wallet.
            # This is specifically testing that w4 ignores its own private keys and creates a psbt with send
            # which is not something that needs to be tested in descriptor wallets.
            self.log.info("Create PSBT from wallet w4 with watch-only keys, sign with w2...")
            self.test_send(from_wallet=w4, to_wallet=w1, amount=1, expect_error=(-4, "Insufficient funds"))
            res = self.test_send(from_wallet=w4, to_wallet=w1, amount=1, include_watching=True, add_to_wallet=False)
            res = w2.walletprocesspsbt(res["psbt"])
            assert res["complete"]

        self.log.info("Create OP_RETURN...")
        self.test_send(from_wallet=w0, to_wallet=w1, amount=1)
        self.test_send(from_wallet=w0, data="Hello World", expect_error=(-8, "Data must be hexadecimal string (not 'Hello World')"))
        self.test_send(from_wallet=w0, data="23")
        res = self.test_send(from_wallet=w3, data="23")
        res = w2.walletprocesspsbt(res["psbt"])
        assert res["complete"]

        self.log.info("Test setting explicit fee rate")
        res1 = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate="1", add_to_wallet=False)
        res2 = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate="1", add_to_wallet=False)
        assert_equal(self.nodes[1].decodepsbt(res1["psbt"])["fee"], self.nodes[1].decodepsbt(res2["psbt"])["fee"])

        res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=7, add_to_wallet=False)
        fee = self.nodes[1].decodepsbt(res["psbt"])["fee"]
        assert_fee_amount(fee, Decimal(len(res["hex"]) / 2), Decimal("0.00007"))
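        # assert_fee_amount() takes the rate in BTC per 1000 bytes, so 0.00007
        # corresponds to the 7 sat/vB requested above; the transaction size is
        # taken as len(hex) / 2 bytes of serialized data.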

        # "unset" and None are treated the same for estimate_mode
        res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=2, estimate_mode="unset", add_to_wallet=False)
        fee = self.nodes[1].decodepsbt(res["psbt"])["fee"]
        assert_fee_amount(fee, Decimal(len(res["hex"]) / 2), Decimal("0.00002"))

        res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=4.531, add_to_wallet=False)
        fee = self.nodes[1].decodepsbt(res["psbt"])["fee"]
        assert_fee_amount(fee, Decimal(len(res["hex"]) / 2), Decimal("0.00004531"))

        res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=3, add_to_wallet=False)
        fee = self.nodes[1].decodepsbt(res["psbt"])["fee"]
        assert_fee_amount(fee, Decimal(len(res["hex"]) / 2), Decimal("0.00003"))

        # Test that passing fee_rate as both an argument and an option raises.
        self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=1, fee_rate=1, add_to_wallet=False,
                       expect_error=(-8, "Pass the fee_rate either as an argument, or in the options object, but not both"))

        assert_raises_rpc_error(-8, "Use fee_rate (sat/vB) instead of feeRate", w0.send, {w1.getnewaddress(): 1}, 6, "conservative", 1, {"feeRate": 0.01})

        assert_raises_rpc_error(-3, "Unexpected key totalFee", w0.send, {w1.getnewaddress(): 1}, 6, "conservative", 1, {"totalFee": 0.01})

        for target, mode in product([-1, 0, 1009], ["economical", "conservative"]):
            self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=target, estimate_mode=mode,
                expect_error=(-8, "Invalid conf_target, must be between 1 and 1008"))  # max value of 1008 per src/policy/fees.h
        msg = 'Invalid estimate_mode parameter, must be one of: "unset", "economical", "conservative"'
        for target, mode in product([-1, 0], ["btc/kb", "sat/b"]):
            self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=target, estimate_mode=mode, expect_error=(-8, msg))
        for mode in ["", "foo", Decimal("3.141592")]:
            self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=0.1, estimate_mode=mode, expect_error=(-8, msg))
            self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_conf_target=0.1, arg_estimate_mode=mode, expect_error=(-8, msg))
            assert_raises_rpc_error(-8, msg, w0.send, {w1.getnewaddress(): 1}, 0.1, mode)

        for mode in ["economical", "conservative"]:
            for k, v in {"string": "true", "bool": True, "object": {"foo": "bar"}}.items():
                self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=v, estimate_mode=mode,
                    expect_error=(-3, f"Expected type number for conf_target, got {k}"))

        # Test setting explicit fee rate just below the minimum of 1 sat/vB.
        self.log.info("Explicit fee rate raises RPC error 'fee rate too low' if fee_rate of 0.999 is passed")
        msg = "Fee rate (0.999 sat/vB) is lower than the minimum fee rate setting (1.000 sat/vB)"
        self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=0.999, expect_error=(-4, msg))
        self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=0.999, expect_error=(-4, msg))

        self.log.info("Explicit fee rate raises if invalid fee_rate is passed")
        # Test fee_rate with zero values.
        msg = "Fee rate (0.000 sat/vB) is lower than the minimum fee rate setting (1.000 sat/vB)"
        for zero_value in [0, 0.000, 0.00000000, "0", "0.000", "0.00000000"]:
            self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=zero_value, expect_error=(-4, msg))
            self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=zero_value, expect_error=(-4, msg))
        msg = "Invalid amount"
        # Test fee_rate values that don't pass fixed-point parsing checks.
        for invalid_value in ["", 0.000000001, 1e-09, 1.111111111, 1111111111111111, "31.999999999999999999999"]:
            self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=invalid_value, expect_error=(-3, msg))
            self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=invalid_value, expect_error=(-3, msg))
        # Test fee_rate values that cannot be represented in sat/vB.
        for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999, "0.0001", "0.00000001", "0.00099999", "31.99999999"]:
            self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=invalid_value, expect_error=(-3, msg))
            self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=invalid_value, expect_error=(-3, msg))
        # Test fee_rate out of range (negative number).
        msg = "Amount out of range"
        self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=-1, expect_error=(-3, msg))
        self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=-1, expect_error=(-3, msg))
        # Test type error.
        msg = "Amount is not a number or string"
        for invalid_value in [True, {"foo": "bar"}]:
            self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=invalid_value, expect_error=(-3, msg))
            self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=invalid_value, expect_error=(-3, msg))

        # TODO: Return hex if fee rate is below -maxmempool
        # res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=0.1, estimate_mode="sat/b", add_to_wallet=False)
        # assert res["hex"]
        # hex = res["hex"]
        # res = self.nodes[0].testmempoolaccept([hex])
        # assert not res[0]["allowed"]
        # assert_equal(res[0]["reject-reason"], "...") # low fee
        # assert_fee_amount(fee, Decimal(len(res["hex"]) / 2), Decimal("0.000001"))

        self.log.info("If inputs are specified, do not automatically add more...")
        res = self.test_send(from_wallet=w0, to_wallet=w1, amount=51, inputs=[], add_to_wallet=False)
        assert res["complete"]
        utxo1 = w0.listunspent()[0]
        assert_equal(utxo1["amount"], 50)
        self.test_send(from_wallet=w0, to_wallet=w1, amount=51, inputs=[utxo1],
                       expect_error=(-4, "Insufficient funds"))
        self.test_send(from_wallet=w0, to_wallet=w1, amount=51, inputs=[utxo1], add_inputs=False,
                       expect_error=(-4, "Insufficient funds"))
        res = self.test_send(from_wallet=w0, to_wallet=w1, amount=51, inputs=[utxo1], add_inputs=True, add_to_wallet=False)
        assert res["complete"]

        self.log.info("Manual change address and position...")
        self.test_send(from_wallet=w0, to_wallet=w1, amount=1, change_address="not an address",
                       expect_error=(-5, "Change address must be a valid bitcoin address"))
        change_address = w0.getnewaddress()
        res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=False, change_address=change_address)
        assert res["complete"]
        res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=False, change_address=change_address, change_position=0)
        assert res["complete"]
        assert_equal(self.nodes[0].decodepsbt(res["psbt"])["tx"]["vout"][0]["scriptPubKey"]["address"], change_address)
        res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=False, change_type="legacy", change_position=0)
        assert res["complete"]
        change_address = self.nodes[0].decodepsbt(res["psbt"])["tx"]["vout"][0]["scriptPubKey"]["address"]
        assert change_address[0] == "m" or change_address[0] == "n"
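        # On regtest, legacy base58 P2PKH addresses start with 'm' or 'n'.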

        self.log.info("Set lock time...")
        height = self.nodes[0].getblockchaininfo()["blocks"]
        res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, locktime=height + 1)
        assert res["complete"]
        assert res["txid"]
        txid = res["txid"]
        # Although the wallet finishes the transaction, it can't be added to the mempool yet:
        hex = self.nodes[0].gettransaction(res["txid"])["hex"]
        res = self.nodes[0].testmempoolaccept([hex])
        assert not res[0]["allowed"]
        assert_equal(res[0]["reject-reason"], "non-final")
        # It shouldn't be confirmed in the next block
        self.generate(self.nodes[0], 1)
        assert_equal(self.nodes[0].gettransaction(txid)["confirmations"], 0)
        # The mempool should allow it now:
        res = self.nodes[0].testmempoolaccept([hex])
        assert res[0]["allowed"]
        # Don't wait for wallet to add it to the mempool:
        res = self.nodes[0].sendrawtransaction(hex)
        self.generate(self.nodes[0], 1)
        assert_equal(self.nodes[0].gettransaction(txid)["confirmations"], 1)

        self.log.info("Lock unspents...")
        utxo1 = w0.listunspent()[0]
        assert_greater_than(utxo1["amount"], 1)
        res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, inputs=[utxo1], add_to_wallet=False, lock_unspents=True)
        assert res["complete"]
        locked_coins = w0.listlockunspent()
        assert_equal(len(locked_coins), 1)
        # Locked coins are automatically unlocked when manually selected
        res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, inputs=[utxo1], add_to_wallet=False)
        assert res["complete"]

        self.log.info("Replaceable...")
        res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=True, replaceable=True)
        assert res["complete"]
        assert_equal(self.nodes[0].gettransaction(res["txid"])["bip125-replaceable"], "yes")
        res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=True, replaceable=False)
        assert res["complete"]
        assert_equal(self.nodes[0].gettransaction(res["txid"])["bip125-replaceable"], "no")

        self.log.info("Subtract fee from output")
        self.test_send(from_wallet=w0, to_wallet=w1, amount=1, subtract_fee_from_outputs=[0])

        self.log.info("Include unsafe inputs")
        self.nodes[1].createwallet(wallet_name="w5")
        w5 = self.nodes[1].get_wallet_rpc("w5")
        self.test_send(from_wallet=w0, to_wallet=w5, amount=2)
        self.test_send(from_wallet=w5, to_wallet=w0, amount=1, expect_error=(-4, "Insufficient funds"))
        res = self.test_send(from_wallet=w5, to_wallet=w0, amount=1, include_unsafe=True)
        assert res["complete"]

        self.log.info("External outputs")
        eckey = ECKey()
        eckey.generate()
        privkey = bytes_to_wif(eckey.get_bytes())

        self.nodes[1].createwallet("extsend")
        ext_wallet = self.nodes[1].get_wallet_rpc("extsend")
        self.nodes[1].createwallet("extfund")
        ext_fund = self.nodes[1].get_wallet_rpc("extfund")

        # Make a weird but signable script. sh(pkh()) descriptor accomplishes this
        desc = descsum_create("sh(pkh({}))".format(privkey))
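        # sh(pkh(KEY)) wraps a pay-to-pubkey-hash script inside P2SH. The sending
        # wallet (ext_wallet) knows nothing about this script, so spending the
        # resulting UTXO later requires passing explicit solving data.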
        if self.options.descriptors:
            res = ext_fund.importdescriptors([{"desc": desc, "timestamp": "now"}])
        else:
            res = ext_fund.importmulti([{"desc": desc, "timestamp": "now"}])
        assert res[0]["success"]
        addr = self.nodes[0].deriveaddresses(desc)[0]
        addr_info = ext_fund.getaddressinfo(addr)

        self.nodes[0].sendtoaddress(addr, 10)
        self.nodes[0].sendtoaddress(ext_wallet.getnewaddress(), 10)
        self.generate(self.nodes[0], 6)
        self.sync_all()
        ext_utxo = ext_fund.listunspent(addresses=[addr])[0]

        # An external input without solving data should result in an error
        self.test_send(from_wallet=ext_wallet, to_wallet=self.nodes[0], amount=15, inputs=[ext_utxo], add_inputs=True, psbt=True, include_watching=True, expect_error=(-4, "Insufficient funds"))

        # But funding should work when the solving data is provided
        res = self.test_send(from_wallet=ext_wallet, to_wallet=self.nodes[0], amount=15, inputs=[ext_utxo], add_inputs=True, psbt=True, include_watching=True, solving_data={"pubkeys": [addr_info['pubkey']], "scripts": [addr_info["embedded"]["scriptPubKey"]]})
        signed = ext_wallet.walletprocesspsbt(res["psbt"])
        signed = ext_fund.walletprocesspsbt(res["psbt"])
        assert signed["complete"]
        self.nodes[0].finalizepsbt(signed["psbt"])

        res = self.test_send(from_wallet=ext_wallet, to_wallet=self.nodes[0], amount=15, inputs=[ext_utxo], add_inputs=True, psbt=True, include_watching=True, solving_data={"descriptors": [desc]})
        signed = ext_wallet.walletprocesspsbt(res["psbt"])
        signed = ext_fund.walletprocesspsbt(res["psbt"])
        assert signed["complete"]
        self.nodes[0].finalizepsbt(signed["psbt"])
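
A minimal standalone sketch (not part of the test above; the wallet and node handles are hypothetical) of the send-RPC pattern this example exercises: have the wallet fund and build a PSBT without broadcasting it, then read the fee back from the decoded PSBT.

def sketch_send_psbt(from_wallet, to_wallet, node, fee_rate=2):
    # Build (but do not broadcast or store) a funded PSBT at the given fee rate;
    # the options mirror the ones used throughout the example above.
    res = from_wallet.send(
        outputs={to_wallet.getnewaddress(): 1},
        options={"add_to_wallet": False, "psbt": True, "fee_rate": fee_rate},
    )
    assert res["complete"]
    # decodepsbt reports the absolute fee of the funded transaction.
    return node.decodepsbt(res["psbt"])["fee"]
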
Example no. 27
    def run_test(self):
        self.log.info("Mining blocks...")
        self.generate(self.nodes[0], COINBASE_MATURITY + 1)

        # address
        address1 = self.nodes[0].getnewaddress()
        # pubkey
        address2 = self.nodes[0].getnewaddress()
        # privkey
        eckey = ECKey()
        eckey.generate()
        address3_privkey = bytes_to_wif(eckey.get_bytes())
        address3 = key_to_p2wpkh(eckey.get_pubkey().get_bytes())
        self.nodes[0].importprivkey(address3_privkey)

        # Check only one address
        address_info = self.nodes[0].getaddressinfo(address1)
        assert_equal(address_info['ismine'], True)

        self.sync_all()

        # Node 1 sync test
        assert_equal(self.nodes[1].getblockcount(), COINBASE_MATURITY + 1)

        # Address Test - before import
        address_info = self.nodes[1].getaddressinfo(address1)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)

        address_info = self.nodes[1].getaddressinfo(address2)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)

        address_info = self.nodes[1].getaddressinfo(address3)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)

        # Send funds to self
        txnid1 = self.nodes[0].sendtoaddress(address1, 0.1)
        self.generate(self.nodes[0], 1)
        rawtxn1 = self.nodes[0].gettransaction(txnid1)['hex']
        proof1 = self.nodes[0].gettxoutproof([txnid1])

        txnid2 = self.nodes[0].sendtoaddress(address2, 0.05)
        self.generate(self.nodes[0], 1)
        rawtxn2 = self.nodes[0].gettransaction(txnid2)['hex']
        proof2 = self.nodes[0].gettxoutproof([txnid2])

        txnid3 = self.nodes[0].sendtoaddress(address3, 0.025)
        self.generate(self.nodes[0], 1)
        rawtxn3 = self.nodes[0].gettransaction(txnid3)['hex']
        proof3 = self.nodes[0].gettxoutproof([txnid3])

        self.sync_all()

        # Import with no affiliated address
        assert_raises_rpc_error(-5, "No addresses", self.nodes[1].importprunedfunds, rawtxn1, proof1)
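        # importprunedfunds only records a transaction when the wallet already
        # watches at least one of its outputs; node1 does not know any of the
        # addresses yet, hence "No addresses".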

        balance1 = self.nodes[1].getbalance()
        assert_equal(balance1, Decimal(0))

        # Import with affiliated address with no rescan
        self.nodes[1].createwallet('wwatch', disable_private_keys=True)
        wwatch = self.nodes[1].get_wallet_rpc('wwatch')
        wwatch.importaddress(address=address2, rescan=False)
        wwatch.importprunedfunds(rawtransaction=rawtxn2, txoutproof=proof2)
        assert [tx for tx in wwatch.listtransactions(include_watchonly=True) if tx['txid'] == txnid2]

        # Import with private key with no rescan
        w1 = self.nodes[1].get_wallet_rpc(self.default_wallet_name)
        w1.importprivkey(privkey=address3_privkey, rescan=False)
        w1.importprunedfunds(rawtxn3, proof3)
        assert [tx for tx in w1.listtransactions() if tx['txid'] == txnid3]
        balance3 = w1.getbalance()
        assert_equal(balance3, Decimal('0.025'))

        # Addresses Test - after import
        address_info = w1.getaddressinfo(address1)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)
        address_info = wwatch.getaddressinfo(address2)
        if self.options.descriptors:
            assert_equal(address_info['iswatchonly'], False)
            assert_equal(address_info['ismine'], True)
        else:
            assert_equal(address_info['iswatchonly'], True)
            assert_equal(address_info['ismine'], False)
        address_info = w1.getaddressinfo(address3)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], True)

        # Remove transactions
        assert_raises_rpc_error(-8, "Transaction does not exist in wallet.", w1.removeprunedfunds, txnid1)
        assert not [tx for tx in w1.listtransactions(include_watchonly=True) if tx['txid'] == txnid1]

        wwatch.removeprunedfunds(txnid2)
        assert not [tx for tx in wwatch.listtransactions(include_watchonly=True) if tx['txid'] == txnid2]

        w1.removeprunedfunds(txnid3)
        assert not [tx for tx in w1.listtransactions(include_watchonly=True) if tx['txid'] == txnid3]

        # Check various RPC parameter validation errors
        assert_raises_rpc_error(-22, "TX decode failed", w1.importprunedfunds, b'invalid tx'.hex(), proof1)
        assert_raises_rpc_error(-5, "Transaction given doesn't exist in proof", w1.importprunedfunds, rawtxn2, proof1)

        mb = from_hex(CMerkleBlock(), proof1)
        mb.header.hashMerkleRoot = 0xdeadbeef  # cause mismatch between merkle root and merkle block
        assert_raises_rpc_error(-5, "Something wrong with merkleblock", w1.importprunedfunds, rawtxn1, mb.serialize().hex())

        mb = from_hex(CMerkleBlock(), proof1)
        mb.header.nTime += 1  # modify arbitrary block header field to change block hash
        assert_raises_rpc_error(-5, "Block not found in chain", w1.importprunedfunds, rawtxn1, mb.serialize().hex())
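
A minimal standalone sketch (handles and names hypothetical) of the importprunedfunds round trip this example tests: export a transaction plus its merkle proof from a node that has the block, and import it into a watching wallet without any rescan.

def sketch_import_pruned_funds(full_node, watch_wallet, watch_address):
    # Fund the watched address and confirm the transaction.
    txid = full_node.sendtoaddress(watch_address, 1)
    full_node.generate(1)
    raw_tx = full_node.gettransaction(txid)["hex"]
    proof = full_node.gettxoutproof([txid])
    # The wallet must already watch the address, otherwise importprunedfunds
    # fails with "No addresses".
    watch_wallet.importaddress(address=watch_address, rescan=False)
    watch_wallet.importprunedfunds(rawtransaction=raw_tx, txoutproof=proof)
    return txid
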
Example no. 28
    def run_test(self):
        node = self.nodes[0]

        self.log.info('Start with empty mempool, and 200 blocks')
        self.mempool_size = 0
        assert_equal(node.getblockcount(), 200)
        assert_equal(node.getmempoolinfo()['size'], self.mempool_size)
        coins = node.listunspent()

        self.log.info('Should not accept garbage to testmempoolaccept')
        assert_raises_rpc_error(
            -3, 'Expected type array, got string',
            lambda: node.testmempoolaccept(rawtxs='ff00baar'))
        assert_raises_rpc_error(
            -8, 'Array must contain between 1 and 25 transactions.',
            lambda: node.testmempoolaccept(rawtxs=['ff22'] * 26))
        assert_raises_rpc_error(
            -8, 'Array must contain between 1 and 25 transactions.',
            lambda: node.testmempoolaccept(rawtxs=[]))
        assert_raises_rpc_error(
            -22, 'TX decode failed',
            lambda: node.testmempoolaccept(rawtxs=['ff00baar']))

        self.log.info('A transaction already in the blockchain')
        coin = coins.pop()  # Pick a random coin(base) to spend
        raw_tx_in_block = node.signrawtransactionwithwallet(
            node.createrawtransaction(
                inputs=[{
                    'txid': coin['txid'],
                    'vout': coin['vout']
                }],
                outputs=[{
                    node.getnewaddress(): 0.3
                }, {
                    node.getnewaddress(): 49
                }],
            ))['hex']
        txid_in_block = node.sendrawtransaction(hexstring=raw_tx_in_block,
                                                maxfeerate=0)
        node.generate(1)
        self.mempool_size = 0
        self.check_mempool_result(
            result_expected=[{
                'txid': txid_in_block,
                'allowed': False,
                'reject-reason': 'txn-already-known'
            }],
            rawtxs=[raw_tx_in_block],
        )

        self.log.info('A transaction not in the mempool')
        fee = Decimal('0.000007')
        raw_tx_0 = node.signrawtransactionwithwallet(
            node.createrawtransaction(
                inputs=[{
                    "txid": txid_in_block,
                    "vout": 0,
                    "sequence": BIP125_SEQUENCE_NUMBER
                }],  # RBF is used later
                outputs=[{
                    node.getnewaddress(): Decimal('0.3') - fee
                }],
            ))['hex']
        tx = tx_from_hex(raw_tx_0)
        txid_0 = tx.rehash()
        self.check_mempool_result(
            result_expected=[{
                'txid': txid_0,
                'allowed': True,
                'vsize': tx.get_vsize(),
                'fees': {
                    'base': fee
                }
            }],
            rawtxs=[raw_tx_0],
        )

        self.log.info('A final transaction not in the mempool')
        coin = coins.pop()  # Pick a random coin(base) to spend
        output_amount = Decimal('0.025')
        raw_tx_final = node.signrawtransactionwithwallet(
            node.createrawtransaction(
                inputs=[{
                    'txid': coin['txid'],
                    'vout': coin['vout'],
                    "sequence": 0xffffffff
                }],  # SEQUENCE_FINAL
                outputs=[{
                    node.getnewaddress(): output_amount
                }],
                locktime=node.getblockcount() + 2000,  # Can be anything
            ))['hex']
        tx = tx_from_hex(raw_tx_final)
        fee_expected = coin['amount'] - output_amount
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': True,
                'vsize': tx.get_vsize(),
                'fees': {
                    'base': fee_expected
                }
            }],
            rawtxs=[tx.serialize().hex()],
            maxfeerate=0,
        )
        node.sendrawtransaction(hexstring=raw_tx_final, maxfeerate=0)
        self.mempool_size += 1

        self.log.info('A transaction in the mempool')
        node.sendrawtransaction(hexstring=raw_tx_0)
        self.mempool_size += 1
        self.check_mempool_result(
            result_expected=[{
                'txid': txid_0,
                'allowed': False,
                'reject-reason': 'txn-already-in-mempool'
            }],
            rawtxs=[raw_tx_0],
        )

        self.log.info('A transaction that replaces a mempool transaction')
        tx = tx_from_hex(raw_tx_0)
        tx.vout[0].nValue -= int(fee * COIN)  # Double the fee
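        # The original tx already paid `fee`, so reducing the output by another
        # `fee` doubles the total fee.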
        tx.vin[0].nSequence = BIP125_SEQUENCE_NUMBER + 1  # Now, opt out of RBF
        raw_tx_0 = node.signrawtransactionwithwallet(
            tx.serialize().hex())['hex']
        tx = tx_from_hex(raw_tx_0)
        txid_0 = tx.rehash()
        self.check_mempool_result(
            result_expected=[{
                'txid': txid_0,
                'allowed': True,
                'vsize': tx.get_vsize(),
                'fees': {
                    'base': (2 * fee)
                }
            }],
            rawtxs=[raw_tx_0],
        )

        self.log.info('A transaction that conflicts with an unconfirmed tx')
        # Send the transaction that replaces the mempool transaction and opts out of replaceability
        node.sendrawtransaction(hexstring=tx.serialize().hex(), maxfeerate=0)
        # take original raw_tx_0
        tx = tx_from_hex(raw_tx_0)
        tx.vout[0].nValue -= int(4 * fee * COIN)  # Set more fee
        # skip re-signing the tx
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'txn-mempool-conflict'
            }],
            rawtxs=[tx.serialize().hex()],
            maxfeerate=0,
        )

        self.log.info('A transaction with missing inputs, that never existed')
        tx = tx_from_hex(raw_tx_0)
        tx.vin[0].prevout = COutPoint(hash=int('ff' * 32, 16), n=14)
        # skip re-signing the tx
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'missing-inputs'
            }],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info(
            'A transaction with missing inputs, that existed once in the past')
        tx = tx_from_hex(raw_tx_0)
        tx.vin[0].prevout.n = 1  # Set vout to 1, to spend the other outpoint (49 coins) of the in-chain-tx we want to double spend
        raw_tx_1 = node.signrawtransactionwithwallet(
            tx.serialize().hex())['hex']
        txid_1 = node.sendrawtransaction(hexstring=raw_tx_1, maxfeerate=0)
        # Now spend both to "clearly hide" the outputs, ie. remove the coins from the utxo set by spending them
        raw_tx_spend_both = node.signrawtransactionwithwallet(
            node.createrawtransaction(inputs=[
                {
                    'txid': txid_0,
                    'vout': 0
                },
                {
                    'txid': txid_1,
                    'vout': 0
                },
            ],
                                      outputs=[{
                                          node.getnewaddress(): 0.1
                                      }]))['hex']
        txid_spend_both = node.sendrawtransaction(hexstring=raw_tx_spend_both,
                                                  maxfeerate=0)
        node.generate(1)
        self.mempool_size = 0
        # Now see if we can add the coins back to the utxo set by sending the exact txs again
        self.check_mempool_result(
            result_expected=[{
                'txid': txid_0,
                'allowed': False,
                'reject-reason': 'missing-inputs'
            }],
            rawtxs=[raw_tx_0],
        )
        self.check_mempool_result(
            result_expected=[{
                'txid': txid_1,
                'allowed': False,
                'reject-reason': 'missing-inputs'
            }],
            rawtxs=[raw_tx_1],
        )

        self.log.info('Create a signed "reference" tx for later use')
        raw_tx_reference = node.signrawtransactionwithwallet(
            node.createrawtransaction(
                inputs=[{
                    'txid': txid_spend_both,
                    'vout': 0
                }],
                outputs=[{
                    node.getnewaddress(): 0.05
                }],
            ))['hex']
        tx = tx_from_hex(raw_tx_reference)
        # Reference tx should be valid on itself
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': True,
                'vsize': tx.get_vsize(),
                'fees': {
                    'base': Decimal('0.1') - Decimal('0.05')
                }
            }],
            rawtxs=[tx.serialize().hex()],
            maxfeerate=0,
        )

        self.log.info('A transaction with no outputs')
        tx = tx_from_hex(raw_tx_reference)
        tx.vout = []
        # Skip re-signing the transaction for context independent checks from now on
        # tx = tx_from_hex(node.signrawtransactionwithwallet(tx.serialize().hex())['hex'])
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'bad-txns-vout-empty'
            }],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A really large transaction')
        tx = tx_from_hex(raw_tx_reference)
        tx.vin = [tx.vin[0]] * math.ceil(
            MAX_BLOCK_BASE_SIZE / len(tx.vin[0].serialize()))
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'bad-txns-oversize'
            }],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A transaction with negative output value')
        tx = tx_from_hex(raw_tx_reference)
        tx.vout[0].nValue *= -1
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'bad-txns-vout-negative'
            }],
            rawtxs=[tx.serialize().hex()],
        )

        # The following two validations prevent overflow of the output amounts (see CVE-2010-5139).
        self.log.info('A transaction with too large output value')
        tx = tx_from_hex(raw_tx_reference)
        tx.vout[0].nValue = MAX_MONEY + 1
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'bad-txns-vout-toolarge'
            }],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A transaction with too large sum of output values')
        tx = tx_from_hex(raw_tx_reference)
        tx.vout = [tx.vout[0]] * 2
        tx.vout[0].nValue = MAX_MONEY
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'bad-txns-txouttotal-toolarge'
            }],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A transaction with duplicate inputs')
        tx = tx_from_hex(raw_tx_reference)
        tx.vin = [tx.vin[0]] * 2
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'bad-txns-inputs-duplicate'
            }],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A non-coinbase transaction with coinbase-like outpoint')
        tx = tx_from_hex(raw_tx_reference)
        tx.vin.append(CTxIn(COutPoint(hash=0, n=0xffffffff)))
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'bad-txns-prevout-null'
            }],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A coinbase transaction')
        # Pick the input of the first tx we signed, so it has to be a coinbase tx
        raw_tx_coinbase_spent = node.getrawtransaction(
            txid=node.decoderawtransaction(
                hexstring=raw_tx_in_block)['vin'][0]['txid'])
        tx = tx_from_hex(raw_tx_coinbase_spent)
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'coinbase'
            }],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('Some nonstandard transactions')
        tx = tx_from_hex(raw_tx_reference)
        tx.nVersion = 3  # A version currently non-standard
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'version'
            }],
            rawtxs=[tx.serialize().hex()],
        )
        tx = tx_from_hex(raw_tx_reference)
        tx.vout[0].scriptPubKey = CScript([OP_0])  # Some non-standard script
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'scriptpubkey'
            }],
            rawtxs=[tx.serialize().hex()],
        )
        tx = tx_from_hex(raw_tx_reference)
        key = ECKey()
        key.generate()
        pubkey = key.get_pubkey().get_bytes()
        tx.vout[0].scriptPubKey = CScript(
            [OP_2, pubkey, pubkey, pubkey, OP_3,
             OP_CHECKMULTISIG])  # Some bare multisig script (2-of-3)
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'bare-multisig'
            }],
            rawtxs=[tx.serialize().hex()],
        )
        tx = tx_from_hex(raw_tx_reference)
        tx.vin[0].scriptSig = CScript([OP_HASH160])  # Some not-pushonly scriptSig
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'scriptsig-not-pushonly'
            }],
            rawtxs=[tx.serialize().hex()],
        )
        tx = tx_from_hex(raw_tx_reference)
        tx.vin[0].scriptSig = CScript(
            [b'a' * 1648])  # Some too large scriptSig (>1650 bytes)
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'scriptsig-size'
            }],
            rawtxs=[tx.serialize().hex()],
        )
        tx = tx_from_hex(raw_tx_reference)
        output_p2sh_burn = CTxOut(nValue=540,
                                  scriptPubKey=script_to_p2sh_script(b'burn'))
        # Use enough outputs to make the tx too large for our policy
        num_scripts = 100000 // len(output_p2sh_burn.serialize())
        tx.vout = [output_p2sh_burn] * num_scripts
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'tx-size'
            }],
            rawtxs=[tx.serialize().hex()],
        )
        tx = tx_from_hex(raw_tx_reference)
        tx.vout[0] = output_p2sh_burn
        tx.vout[0].nValue -= 1  # Make output smaller, such that it is dust for our policy
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'dust'
            }],
            rawtxs=[tx.serialize().hex()],
        )
        tx = tx_from_hex(raw_tx_reference)
        tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'\xff'])
        tx.vout = [tx.vout[0]] * 2
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'multi-op-return'
            }],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A timelocked transaction')
        tx = tx_from_hex(raw_tx_reference)
        tx.vin[0].nSequence -= 1  # Should be non-max, so locktime is not ignored
        tx.nLockTime = node.getblockcount() + 1
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'non-final'
            }],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A transaction that is locked by BIP68 sequence logic')
        tx = tx_from_hex(raw_tx_reference)
        tx.vin[0].nSequence = 2  # We could include it in the second block mined from now, but not the very next one
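        # Under BIP68 (tx version >= 2), a small nSequence value is a relative
        # lock: this input only becomes spendable once its parent output has at
        # least 2 confirmations, so the tx is not yet final.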
        # Can skip re-signing the tx because of early rejection
        self.check_mempool_result(
            result_expected=[{
                'txid': tx.rehash(),
                'allowed': False,
                'reject-reason': 'non-BIP68-final'
            }],
            rawtxs=[tx.serialize().hex()],
            maxfeerate=0,
        )
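
A minimal standalone sketch (reusing tx_from_hex from the example above; other names hypothetical) of the mutate-and-test pattern used throughout: start from a known-good signed transaction, introduce exactly one violation, and check the expected reject-reason via testmempoolaccept without re-signing.

def sketch_check_rejection(node, raw_tx_reference):
    tx = tx_from_hex(raw_tx_reference)
    tx.vout = []  # one deliberate violation: a transaction with no outputs
    res = node.testmempoolaccept(rawtxs=[tx.serialize().hex()])[0]
    assert not res["allowed"]
    assert res["reject-reason"] == "bad-txns-vout-empty"
    return res
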
Example no. 29
    def run_test(self):
        node = self.nodes[0]
        node.generate(1)  # Leave IBD for sethdseed

        self.nodes[0].createwallet(wallet_name='w0')
        w0 = node.get_wallet_rpc('w0')
        address1 = w0.getnewaddress()

        self.log.info("Test disableprivatekeys creation.")
        self.nodes[0].createwallet(wallet_name='w1', disable_private_keys=True)
        w1 = node.get_wallet_rpc('w1')
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys",
                                w1.getnewaddress)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys",
                                w1.getrawchangeaddress)
        w1.importpubkey(w0.getaddressinfo(address1)['pubkey'])

        self.log.info('Test that private keys cannot be imported')
        eckey = ECKey()
        eckey.generate()
        privkey = bytes_to_wif(eckey.get_bytes())
        assert_raises_rpc_error(
            -4,
            'Cannot import private keys to a wallet with private keys disabled',
            w1.importprivkey, privkey)
        if self.options.descriptors:
            result = w1.importdescriptors([{
                'desc': descsum_create('wpkh(' + privkey + ')'),
                'timestamp': 'now'
            }])
        else:
            result = w1.importmulti([{
                'scriptPubKey': {
                    'address': key_to_p2wpkh(eckey.get_pubkey().get_bytes())
                },
                'timestamp': 'now',
                'keys': [privkey]
            }])
        assert not result[0]['success']
        assert 'warning' not in result[0]
        assert_equal(result[0]['error']['code'], -4)
        assert_equal(
            result[0]['error']['message'],
            'Cannot import private keys to a wallet with private keys disabled'
        )

        self.log.info("Test blank creation with private keys disabled.")
        self.nodes[0].createwallet(wallet_name='w2',
                                   disable_private_keys=True,
                                   blank=True)
        w2 = node.get_wallet_rpc('w2')
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys",
                                w2.getnewaddress)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys",
                                w2.getrawchangeaddress)
        w2.importpubkey(w0.getaddressinfo(address1)['pubkey'])

        self.log.info("Test blank creation with private keys enabled.")
        self.nodes[0].createwallet(wallet_name='w3',
                                   disable_private_keys=False,
                                   blank=True)
        w3 = node.get_wallet_rpc('w3')
        assert_equal(w3.getwalletinfo()['keypoolsize'], 0)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys",
                                w3.getnewaddress)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys",
                                w3.getrawchangeaddress)
        # Import private key
        w3.importprivkey(generate_wif_key())
        # Imported private keys are currently ignored by the keypool
        assert_equal(w3.getwalletinfo()['keypoolsize'], 0)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys",
                                w3.getnewaddress)
        # Set the seed
        if self.options.descriptors:
            w3.importdescriptors([{
                'desc': descsum_create('wpkh(tprv8ZgxMBicQKsPcwuZGKp8TeWppSuLMiLe2d9PupB14QpPeQsqoj3LneJLhGHH13xESfvASyd4EFLJvLrG8b7DrLxEuV7hpF9uUc6XruKA1Wq/0h/*)'),
                'timestamp': 'now',
                'active': True
            }, {
                'desc': descsum_create('wpkh(tprv8ZgxMBicQKsPcwuZGKp8TeWppSuLMiLe2d9PupB14QpPeQsqoj3LneJLhGHH13xESfvASyd4EFLJvLrG8b7DrLxEuV7hpF9uUc6XruKA1Wq/1h/*)'),
                'timestamp': 'now',
                'active': True,
                'internal': True
            }])
        else:
            w3.sethdseed()
        assert_equal(w3.getwalletinfo()['keypoolsize'], 1)
        w3.getnewaddress()
        w3.getrawchangeaddress()

        self.log.info(
            "Test blank creation with privkeys enabled and then encryption")
        self.nodes[0].createwallet(wallet_name='w4',
                                   disable_private_keys=False,
                                   blank=True)
        w4 = node.get_wallet_rpc('w4')
        assert_equal(w4.getwalletinfo()['keypoolsize'], 0)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys",
                                w4.getnewaddress)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys",
                                w4.getrawchangeaddress)
        # Encrypt the wallet. Nothing should change about the keypool
        w4.encryptwallet('pass')
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys",
                                w4.getnewaddress)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys",
                                w4.getrawchangeaddress)
        # Now set a seed and it should work. Wallet should also be encrypted
        w4.walletpassphrase('pass', 60)
        if self.options.descriptors:
            w4.importdescriptors([{
                'desc': descsum_create('wpkh(tprv8ZgxMBicQKsPcwuZGKp8TeWppSuLMiLe2d9PupB14QpPeQsqoj3LneJLhGHH13xESfvASyd4EFLJvLrG8b7DrLxEuV7hpF9uUc6XruKA1Wq/0h/*)'),
                'timestamp': 'now',
                'active': True
            }, {
                'desc': descsum_create('wpkh(tprv8ZgxMBicQKsPcwuZGKp8TeWppSuLMiLe2d9PupB14QpPeQsqoj3LneJLhGHH13xESfvASyd4EFLJvLrG8b7DrLxEuV7hpF9uUc6XruKA1Wq/1h/*)'),
                'timestamp': 'now',
                'active': True,
                'internal': True
            }])
        else:
            w4.sethdseed()
        w4.getnewaddress()
        w4.getrawchangeaddress()

        self.log.info(
            "Test blank creation with privkeys disabled and then encryption")
        self.nodes[0].createwallet(wallet_name='w5',
                                   disable_private_keys=True,
                                   blank=True)
        w5 = node.get_wallet_rpc('w5')
        assert_equal(w5.getwalletinfo()['keypoolsize'], 0)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys",
                                w5.getnewaddress)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys",
                                w5.getrawchangeaddress)
        # Encrypt the wallet
        assert_raises_rpc_error(
            -16,
            "Error: wallet does not contain private keys, nothing to encrypt.",
            w5.encryptwallet, 'pass')
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys",
                                w5.getnewaddress)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys",
                                w5.getrawchangeaddress)

        self.log.info('New blank and encrypted wallets can be created')
        self.nodes[0].createwallet(wallet_name='wblank',
                                   disable_private_keys=False,
                                   blank=True,
                                   passphrase='thisisapassphrase')
        wblank = node.get_wallet_rpc('wblank')
        assert_raises_rpc_error(
            -13,
            "Error: Please enter the wallet passphrase with walletpassphrase first.",
            wblank.signmessage, "needanargument", "test")
        wblank.walletpassphrase('thisisapassphrase', 60)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys",
                                wblank.getnewaddress)
        assert_raises_rpc_error(-4, "Error: This wallet has no available keys",
                                wblank.getrawchangeaddress)

        self.log.info('Test creating a new encrypted wallet.')
        # Born encrypted wallet is created (has keys)
        self.nodes[0].createwallet(wallet_name='w6',
                                   disable_private_keys=False,
                                   blank=False,
                                   passphrase='thisisapassphrase')
        w6 = node.get_wallet_rpc('w6')
        assert_raises_rpc_error(
            -13,
            "Error: Please enter the wallet passphrase with walletpassphrase first.",
            w6.signmessage, "needanargument", "test")
        w6.walletpassphrase('thisisapassphrase', 60)
        w6.signmessage(w6.getnewaddress(''), "test")
        w6.keypoolrefill(1)
        # There should only be 1 key for legacy, 3 for descriptors
        walletinfo = w6.getwalletinfo()
        keys = 3 if self.options.descriptors else 1
        assert_equal(walletinfo['keypoolsize'], keys)
        assert_equal(walletinfo['keypoolsize_hd_internal'], keys)
        # Allow empty passphrase, but there should be a warning
        resp = self.nodes[0].createwallet(wallet_name='w7',
                                          disable_private_keys=False,
                                          blank=False,
                                          passphrase='')
        assert 'Empty string given as passphrase, wallet will not be encrypted.' in resp['warning']
        w7 = node.get_wallet_rpc('w7')
        assert_raises_rpc_error(
            -15,
            'Error: running with an unencrypted wallet, but walletpassphrase was called.',
            w7.walletpassphrase, '', 60)

        self.log.info('Test making a wallet with avoid reuse flag')
        self.nodes[0].createwallet(
            'w8', False, False, '', True
        )  # Use positional arguments to check for bug where avoid_reuse could not be set for wallets without needing them to be encrypted
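        # Positional order used here: wallet_name, disable_private_keys, blank,
        # passphrase, avoid_reuse.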
        w8 = node.get_wallet_rpc('w8')
        assert_raises_rpc_error(
            -15,
            'Error: running with an unencrypted wallet, but walletpassphrase was called.',
            w8.walletpassphrase, '', 60)
        assert_equal(w8.getwalletinfo()["avoid_reuse"], True)

        self.log.info(
            'Using a passphrase with private keys disabled returns error')
        assert_raises_rpc_error(
            -4,
            'Passphrase provided but private keys are disabled. A passphrase is only used to encrypt private keys, so cannot be used for wallets with private keys disabled.',
            self.nodes[0].createwallet,
            wallet_name='w9',
            disable_private_keys=True,
            passphrase='thisisapassphrase')
Example no. 30
    def run_test(self):
        p2p0 = self.nodes[0].add_p2p_connection(BaseNode())

        # Build the blockchain
        self.tip = int(self.nodes[0].getbestblockhash(), 16)
        self.block_time = self.nodes[0].getblock(
            self.nodes[0].getbestblockhash())['time'] + 1

        self.blocks = []

        # Get a pubkey for the coinbase TXO
        coinbase_key = ECKey()
        coinbase_key.generate()
        coinbase_pubkey = coinbase_key.get_pubkey().get_bytes()

        # Create the first block with a coinbase output to our key
        height = 1
        block = create_block(self.tip, create_coinbase(height,
                                                       coinbase_pubkey),
                             self.block_time)
        self.blocks.append(block)
        self.block_time += 1
        block.solve()
        # Save the coinbase for later
        self.block1 = block
        self.tip = block.sha256
        height += 1

        # Bury the block 100 deep so the coinbase output is spendable
        for i in range(100):
            block = create_block(self.tip, create_coinbase(height),
                                 self.block_time)
            block.solve()
            self.blocks.append(block)
            self.tip = block.sha256
            self.block_time += 1
            height += 1

        # Create a transaction spending the coinbase output with an invalid (null) signature
        tx = CTransaction()
        tx.vin.append(
            CTxIn(COutPoint(self.block1.vtx[0].sha256, 0), scriptSig=b""))
        tx.vout.append(CTxOut(49 * 100000000, CScript([OP_TRUE])))
        tx.calc_sha256()

        block102 = create_block(self.tip, create_coinbase(height),
                                self.block_time)
        self.block_time += 1
        block102.vtx.extend([tx])
        block102.hashMerkleRoot = block102.calc_merkle_root()
        block102.rehash()
        block102.solve()
        self.blocks.append(block102)
        self.tip = block102.sha256
        self.block_time += 1
        height += 1

        # Bury the assumed valid block 2100 deep
        for i in range(2100):
            block = create_block(self.tip, create_coinbase(height),
                                 self.block_time)
            block.nVersion = 4
            block.solve()
            self.blocks.append(block)
            self.tip = block.sha256
            self.block_time += 1
            height += 1

        self.nodes[0].disconnect_p2ps()

        # Start node1 and node2 with assumevalid so they accept a block with a bad signature.
        self.start_node(1, extra_args=["-assumevalid=" + hex(block102.sha256)])
        self.start_node(2, extra_args=["-assumevalid=" + hex(block102.sha256)])

        p2p0 = self.nodes[0].add_p2p_connection(BaseNode())
        p2p1 = self.nodes[1].add_p2p_connection(BaseNode())
        p2p2 = self.nodes[2].add_p2p_connection(BaseNode())

        # send header lists to all three nodes
        p2p0.send_header_for_blocks(self.blocks[0:2000])
        p2p0.send_header_for_blocks(self.blocks[2000:])
        p2p1.send_header_for_blocks(self.blocks[0:2000])
        p2p1.send_header_for_blocks(self.blocks[2000:])
        p2p2.send_header_for_blocks(self.blocks[0:200])
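        # node0 was started without -assumevalid and therefore verifies (and
        # rejects) block 102. node2 has -assumevalid set, but it only ever sees
        # 200 headers, so it cannot tell that the assumed-valid block is buried
        # under enough work and falls back to full verification as well.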

        # Send blocks to node0. Block 102 will be rejected.
        self.send_blocks_until_disconnected(p2p0)
        self.assert_blockchain_height(self.nodes[0], 101)

        # Send all blocks to node1. All blocks will be accepted.
        for i in range(2202):
            p2p1.send_message(msg_block(self.blocks[i]))
        # Syncing 2200 blocks can take a while on slow systems. Give it plenty of time to sync.
        p2p1.sync_with_ping(960)
        assert_equal(
            self.nodes[1].getblock(self.nodes[1].getbestblockhash())['height'],
            2202)

        # Send blocks to node2. Block 102 will be rejected.
        self.send_blocks_until_disconnected(p2p2)
        self.assert_blockchain_height(self.nodes[2], 101)
Example no. 31
    def run_test(self):
        p2p0 = self.nodes[0].add_p2p_connection(BaseNode())

        # Build the blockchain
        self.tip = int(self.nodes[0].getbestblockhash(), 16)
        self.block_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time'] + 1

        self.blocks = []

        # Get a pubkey for the coinbase TXO
        coinbase_key = ECKey()
        coinbase_key.generate()
        coinbase_pubkey = coinbase_key.get_pubkey().get_bytes()

        # Create the first block with a coinbase output to our key
        height = 1
        block = create_block(self.tip, create_coinbase(height, coinbase_pubkey), self.block_time)
        self.blocks.append(block)
        self.block_time += 1
        block.solve()
        # Save the coinbase for later
        self.block1 = block
        self.tip = block.sha256
        height += 1

        # Bury the block 100 deep so the coinbase output is spendable
        for i in range(100):
            block = create_block(self.tip, create_coinbase(height), self.block_time)
            block.solve()
            self.blocks.append(block)
            self.tip = block.sha256
            self.block_time += 1
            height += 1

        # Create a transaction spending the coinbase output with an invalid (null) signature
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.block1.vtx[0].sha256, 0), scriptSig=b""))
        tx.vout.append(CTxOut(49 * 100000000, CScript([OP_TRUE])))
        tx.calc_sha256()

        block102 = create_block(self.tip, create_coinbase(height), self.block_time)
        self.block_time += 1
        block102.vtx.extend([tx])
        block102.hashMerkleRoot = block102.calc_merkle_root()
        block102.rehash()
        block102.solve()
        self.blocks.append(block102)
        self.tip = block102.sha256
        self.block_time += 1
        height += 1

        # Bury the assumed valid block 2100 deep
        for i in range(2100):
            block = create_block(self.tip, create_coinbase(height), self.block_time)
            block.set_base_version(4)
            block.solve()
            self.blocks.append(block)
            self.tip = block.sha256
            self.block_time += 1
            height += 1

        self.nodes[0].disconnect_p2ps()

        # Start node1 and node2 with assumevalid so they accept a block with a bad signature.
        self.start_node(1, extra_args=["-assumevalid=" + hex(block102.sha256)])
        self.start_node(2, extra_args=["-assumevalid=" + hex(block102.sha256)])

        p2p0 = self.nodes[0].add_p2p_connection(BaseNode())
        p2p1 = self.nodes[1].add_p2p_connection(BaseNode())
        p2p2 = self.nodes[2].add_p2p_connection(BaseNode())

        # send header lists to all three nodes
        p2p0.send_header_for_blocks(self.blocks[0:2000])
        p2p0.send_header_for_blocks(self.blocks[2000:])
        p2p1.send_header_for_blocks(self.blocks[0:2000])
        p2p1.send_header_for_blocks(self.blocks[2000:])
        p2p2.send_header_for_blocks(self.blocks[0:200])

        # Send blocks to node0. Block 102 will be rejected.
        self.send_blocks_until_disconnected(p2p0)
        self.assert_blockchain_height(self.nodes[0], 101)

        # Send all blocks to node1. All blocks will be accepted.
        for i in range(2202):
            p2p1.send_message(msg_block(self.blocks[i]))
        # Syncing 2200 blocks can take a while on slow systems. Give it plenty of time to sync.
        p2p1.sync_with_ping(200)
        assert_equal(self.nodes[1].getblock(self.nodes[1].getbestblockhash())['height'], 2202)

        # Send blocks to node2. Block 102 will be rejected.
        self.send_blocks_until_disconnected(p2p2)
        self.assert_blockchain_height(self.nodes[2], 101)