def test_multisig(self, unsigned_hex, signed1_hex, fully_signed_hex, signed2_hex):
    """Check a multisig transaction serializes stably and reaches the same
    fully-signed form regardless of which keystore signs first.

    Fix: removed the unused local ``result_hex`` that was assigned and never read.
    """
    tx = Transaction.from_hex(unsigned_hex)
    keystore1, keystore2 = self.multisig_keystores()

    # Check serializes the same
    assert tx.serialize() == unsigned_hex

    # Sign with keystore 1, then 2
    keystore1.sign_transaction(tx, None)
    assert tx.serialize() == signed1_hex
    keystore2.sign_transaction(tx, None)
    assert tx.serialize() == fully_signed_hex

    # Sign with keystore 2, then 1 — order must not affect the final result.
    tx = Transaction.from_hex(unsigned_hex)
    keystore2.sign_transaction(tx, None)
    assert tx.serialize() == signed2_hex
    keystore1.sign_transaction(tx, None)
    assert tx.serialize() == fully_signed_hex
def test_add_transaction_update(self):
    # Adding a metadata-only row first, then the full transaction with a state,
    # should upgrade the cached entry in place (bytedata + state flag) rather
    # than create a duplicate.
    cache = TransactionCache(self.store)
    tx = Transaction.from_hex(tx_hex_1)
    # Metadata-only row: no bytedata (None), no state flags.
    data = [ tx.hash(), TxData(height=1295924, position=4, fee=None, date_added=1,
        date_updated=1), None, TxFlags.Unset, None ]
    with SynchronousWriter() as writer:
        cache.add([data], completion_callback=writer.get_callback())
        assert writer.succeeded()

    entry = cache.get_entry(tx.hash())
    assert entry is not None
    # No state bits should be set yet.
    assert TxFlags.Unset == entry.flags & TxFlags.STATE_MASK

    with SynchronousWriter() as writer:
        cache.add_transaction(tx, TxFlags.StateCleared,
            completion_callback=writer.get_callback())
        assert writer.succeeded()

    tx_hash = tx.hash()
    entry = cache.get_entry(tx_hash)
    assert entry is not None
    # The bytedata should now be cached and the cleared state recorded.
    assert cache.have_transaction_data_cached(tx_hash)
    assert TxFlags.StateCleared == entry.flags & TxFlags.StateCleared
async def test_broadcast_good_response(self, monkeypatch, cli):
    # Happy path for the /txs/broadcast REST endpoint: wallet lookup, tx
    # creation, broadcast and network I/O are all patched out, so only the
    # handler's request parsing and JSON response shape are exercised.
    monkeypatch.setattr(self.rest_server, '_get_account', _fake_get_account_succeeded)
    monkeypatch.setattr(self.rest_server.app_state.app, '_create_transaction',
        _fake_create_transaction_succeeded)
    monkeypatch.setattr(self.rest_server, '_broadcast_transaction', _fake_broadcast_tx)
    monkeypatch.setattr(
        self.rest_server.app_state.app, 'get_and_set_frozen_utxos_for_tx',
        self.rest_server._fake_get_and_set_frozen_utxos_for_tx)
    monkeypatch.setattr(self.rest_server, 'send_request',
        self.rest_server._fake_send_request)
    # mock request
    network = "test"
    wallet_name = "wallet_file1.sqlite"
    index = "1"
    resp = await cli.request(
        path=f"/v1/{network}/dapp/wallets/{wallet_name}/"
        f"{index}/txs/broadcast",
        method='post', json={"rawtx": rawtx})
    # check — the endpoint should echo back the txid of the canned rawtx.
    tx = Transaction.from_hex(rawtx)
    expected_json = {"value": {"txid": tx.txid()}}
    assert resp.status == 200, await resp.read()
    response = await resp.read()
    assert json.loads(response) == expected_json
def test_tx_unsigned(self):
    # Deserialize an unsigned (extended-format) transaction blob and verify
    # every parsed field, including the placeholder signature and the
    # serialized extended public key carried in the script_sig.
    tx = Transaction.from_hex(unsigned_blob)
    assert tx.version == 1
    assert len(tx.inputs) == 1
    txin = tx.inputs[0]
    assert txin.prev_hash.hex(
    ) == '49f35e43fefd22d8bb9e4b3ff294c6286154c25712baf6ab77b646e5074d6aed'
    assert txin.prev_idx == 1
    # The script_sig encodes a NO_SIGNATURE marker plus the x_pubkey payload.
    assert txin.script_sig.to_hex(
    ) == '01ff4c53ff0488b21e0000000000000000004f130d773e678a58366711837ec2e33ea601858262f8eaef246a7ebd19909c9a03c3b30e38ca7d797fee1223df1c9827b2a9f3379768f520910260220e0560014600002300'
    assert txin.sequence == 4294967294
    assert txin.value == 20112600
    # Unsigned inputs carry the NO_SIGNATURE placeholder.
    assert txin.signatures == [NO_SIGNATURE]
    assert txin.x_pubkeys == [
        XPublicKey(
            'ff0488b21e0000000000000000004f130d773e678a58366711837ec2e33ea601858262f8eaef246a7ebd19909c9a03c3b30e38ca7d797fee1223df1c9827b2a9f3379768f520910260220e0560014600002300'
        )
    ]
    assert txin.address == address_from_string(
        '13Vp8Y3hD5Cb6sERfpxePz5vGJizXbWciN')
    assert txin.threshold == 1
    assert tx.outputs == [
        TxOutput(
            20112408,
            address_from_string(
                '1MYXdf4moacvaEKZ57ozerpJ3t9xSeN6LK').to_script())
    ]
    assert tx.locktime == 507231
    # Unsigned transactions report complete=False in their dict form.
    assert tx.as_dict() == {'hex': unsigned_blob, 'complete': False}
def _fake_create_transaction_succeeded(file_id, message_bytes, child_wallet, password,
        require_confirmed) -> Tuple[Any, set]:
    """Stand-in for ``_create_transaction`` that always succeeds.

    All arguments are accepted for signature compatibility and ignored.
    Returns the module-level ``rawtx`` parsed as a Transaction, plus an
    empty frozen-utxo set.
    """
    # Todo - test _create_transaction separately
    tx = Transaction.from_hex(rawtx)
    # set() is the idiomatic empty set; set([]) built a throwaway list first.
    frozen_utxos = set()
    return tx, frozen_utxos
def test_update_signatures(self):
    """Signatures lifted from a fully-signed transaction can be applied to the
    matching unsigned transaction to complete it."""
    signed_tx = Tx.from_hex(signed_tx_3)
    # The first push of each input's script_sig is <signature + sighash byte>;
    # strip the trailing sighash byte. NOTE: the loop variable was renamed
    # from `input`, which shadowed the builtin of the same name.
    sigs = [next(txin.script_sig.ops())[:-1] for txin in signed_tx.inputs]
    tx = Transaction.from_hex(unsigned_tx)
    tx.update_signatures(sigs)
    assert tx.is_complete()
    assert tx.txid() == "b83acf939a92c420d0cb8d45d5d4dfad4e90369ebce0f49a45808dc1b41259b0"
def sign_tx(self, unsigned_tx_hex, priv_keys):
    """Sign an unsigned transaction hex with the given private keys.

    Builds the keypair lookup the signer expects (x_pubkey -> (raw key bytes,
    compressed flag)) and returns the signed Transaction.
    """
    keypairs = {}
    for priv_key in priv_keys:
        x_pubkey = XPublicKey(priv_key.public_key.to_hex())
        keypairs[x_pubkey] = (priv_key.to_bytes(), priv_key.is_compressed())
    tx = Transaction.from_hex(unsigned_tx_hex)
    tx.sign(keypairs)
    return tx
def get_small_tx() -> Transaction:
    """Load the small sample transaction from data/transactions/hello.txt
    and return it parsed."""
    tx_path = Path(dirname(os.path.realpath(__file__))) / "data/transactions/hello.txt"
    with open(tx_path, "r") as tx_file:
        raw_hex = tx_file.read()
    return Transaction.from_hex(raw_hex)
async def test_create_and_broadcast_good_response(self, monkeypatch, cli):
    """Happy path for /txs/create_and_broadcast with all internals patched out;
    the response should carry the txid of the canned rawtx.

    Fix: removed a duplicated ``monkeypatch.setattr(..., 'spawn', _fake_spawn)``
    line — patching the same attribute twice with the same value is a no-op.
    """
    monkeypatch.setattr(self.rest_server, '_get_account', _fake_get_account_succeeded)
    monkeypatch.setattr(self.rest_server.app_state.app, '_create_transaction',
        _fake_create_transaction_succeeded)
    monkeypatch.setattr(self.rest_server, '_broadcast_transaction', _fake_broadcast_tx)
    monkeypatch.setattr(self.rest_server.app_state.async_, 'spawn', _fake_spawn)
    monkeypatch.setattr(self.rest_server.app_state.app,
        'get_and_set_frozen_utxos_for_tx',
        self.rest_server._fake_get_and_set_frozen_utxos_for_tx)
    monkeypatch.setattr(self.rest_server, 'send_request',
        self.rest_server._fake_send_request)
    # mock request
    network = "test"
    wallet_name = "wallet_file1.sqlite"
    index = "1"
    password = "******"
    resp = await cli.request(path=f"/v1/{network}/dapp/wallets/{wallet_name}/"
                                  f"{index}/txs/create_and_broadcast",
                             method='post',
                             json={"outputs": [P2PKH_OUTPUT], "password": password})
    # check
    expected_json = {'txid': Transaction.from_hex(rawtx).txid()}
    assert resp.status == 200, await resp.read()
    response = await resp.read()
    assert json.loads(response) == expected_json
async def broadcast(self, request):
    """Broadcast a rawtx (hex string) to the network. """
    try:
        # Parse and validate the required request variables.
        required_vars = [VNAME.WALLET_NAME, VNAME.ACCOUNT_ID, VNAME.RAWTX]
        vars = await self.argparser(request, required_vars=required_vars)
        wallet_name = vars[VNAME.WALLET_NAME]
        index = vars[VNAME.ACCOUNT_ID]
        rawtx = vars[VNAME.RAWTX]

        account = self._get_account(wallet_name, index)
        tx = Transaction.from_hex(rawtx)
        # Reject transactions the wallet already knows about.
        self.raise_for_duplicate_tx(tx)
        # Freeze the spent utxos up-front so they are not double-spent while
        # the broadcast is in flight; unfrozen again on RPC failure below.
        frozen_utxos = self.app_state.app.get_and_set_frozen_utxos_for_tx(
            tx, account)
        result = await self._broadcast_transaction(rawtx, tx.hash(), account)
        self.prev_transaction = result
        response = {"value": {"txid": result}}
        return good_response(response)
    except Fault as e:
        return fault_to_http_response(e)
    except aiorpcx.jsonrpc.RPCError as e:
        # Roll back: unfreeze the coins and discard the signed transaction.
        # NOTE(review): this handler references `account`, `tx` and
        # `frozen_utxos`, which are only bound once execution reaches the
        # lines above — presumably RPCError can only originate from
        # _broadcast_transaction, after all three exist; confirm.
        account.set_frozen_coin_state(frozen_utxos, False)
        self.remove_signed_transaction(tx, account)
        return fault_to_http_response(
            Fault(Errors.AIORPCX_ERROR_CODE, e.message))
def test_fd_read_write(self):
    """A transaction deserialized from hex must serialize back to the exact
    same hex string (round-trip stability)."""
    raw_hex = (
        '0100000001de8ead15a3044065ed8274b79af5fe7f860f5a026c241e9dd93dd3ce26208aeb010000001'
        'd01ff1afd76a9148c16fd67cdf85cdd2b7686081152424159c3eb3388acfeffffffb06eb70000000000'
        '01f06db700000000001976a9148c16fd67cdf85cdd2b7686081152424159c3eb3388ac7ce40800'
    )
    parsed = Transaction.from_hex(raw_hex)
    assert parsed.serialize() == raw_hex
def test_txid_p2sh_to_p2sh(self):
    # Note the public keys in this transaction are not sorted. This also tests we do
    # not sort them.
    # A 2-of-3 P2SH spend paying to two P2SH outputs; the txid must match the
    # known on-chain value, proving serialization preserves key order.
    tx = Transaction.from_hex(
        '01000000018695eef2250b3a3b6ef45fe065e601610e69dd7a56de742092d40e6276e6c9ec00000000fdfd000047304402203199bf8e49f7203e8bcbfd754aa356c6ba61643a3490f8aef3888e0aaa7c048c02201e7180bfd670f4404e513359b4020fbc85d6625e3e265e0c357e8611f11b83e401483045022100e60f897db114679f9a310a032a22e9a7c2b8080affe2036c480ff87bf6f45ada02202dbd27af38dd97d418e24d89c3bb7a97e359dd927c1094d8c9e5cac57df704fb014c69522103adc563b9f5e506f485978f4e913c10da208eac6d96d49df4beae469e81a4dd982102c52bc9643a021464a31a3bfa99cfa46afaa4b3acda31e025da204b4ee44cc07a2103a1c8edcc3310b3d7937e9e4179e7bd9cdf31c276f985f4eb356f21b874225eb153aeffffffff02b8ce05000000000017a9145c9c158430b7b79c3ad7ef9bdf981601eda2412d87b82400000000000017a9146bf3ff89019ecc5971a39cdd4f1cabd3b647ad5d8700000000'
    )
    assert '2caab5a11fa1ec0f5bb014b8858d00fecf2c001e15d22ad04379ad7b36fef305' == tx.txid(
    )
def test_add_transaction(self):
    """Adding a transaction caches it and records that bytedata is present.

    Consistency fix: rewritten from unittest-style ``self.assert*`` calls to
    bare ``assert`` statements, matching the pytest style used by the other
    tests in this file.
    """
    cache = TxCache(self.store)
    tx = Transaction.from_hex(tx_hex_1)
    cache.add_transaction(tx)
    assert cache.is_cached(tx.txid())
    entry = cache.get_entry(tx.txid())
    assert TxFlags.HasByteData == entry.flags & TxFlags.HasByteData
    assert entry.bytedata is not None
def test_add_transaction(self):
    """Adding a full transaction marks it cached, sets HasByteData and keeps
    the bytedata available from the cache."""
    cache = TransactionCache(self.store)
    transaction = Transaction.from_hex(tx_hex_1)
    transaction_hash = transaction.hash()

    with SynchronousWriter() as writer:
        cache.add_transaction(transaction_hash, transaction,
            completion_callback=writer.get_callback())
        assert writer.succeeded()

    assert cache.is_cached(transaction_hash)
    entry = cache.get_entry(transaction_hash)
    assert TxFlags.HasByteData == entry.flags & TxFlags.HasByteData
    assert cache.have_transaction_data_cached(transaction_hash)
def test_get_unverified_entries_too_high(self):
    """A settled transaction at height 11 must not be returned when asking
    for unverified entries up to a height far above it."""
    cache = TransactionCache(self.store)
    transaction = Transaction.from_hex(tx_hex_1)
    transaction_hash = transaction.hash()
    metadata = TxData(height=11, position=22, date_added=1, date_updated=1)

    with SynchronousWriter() as writer:
        cache.add([ (transaction_hash, metadata, transaction, TxFlags.StateSettled, None) ],
            completion_callback=writer.get_callback())
        assert writer.succeeded()

    assert len(cache.get_unverified_entries(100)) == 0
def on_receive(self, keyhash, message):
    """Handle an encrypted transaction arriving from the cosigner pool for
    one of our (non-watching-only) keys: prompt the user, decrypt it with
    the keystore's master private key and open it in a transaction dialog."""
    logger.debug("signal arrived for '%s'", keyhash)
    # Find the window owning this keyhash; ignore watching-only entries.
    for item in self.items:
        if item.hash == keyhash and not item.watching_only:
            window = item.window
            break
    else:
        # for/else: no matching item was found.
        logger.error("keyhash not found")
        return

    parent_wallet = window.parent_wallet
    wallet = parent_wallet.get_default_wallet()
    # Hardware keystores cannot decrypt messages, so bail out (and clear the
    # pool entry so it is not re-delivered).
    if isinstance(wallet.get_keystore(), keystore.Hardware_KeyStore):
        window.show_warning(
            _('An encrypted transaction was retrieved from cosigning pool.'
              ) + '\n' +
            _('However, hardware wallets do not support message decryption, '
              'which makes them not compatible with the current design of cosigner pool.'
              ))
        self.listener.clear(keyhash)
        return

    # Ask for the password (if any); an empty/cancelled prompt aborts.
    if parent_wallet.has_password():
        password = window.password_dialog(
            _('An encrypted transaction was retrieved from cosigning pool.'
              ) + '\n' +
            _('Please enter your password to decrypt it.'))
        if not password:
            return
    else:
        password = None
        if not window.question(
                _("An encrypted transaction was retrieved from cosigning pool."
                  ) + '\n' +
                _("Do you want to open it now?")):
            return

    # Consume the pool entry before decrypting so it is not re-processed.
    self.listener.clear(keyhash)
    xprv = wallet.get_keystore().get_master_private_key(password)
    if not xprv:
        return
    privkey = bip32_key_from_string(xprv)
    try:
        # Decrypt to the raw transaction hex.
        message = bh2u(privkey.decrypt_message(message))
    except Exception as e:
        logger.exception("")
        window.show_error(_('Error decrypting message') + ':\n' + str(e))
        return

    tx = Transaction.from_hex(message)
    window.show_transaction(tx, prompt_if_unsaved=True)
def get_datacarrier_tx() -> Transaction:
    """datacarrier tx with one op_return output >6000 bytes and an xpubkey in
    the input - only for testing obj size calculation"""
    path = Path(dirname(os.path.realpath(__file__))).joinpath(
        "data/transactions/data_carrier.txt")
    with open(path, "r") as f:
        rawtx = f.read()
    tx = Transaction.from_hex(rawtx)
    # Fix: renamed from `priv_key_bytes` — the value is the serialized
    # *public* key derived from a fixed private key, so the old name was
    # misleading.
    pub_key_bytes = bitcoinx.PrivateKey(
        bytes.fromhex(
            'a2d9803c912ab380c1491d3bd1aaab34ca06742d7885a224ec8d386182d26ed2')
    ).public_key.to_bytes()
    tx.inputs[0].x_pubkeys.append(XPublicKey.from_bytes(pub_key_bytes))
    return tx
def test_get_flags(self):
    """get_flags returns None for unknown hashes, and the stored state flag
    combined with the derived Has* flags for a cached transaction."""
    cache = TransactionCache(self.store)
    # An arbitrary unknown identifier must yield no flags at all.
    assert cache.get_flags(os.urandom(10).hex()) is None

    transaction = Transaction.from_hex(tx_hex_1)
    transaction_hash = transaction.hash()
    metadata = TxData(position=11)
    with SynchronousWriter() as writer:
        cache.add([ (transaction_hash, metadata, transaction, TxFlags.StateDispatched, None) ],
            completion_callback=writer.get_callback())
        assert writer.succeeded()

    assert cache.is_cached(transaction_hash)
    expected_flags = TxFlags.StateDispatched | TxFlags.HasByteData | TxFlags.HasPosition
    assert expected_flags == cache.get_flags(transaction_hash)
def test_get_entry(self):
    """get_entry with a flags mask only matches entries whose state flag is
    included in the mask."""
    cache = TransactionCache(self.store)
    transaction = Transaction.from_hex(tx_hex_1)
    transaction_hash = transaction.hash()
    metadata = TxData(position=11)
    with SynchronousWriter() as writer:
        cache.add([ (transaction_hash, metadata, transaction, TxFlags.StateSettled, None) ],
            completion_callback=writer.get_callback())
        assert writer.succeeded()

    # The stored state is Settled, so a Dispatched mask must not match…
    assert cache.get_entry(transaction_hash, TxFlags.StateDispatched) is None
    # …while a Settled mask must.
    assert cache.get_entry(transaction_hash, TxFlags.StateSettled) is not None
async def test_create_tx_good_response(self, monkeypatch, cli):
    # Happy path for the /txs/create REST endpoint. The event loop's executor
    # is replaced so the "create transaction" work returns the canned rawtx
    # immediately instead of doing real wallet work.
    class MockEventLoop:
        # Mimics the (tx, frozen_utxos) result shape of the real executor call.
        async def run_in_executor(self, *args):
            tx = Transaction.from_hex(rawtx)
            frozen_utxos = None
            return tx, frozen_utxos

        # aiohttp/asyncio internals query this; the value is irrelevant here.
        def get_debug(self):
            return

    def _fake_get_event_loop():
        return MockEventLoop()

    monkeypatch.setattr(self.rest_server, '_get_account', _fake_get_account_succeeded)
    monkeypatch.setattr(self.rest_server.app_state.app, '_create_transaction',
        _fake_create_transaction_succeeded)
    monkeypatch.setattr(asyncio, 'get_event_loop', _fake_get_event_loop)
    monkeypatch.setattr(
        self.rest_server.app_state.app, 'get_and_set_frozen_utxos_for_tx',
        self.rest_server._fake_get_and_set_frozen_utxos_for_tx)
    # mock request
    network = "test"
    wallet_name = "wallet_file1.sqlite"
    index = "1"
    password = "******"
    resp = await cli.request(
        path=f"/v1/{network}/dapp/wallets/{wallet_name}/"
        f"{index}/txs/create",
        method='post',
        json={
            "outputs": [P2PKH_OUTPUT],
            "password": password
        })
    # check — response must contain both the txid and the raw hex.
    expected_json = {
        "value": {
            "txid": Transaction.from_hex(rawtx).txid(),
            "rawtx": rawtx
        }
    }
    assert resp.status == 200, await resp.read()
    response = await resp.read()
    assert json.loads(response) == expected_json
def test_get_height(self):
    """Height is reported for Settled and Cleared states but hidden once the
    state drops to Received."""
    cache = TransactionCache(self.store)
    transaction = Transaction.from_hex(tx_hex_1)
    transaction_hash = transaction.hash()
    with SynchronousWriter() as writer:
        cache.add([ (transaction_hash, TxData(height=11), transaction,
            TxFlags.StateSettled, None) ],
            completion_callback=writer.get_callback())
        assert writer.succeeded()

    # Settled: height visible.
    assert cache.get_height(transaction_hash) == 11
    # Cleared: still visible.
    cache.update_flags(transaction_hash, TxFlags.StateCleared, TxFlags.HasByteData)
    assert cache.get_height(transaction_hash) == 11
    # Received: height no longer reported.
    cache.update_flags(transaction_hash, TxFlags.StateReceived, TxFlags.HasByteData)
    assert cache.get_height(transaction_hash) is None
def test_uncleared_bytedata_requirements(self) -> None:
    # Transactions in any broadcast-related state must carry bytedata; both
    # adding without it and updating to clear it must be rejected.
    cache = TransactionCache(self.store)
    tx_1 = Transaction.from_hex(tx_hex_1)
    tx_hash_1 = tx_1.hash()
    data = TxData(position=11)
    # Adding any stateful transaction without bytedata is invalid.
    for state_flag in TRANSACTION_FLAGS:
        with pytest.raises(wallet_database.InvalidDataError):
            cache.add([ (tx_hash_1, data, None, state_flag, None) ])

    with SynchronousWriter() as writer:
        cache.add([ (tx_hash_1, data, tx_1, TxFlags.StateSigned, None) ],
            completion_callback=writer.get_callback())
        assert writer.succeeded()

    # We are applying a clearing of the bytedata, this should be invalid given uncleared.
    for state_flag in TRANSACTION_FLAGS:
        with pytest.raises(wallet_database.InvalidDataError):
            cache.update([ (tx_hash_1, data, None, state_flag | TxFlags.HasByteData) ])
def broadcast_transaction(self, tx_hex: Optional[str] = None,
        wallet_name: Optional[str] = None, wallet_memo: Optional[str] = None) -> str:
    """Broadcast a raw transaction hex, optionally labelling it in a wallet.

    NOTE(review): `tx_hex` defaults to None but is passed straight to
    Transaction.from_hex, which would fail on None — the default appears to
    exist only for keyword-call convenience; confirm callers always pass it.
    NOTE(review): the mempool-chain branch returns a jsonrpclib.Fault even
    though the annotation says `-> str`; callers presumably handle both.
    """
    wallet = None
    # A memo can only be attached when a wallet is identified too.
    if wallet_name and wallet_memo:
        wallet = self._get_wallet(wallet_name)
    tx = Transaction.from_hex(tx_hex)
    try:
        tx_id = app_state.daemon.network.broadcast_transaction_and_wait(tx)
    except aiorpcx.jsonrpc.RPCError as e:
        # Known soft failure: too many unconfirmed ancestors in the mempool.
        if e.code == 1 and e.message.find("too-long-mempool-chain") != -1:
            return jsonrpclib.Fault(100, "too-long-mempool-chain")
        # HACK: debugging print left in place; consider logging instead.
        print("raising rpc error", e.code, e.message)
        raise e
    # Only label fully-signed transactions, and only when a wallet was given.
    if tx.is_complete() and wallet_name and wallet_memo:
        wallet.set_transaction_label(tx.hash(), wallet_memo)
    return tx_id
def test_delete(self):
    """Deleting a cached transaction removes it from both the backing store
    and the in-memory cache."""
    cache = TransactionCache(self.store)
    transaction = Transaction.from_hex(tx_hex_1)
    transaction_hash = transaction.hash()
    metadata = TxData(position=11)
    with SynchronousWriter() as writer:
        cache.add([ (transaction_hash, metadata, transaction, TxFlags.StateDispatched, None) ],
            completion_callback=writer.get_callback())
        assert writer.succeeded()

    # Present everywhere before deletion.
    assert len(self.store.read_metadata(tx_hashes=[ transaction_hash ]))
    assert cache.is_cached(transaction_hash)

    with SynchronousWriter() as writer:
        cache.delete(transaction_hash, completion_callback=writer.get_callback())
        assert writer.succeeded()

    # Gone everywhere after deletion.
    assert not len(self.store.read_metadata(tx_hashes=[ transaction_hash ]))
    assert not cache.is_cached(transaction_hash)
def test_add_transaction_update(self):
    """An existing metadata-only entry is upgraded in place (bytedata and
    state) when the full transaction is later added.

    Consistency fix: rewritten from unittest-style ``self.assert*`` calls to
    bare ``assert`` statements, matching the pytest style used by the other
    tests in this file.
    """
    cache = TxCache(self.store)
    tx = Transaction.from_hex(tx_hex_1)
    # Metadata-only row: no bytedata, StateCleared.
    data = [ tx.txid(), TxData(height=1295924, timestamp=1555296290, position=4,
        fee=None), None, TxFlags.StateCleared ]
    cache.add([data])

    entry = cache.get_entry(tx.txid())
    assert entry is not None
    assert TxFlags.StateCleared == entry.flags & TxFlags.StateCleared

    cache.add_transaction(tx, TxFlags.StateSettled)

    entry = cache.get_entry(tx.txid())
    assert entry is not None
    assert entry.bytedata is not None
    assert TxFlags.StateSettled == entry.flags & TxFlags.StateSettled
def test_tx_signed(self):
    # Deserialize a fully-signed transaction blob and verify every parsed
    # field, the round-trip serialization and the estimated size.
    tx = Transaction.from_hex(signed_blob)
    assert tx.version == 1
    assert len(tx.inputs) == 1
    txin = tx.inputs[0]
    assert txin.prev_hash.hex() == '49f35e43fefd22d8bb9e4b3ff294c6286154c25712baf6ab77b646e5074d6aed'
    assert txin.prev_idx == 1
    # Standard P2PKH script_sig: <sig+sighash> <pubkey>.
    assert txin.script_sig.to_hex() == '473044022025bdc804c6fe30966f6822dc25086bc6bb0366016e68e880cf6efd2468921f3202200e665db0404f6d6d9f86f73838306ac55bb0d0f6040ac6047d4e820f24f46885412103b5bbebceeb33c1b61f649596b9c3611c6b2853a1f6b48bce05dd54f667fa2166'
    assert txin.sequence == 4294967294
    # The parsed signature retains its trailing sighash byte (0x41).
    assert txin.signatures == [bytes.fromhex('3044022025bdc804c6fe30966f6822dc25086bc6bb0366016e68e880cf6efd2468921f3202200e665db0404f6d6d9f86f73838306ac55bb0d0f6040ac6047d4e820f24f4688541')]
    assert txin.x_pubkeys == [XPublicKey('03b5bbebceeb33c1b61f649596b9c3611c6b2853a1f6b48bce05dd54f667fa2166')]
    assert txin.address == Address.from_string('13Vp8Y3hD5Cb6sERfpxePz5vGJizXbWciN')
    assert txin.threshold == 1
    assert tx.outputs == [TxOutput(20112408, Address.from_string(
        '1MYXdf4moacvaEKZ57ozerpJ3t9xSeN6LK').to_script())]
    assert tx.locktime == 507231
    # Fully-signed transactions report complete=True and serialize unchanged.
    assert tx.as_dict() == {'hex': signed_blob, 'complete': True}
    assert tx.serialize() == signed_blob

    # Applying the signatures from the same blob must be a no-op.
    tx.update_signatures(signed_blob)
    assert tx.estimated_size() == 192
def test_get_unsynced_hashes(self):
    """A metadata-only entry counts as unsynced; once its bytedata is stored
    it no longer does."""
    cache = TransactionCache(self.store)
    transaction = Transaction.from_hex(tx_hex_1)
    transaction_hash = transaction.hash()

    # Add without bytedata: the transaction is known but not synced.
    with SynchronousWriter() as writer:
        cache.add([ (transaction_hash, TxData(height=11), None, TxFlags.Unset, None) ],
            completion_callback=writer.get_callback())
        assert writer.succeeded()
    assert len(cache.get_unsynced_hashes()) == 1

    # Supply the bytedata: the transaction drops out of the unsynced set.
    with SynchronousWriter() as writer:
        cache.update([ (transaction_hash, TxData(), transaction, TxFlags.HasByteData) ],
            completion_callback=writer.get_callback())
        assert writer.succeeded()
    assert len(cache.get_unsynced_hashes()) == 0
def test_add_then_update(self):
    # An update that only carries new metadata must merge its flags with the
    # existing entry and must NOT clear the previously stored bytedata.
    cache = TransactionCache(self.store)
    tx_1 = Transaction.from_hex(tx_hex_1)
    tx_hash_1 = tx_1.hash()
    metadata_1 = TxData(position=11)
    with SynchronousWriter() as writer:
        cache.add([ (tx_hash_1, metadata_1, tx_1, TxFlags.StateDispatched, None) ],
            completion_callback=writer.get_callback())
        assert writer.succeeded()
    assert cache.is_cached(tx_hash_1)
    entry = cache.get_entry(tx_hash_1)
    assert TxFlags.HasByteData | TxFlags.HasPosition | TxFlags.StateDispatched == entry.flags
    assert cache.have_transaction_data_cached(tx_hash_1)

    # NOTE: We are not updating bytedata, and it should remain the same. The flags we pass
    # into update are treated specially to achieve this.
    metadata_2 = TxData(fee=10, height=88)
    propagate_flags = TxFlags.HasFee | TxFlags.HasHeight
    with SynchronousWriter() as writer:
        cache.update([ (tx_hash_1, metadata_2, None,
            propagate_flags | TxFlags.HasPosition) ],
            completion_callback=writer.get_callback())
        assert writer.succeeded()

    # Check the cache to see that the flags are correct and that bytedata is cached.
    entry = cache.get_entry(tx_hash_1)
    expected_flags = propagate_flags | TxFlags.StateDispatched | TxFlags.HasByteData
    assert expected_flags == entry.flags, \
        f"{TxFlags.to_repr(expected_flags)} != {TxFlags.to_repr(entry.flags)}"
    assert cache.have_transaction_data_cached(tx_hash_1)

    # Check the store to see that the flags are correct and the bytedata is retained.
    rows = self.store.read(tx_hashes=[tx_hash_1])
    assert 1 == len(rows)
    # Row shape: (tx_hash, bytedata, flags, metadata).
    get_tx_hash, bytedata_get, flags_get, metadata_get = rows[0]
    assert tx_1.to_bytes() == bytedata_get
    assert flags_get & TxFlags.HasByteData != 0
async def test_split_utxos_good_response(self, monkeypatch, cli, spendable_utxos):
    """Happy path for the /txs/split_utxos REST endpoint; the response must
    carry the txid of the canned rawtx."""
    monkeypatch.setattr(self.rest_server, '_get_account', _fake_get_account_succeeded)
    monkeypatch.setattr(self.rest_server.app_state.app,
        'get_and_set_frozen_utxos_for_tx', _fake_get_frozen_utxos_for_tx)

    # mock request
    network = "test"
    wallet_name = "wallet_file1.sqlite"
    account_id = "1"
    password = "******"
    request_body = {"split_count": 10, "desired_utxo_count": 100,
        "split_value": 3000, "password": password}
    resp = await cli.request(
        path=f"/v1/{network}/dapp/wallets/{wallet_name}/"
        f"{account_id}/txs/split_utxos",
        method='post', json=request_body)

    # check
    expected_json = {"txid": Transaction.from_hex(rawtx).txid()}
    assert resp.status == 200, await resp.read()
    response = await resp.read()
    assert json.loads(response) == expected_json
def test_apply_reorg(self) -> None:
    # A reorg above `common_height` must reset settled transactions above that
    # height back to StateCleared with height zeroed and position dropped,
    # while leaving lower/equal heights and non-cleared states untouched.
    common_height = 5
    cache = TransactionCache(self.store)

    # Add the transaction that should be reset back to settled, with data fields cleared.
    tx_y1 = Transaction.from_hex(tx_hex_1)
    tx_hash_y1 = tx_y1.hash()
    data_y1 = TxData(height=common_height+1, position=33, fee=44, date_added=1,
        date_updated=1)
    with SynchronousWriter() as writer:
        cache.add([ (tx_hash_y1, data_y1, tx_y1, TxFlags.StateSettled, None) ],
            completion_callback=writer.get_callback())
        assert writer.succeeded()

    # Add the transaction that would be reset but is below the common height.
    tx_n1 = Transaction.from_hex(tx_hex_2)
    tx_hash_n1 = tx_n1.hash()
    data_n1 = TxData(height=common_height-1, position=33, fee=44, date_added=1,
        date_updated=1)
    with SynchronousWriter() as writer:
        cache.add([ (tx_hash_n1, data_n1, tx_n1, TxFlags.StateSettled, None) ],
            completion_callback=writer.get_callback())
        assert writer.succeeded()

    # Add the transaction that would be reset but is the common height.
    tx_n2 = Transaction.from_hex(tx_hex_3)
    tx_hash_n2 = tx_n2.hash()
    data_n2 = TxData(height=common_height, position=33, fee=44, date_added=1,
        date_updated=1)
    with SynchronousWriter() as writer:
        cache.add([ (tx_hash_n2, data_n2, tx_n2, TxFlags.StateSettled, None) ],
            completion_callback=writer.get_callback())
        assert writer.succeeded()

    # Add a canary transaction that should remain untouched due to non-cleared state.
    tx_n3 = Transaction.from_hex(tx_hex_4)
    tx_hash_n3 = tx_n3.hash()
    data_n3 = TxData(height=111, position=333, fee=444, date_added=1, date_updated=1)
    with SynchronousWriter() as writer:
        cache.add([ (tx_hash_n3, data_n3, tx_n3, TxFlags.StateDispatched, None) ],
            completion_callback=writer.get_callback())
        assert writer.succeeded()

    # Delete as if a reorg happened above the suitable but excluded canary transaction.
    with SynchronousWriter() as writer:
        cache.apply_reorg(5, completion_callback=writer.get_callback())
        assert writer.succeeded()

    metadata_entries = cache.get_entries(TxFlags.HasByteData, TxFlags.HasByteData)
    assert 4 == len(metadata_entries)

    # Affected, canary above common height: reset to cleared, height zeroed,
    # position dropped, fee retained.
    y1 = [ m[1] for m in metadata_entries if m[0] == tx_hash_y1 ][0]
    assert 0 == y1.metadata.height
    assert None is y1.metadata.position
    assert data_y1.fee == y1.metadata.fee
    assert TxFlags.StateCleared | TxFlags.HasByteData | TxFlags.HasFee == y1.flags, \
        TxFlags.to_repr(y1.flags)

    # Flags expected on the untouched (skipped) entries.
    expected_flags = (TxFlags.HasByteData | TxFlags.HasFee | TxFlags.HasHeight
        | TxFlags.HasPosition)

    # Skipped, old enough to survive.
    n1 = [ m[1] for m in metadata_entries if m[0] == tx_hash_n1 ][0]
    assert data_n1.height == n1.metadata.height
    assert data_n1.position == n1.metadata.position
    assert data_n1.fee == n1.metadata.fee
    assert TxFlags.StateSettled | expected_flags == n1.flags, TxFlags.to_repr(n1.flags)

    # Skipped, canary common height.
    n2 = [ m[1] for m in metadata_entries if m[0] == tx_hash_n2 ][0]
    assert data_n2.height == n2.metadata.height
    assert data_n2.position == n2.metadata.position
    assert data_n2.fee == n2.metadata.fee
    assert TxFlags.StateSettled | expected_flags == n2.flags, TxFlags.to_repr(n2.flags)

    # Skipped, canary non-cleared.
    n3 = [ m[1] for m in metadata_entries if m[0] == tx_hash_n3 ][0]
    assert data_n3.height == n3.metadata.height
    assert data_n3.position == n3.metadata.position
    assert data_n3.fee == n3.metadata.fee
    assert TxFlags.StateDispatched | expected_flags == n3.flags, TxFlags.to_repr(n3.flags)