def test_xpub_data_comparison():
    """Verify that XpubData __eq__, __ne__ and __hash__ all agree with each other."""
    key_a = HDKey.from_xpub('xpub6DCi5iJ57ZPd5qPzvTm5hUt6X23TJdh9H4NjNsNbt7t7UuTMJfawQWsdWRFhfLwkiMkB1rQ4ZJWLB9YBnzR7kbs9N8b2PsKZgKUHQm1X4or')  # noqa: E501
    key_b = HDKey.from_xpub('xpub68V4ZQQ62mea7ZUKn2urQu47Bdn2Wr7SxrBxBDDwE3kjytj361YBGSKDT4WoBrE5htrSB8eAMe59NPnKrcAbiv2veN5GQUmfdjRddD1Hxrk')  # noqa: E501
    data_a = XpubData(xpub=key_a)
    data_b = XpubData(xpub=key_b)
    mapping = {data_a: 1}
    # Exercise both __eq__ and __ne__ explicitly. There is a reason for both
    # queries: in the first implementation they did not both work correctly.
    assert not (data_a == data_b)
    assert data_a != data_b
    assert data_a in mapping
    assert data_b not in mapping

    # Same underlying key in both wrappers -> equal
    data_a = XpubData(xpub=key_a)
    data_b = XpubData(xpub=key_a)
    assert data_a == data_b
    assert not (data_a != data_b)

    # Same key but different derivation paths -> not equal
    data_a = XpubData(xpub=key_a, derivation_path='m')
    data_b = XpubData(xpub=key_a, derivation_path='m/0/0')
    assert data_a != data_b
    assert not (data_a == data_b)
def test_from_xpub_with_conversion():
    """Check that converting a legacy xpub to ypub/zpub keeps all key material intact."""
    legacy_xpub = 'xpub6CjniigyzMWgVDHvDpgvsroPkTJeqUbrHJaLHARHmAM8zuAbCjmHpp3QhKTcnnscd6iBDrqmABCJjnpwUW42cQjtvKjaEZRcShHKEVh35Y8'  # noqa: E501
    legacy_key = HDKey.from_xpub(xpub=legacy_xpub, path='m')
    # Every field of the underlying key must survive the type conversion
    shared_attrs = (
        'network',
        'depth',
        'parent_fingerprint',
        'chain_code',
        'fingerprint',
        'pubkey',
    )

    ypub_key = HDKey.from_xpub(
        xpub=legacy_xpub,
        xpub_type=XpubType.P2SH_P2WPKH,
        path='m',
    )
    for attr in shared_attrs:
        assert getattr(legacy_key, attr) == getattr(ypub_key, attr)
    assert ypub_key.xpub == 'ypub6Xa42PMu934ALWV34BUZ5wttvRT6n6bMCR6Z4ZKB9Aj23zypTPvrSshYiXRCnhXY2jpyyLSKcqYrd5SWCCU3QeRVnfRzpUF6iRLxd55duzL'  # noqa: E501
    assert ypub_key.hint == 'ypub'

    zpub_key = HDKey.from_xpub(
        xpub=legacy_xpub,
        xpub_type=XpubType.WPKH,
        path='m',
    )
    for attr in shared_attrs:
        assert getattr(legacy_key, attr) == getattr(zpub_key, attr)
    assert zpub_key.xpub == 'zpub6rQKL42pHibeBog9tYGBJ2zQ6PbYiiar7XcmqxD4XB6u76o3i46R4wMgjjNnncBTSNwnip2t5VuQWN44utt4Ct76f18RQP4az9Qc1eUEkSY'  # noqa: E501
    assert zpub_key.hint == 'zpub'
def test_maybe_schedule_xpub_derivation(task_manager, database):
    """Check that the task manager schedules the xpub address derivation task."""
    xpub = 'xpub68V4ZQQ62mea7ZUKn2urQu47Bdn2Wr7SxrBxBDDwE3kjytj361YBGSKDT4WoBrE5htrSB8eAMe59NPnKrcAbiv2veN5GQUmfdjRddD1Hxrk'  # noqa: E501
    xpub_data = XpubData(
        xpub=HDKey.from_xpub(xpub=xpub, path='m'),
        derivation_path='m/0/0',
    )
    database.add_bitcoin_xpub(xpub_data)
    task_manager.potential_tasks = [task_manager._maybe_schedule_xpub_derivation]
    derive_patch = patch(
        'rotkehlchen.chain.bitcoin.xpub.XpubManager.check_for_new_xpub_addresses',
        return_value=None,
    )
    timeout = 4
    try:
        # Wait until the patched derivation gets called exactly once
        with gevent.Timeout(timeout), derive_patch as derive_mock:
            task_manager.schedule()
            while derive_mock.call_count != 1:
                gevent.sleep(.2)
    except gevent.Timeout as e:
        raise AssertionError(
            f'xpub derivation query was not scheduled within {timeout} seconds',
        ) from e
def test_ypub_to_addresses():
    """Test vectors from here: https://iancoleman.io/bip39/"""
    xpub = 'ypub6WkRUvNhspMCJLiLgeP7oL1pzrJ6wA2tpwsKtXnbmpdAGmHHcC6FeZeF4VurGU14dSjGpF2xLavPhgvCQeXd6JxYgSfbaD1wSUi2XmEsx33'  # noqa: E501
    root = HDKey.from_xpub(xpub=xpub, path='m')
    # First derivation level: m/i
    first_level = [
        '3C2NiJHhXKvHDkWp2rq8LyE7G1E7VTndst',
        '34SjMcbLquZ7HmFmQiAHqEHY4mBEbvGeVL',
        '3J7sT2fbDaF3XrjpWM5GsUyaDr7i7psi88',
        '36Z62MQfJHF11DWqMMzc3rqLiDFGiVF8CB',
        '33k4CdyQJFwXQD9giSKyo36mTvE9Y6C9cP',
    ]
    for idx, expected in enumerate(first_level):
        assert root.derive_path(f'm/{idx}').address() == expected

    # Second derivation level: m/0/i
    second_level = [
        '3EQR3ogLugdAw6gwdQZGfr6bx7vHLPiZo5',
        '361gjqdsUY8xBzArsR2ksggEWBaztAqhFL',
        '3HtN6sDxDA1ddnQsvwhvBtQa7JrkuAiVx3',
        '3LNXdKxd8c5RDbB5XRvGMwc2wfv4v26knu',
        '35TfgdHP5zqAQHcFwGCw4n2UigBZYx7dmQ',
    ]
    for idx, expected in enumerate(second_level):
        assert root.derive_path(f'm/0/{idx}').address() == expected
def test_xpub_to_addresses():
    """Test vectors from here: https://iancoleman.io/bip39/"""
    xpub = 'xpub68V4ZQQ62mea7ZUKn2urQu47Bdn2Wr7SxrBxBDDwE3kjytj361YBGSKDT4WoBrE5htrSB8eAMe59NPnKrcAbiv2veN5GQUmfdjRddD1Hxrk'  # noqa: E501
    root = HDKey.from_xpub(xpub=xpub, path='m')
    # First derivation level: m/i
    first_level = [
        '1LZypJUwJJRdfdndwvDmtAjrVYaHko136r',
        '1MKSdDCtBSXiE49vik8xUG2pTgTGGh5pqe',
        '1DiF6JoLhsAekqps4HURHNd41ZofQSF1t',
        '1AMrsvqsJzDq25QnaJzX5BzEvdqQ8T6MkT',
        '16Ny1KwjEB62XzDsjnS4new32nYXGBAkbt',
    ]
    for idx, expected in enumerate(first_level):
        assert root.derive_path(f'm/{idx}').address() == expected

    # Second derivation level: m/0/i
    second_level = [
        '1K3WM7WNiyZCkH31eMoEDwEcmnGNvQfZVA',
        '1L5ic1V3bTJahEdwjufGJ28PjRcMcHWGka',
        '16zNpyv8KxChtjXnE5nYcPqcXcrSQXX2JW',
        '1NyBphgGhb29kj8UGjixxJCK5XtKLwFj8A',
        '12wxFzpjdymPk3xnHmdDLCTXUT9keY3XRd',
    ]
    for idx, expected in enumerate(second_level):
        assert root.derive_path(f'm/0/{idx}').address() == expected
def _derive_addresses_loop(
        account_index: int,
        start_index: int,
        root: HDKey,
) -> List[XpubDerivedAddressData]:
    """Derive child addresses of the given account in batches of
    XPUB_ADDRESS_STEP, stopping once a batch contains an address with no
    transactions. Addresses without transactions that sit before the highest
    used derived index are also included, so new address generation can later
    resume from that max index.

    May raise:
    - RemoteError: if blockcypher/blockchain.info can't be reached
    """
    step_index = start_index
    addresses: List[XpubDerivedAddressData] = []
    should_continue = True
    while should_continue:
        batch_addresses: List[Tuple[int, BTCAddress]] = []
        for idx in range(step_index, step_index + XPUB_ADDRESS_STEP):
            child = root.derive_child(idx)
            batch_addresses.append((idx, child.address()))

        have_tx_mapping = have_bitcoin_transactions(
            [x[1] for x in batch_addresses])
        for idx, address in batch_addresses:
            have_tx, balance = have_tx_mapping[address]
            if have_tx:
                addresses.append(
                    XpubDerivedAddressData(
                        account_index=account_index,
                        derived_index=idx,
                        address=address,
                        balance=balance,
                    ))
            else:
                should_continue = False

        # do one more pass and add any addresses with no transactions before the max index
        # this is so we can start new address generation from the max index later
        if len(addresses) != 0:
            # Fix: compare derived indices of already-found addresses, not the
            # account index (field 0 of XpubDerivedAddressData) and not list
            # positions -- batch positions start at 0 while derived indices
            # start at step_index.
            max_index = max(x.derived_index for x in addresses)
            for idx, address in batch_addresses:
                if idx >= max_index:
                    continue
                have_tx, balance = have_tx_mapping[address]
                if not have_tx:
                    addresses.append(
                        XpubDerivedAddressData(
                            account_index=account_index,
                            derived_index=idx,
                            address=address,
                            balance=balance,
                        ))

        step_index += XPUB_ADDRESS_STEP
    return addresses
def test_from_bad_xpub():
    """Check that malformed extended public keys raise XPUBError."""
    bad_xpubs = (
        'ddodod',
        'zpub6quTRdxqWmerHdiWVKZdLMp9FY641F1F171gfT2RS4D1FyHnutwFSMiab58Nbsdu4fXBaFwpy5xyGnKZ8d6xn2j4r4yNmQ3Yp33333333333333yDDxQUo3q',  # noqa: E501
        'xpriv68V4ZQQ62mea7ZUKn2urQu47Bdn2Wr7SxrBxBDDwE3kjytj361YBGSKDT4WoBrE5htrSB8eAMe59NPnKrcAbiv2veN5GQUmfdjRddD1Hxrk',  # noqa: E501
        'apfiv68V4ZQQ62mea7ZUKn2urQu47Bdn2Wr7SxrBxBDDwE3kjytj361YBGSKDT4WoBrE5htrSB8eAMe59NPnKrcAbiv2veN5GQUmfdjRddD1Hxrk',  # noqa: E501
    )
    for bad_input in bad_xpubs:
        with pytest.raises(XPUBError):
            HDKey.from_xpub(bad_input)
def test_zpub_to_addresses():
    """Test vectors from here: https://iancoleman.io/bip39/"""
    zpub = 'zpub6quTRdxqWmerHdiWVKZdLMp9FY641F1F171gfT2RS4D1FyHnutwFSMiab58Nbsdu4fXBaFwpy5xyGnKZ8d6xn2j4r4yNmQ3Yp3yDDxQUo3q'  # noqa: E501
    root = HDKey.from_xpub(xpub=zpub, path='m')
    # Receiving chain addresses: m/0/i
    expected_addresses = [
        'bc1qc3qcxs025ka9l6qn0q5cyvmnpwrqw2z49qwrx5',
        'bc1qnus7355ecckmeyrmvv56mlm42lxvwa4wuq5aev',
        'bc1qup7f8g5k3h5uqzfjed03ztgn8hhe542w69wc0g',
        'bc1qr4r8vryfzexvhjrx5fh5uj0s2ead8awpqspqra',
        'bc1qm2cy0wg6qej4taaywtfx9ccw02zep08r5295gj',
    ]
    for idx, expected in enumerate(expected_addresses):
        assert root.derive_path(f'm/0/{idx}').address() == expected
def _deserialize(
        self,
        value: str,
        attr: Optional[str],  # pylint: disable=unused-argument
        data: Optional[Mapping[str, Any]],  # pylint: disable=unused-argument
        **_kwargs: Any,
) -> HDKey:
    """Deserialize a string field into an HDKey rooted at path 'm'.

    May raise:
    - ValidationError: if the value is not a string or is not a valid xpub
    """
    if not isinstance(value, str):
        raise ValidationError('Xpub should be a string')
    try:
        hdkey = HDKey.from_xpub(value, path='m')
    except XPUBError as e:
        # Chain the original error so the root cause is preserved,
        # consistent with how transform_data handles XPUBError
        raise ValidationError(str(e)) from e
    return hdkey
def transform_data(  # pylint: disable=no-self-use
        self,
        data: Dict[str, Any],
        **_kwargs: Any,
) -> Any:
    """Pre-process xpub input: resolve the optional xpub type and replace the
    raw xpub string in the data with the constructed HDKey."""
    raw_type = data.pop('xpub_type', None)
    try:
        key_type = XpubType.deserialize(raw_type) if raw_type is not None else None
        data['xpub'] = HDKey.from_xpub(data['xpub'], xpub_type=key_type, path='m')
    except (DeserializationError, XPUBError) as e:
        raise ValidationError(
            f'Failed to initialize an xpub due to {str(e)}',
            field_name='xpub',
        ) from e
    return data
def test_get_last_xpub_derived_indices(setup_db_for_xpub_tests):
    """Check retrieval of the last derived receiving/change indices per xpub."""
    db, xpub1, xpub2, xpub3, _ = setup_db_for_xpub_tests
    # Xpubs existing in the DB that have derived addresses
    receiving_idx, change_idx = db.get_last_xpub_derived_indices(xpub1)
    assert (receiving_idx, change_idx) == (1, 0)
    receiving_idx, change_idx = db.get_last_xpub_derived_indices(xpub2)
    assert (receiving_idx, change_idx) == (0, 3)
    # Xpub existing in the DB without any derived addresses
    receiving_idx, change_idx = db.get_last_xpub_derived_indices(xpub3)
    assert receiving_idx == change_idx == 0
    # Unknown xpub (not in the DB)
    unknown_xpub = 'xpub6D1ZRhLSRWWGowFT22WJYYJx3GH5wxidsHcEm6NYeXfMAGxKWiQ5dQ8hSz7gdJsE86Lrjf1MN7SCKowZU8VxZ45Z1KeNP5CZ514JbCamRdC'  # noqa: E501
    xpub_data = XpubData(
        xpub=HDKey.from_xpub(xpub=unknown_xpub),
        derivation_path='m/0/0/0',
    )
    receiving_idx, change_idx = db.get_last_xpub_derived_indices(xpub_data)
    assert receiving_idx == change_idx == 0
def setup_db_for_xpub_tests(data_dir, username):
    """Set up a DB containing two tags, two xpubs with derived addresses and a
    third xpub (same key as the first) with no derivation path.

    Returns (db, xpub_data1, xpub_data2, xpub_data3, all_addresses).
    """
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    data.db.add_tag('public', 'foooo', 'ffffff', '000000')
    data.db.add_tag('desktop', 'boooo', 'ffffff', '000000')

    xpub = 'xpub68V4ZQQ62mea7ZUKn2urQu47Bdn2Wr7SxrBxBDDwE3kjytj361YBGSKDT4WoBrE5htrSB8eAMe59NPnKrcAbiv2veN5GQUmfdjRddD1Hxrk'  # noqa: E501
    derivation_path = 'm/0/0/0'
    xpub_data1 = XpubData(
        xpub=HDKey.from_xpub(xpub=xpub, path='m'),
        derivation_path=derivation_path,
        label='xpub1',
        tags=['public', 'desktop'],
    )
    data.db.ensure_tags_exist([xpub_data1], action='adding', data_type='bitcoin_xpub')
    insert_tag_mappings(  # if we got tags add them to the xpub
        cursor=data.db.conn.cursor(),
        data=[xpub_data1],
        object_reference_keys=['xpub.xpub', 'derivation_path'],
    )
    data.db.add_bitcoin_xpub(xpub_data1)
    legacy_addresses = [
        '1LZypJUwJJRdfdndwvDmtAjrVYaHko136r',
        '1MKSdDCtBSXiE49vik8xUG2pTgTGGh5pqe',
        '12wxFzpjdymPk3xnHmdDLCTXUT9keY3XRd',
        '16zNpyv8KxChtjXnE5nYcPqcXcrSQXX2JW',
    ]
    all_addresses = list(legacy_addresses)
    account_data = [BlockchainAccountData(x) for x in legacy_addresses]
    data.db.add_blockchain_accounts(
        blockchain=SupportedBlockchain.BITCOIN,
        account_data=account_data,
    )
    insert_tag_mappings(  # if we got tags add them to the existing addresses too
        cursor=data.db.conn.cursor(),
        data=account_data,
        object_reference_keys=['address'],
    )
    # Only the first two legacy addresses are mapped as derived from xpub1
    data.db.ensure_xpub_mappings_exist(
        xpub=xpub,
        derivation_path=derivation_path,
        derived_addresses_data=[
            XpubDerivedAddressData(0, 0, legacy_addresses[0], ZERO),
            XpubDerivedAddressData(0, 1, legacy_addresses[1], ZERO),
        ],
    )

    xpub = 'zpub6quTRdxqWmerHdiWVKZdLMp9FY641F1F171gfT2RS4D1FyHnutwFSMiab58Nbsdu4fXBaFwpy5xyGnKZ8d6xn2j4r4yNmQ3Yp3yDDxQUo3q'  # noqa: E501
    derivation_path = 'm/0'
    xpub_data2 = XpubData(
        xpub=HDKey.from_xpub(xpub=xpub, path='m'),
        derivation_path=derivation_path,
    )
    data.db.add_bitcoin_xpub(xpub_data2)
    bech32_addresses = [
        'bc1qc3qcxs025ka9l6qn0q5cyvmnpwrqw2z49qwrx5',
        'bc1qnus7355ecckmeyrmvv56mlm42lxvwa4wuq5aev',
        'bc1qup7f8g5k3h5uqzfjed03ztgn8hhe542w69wc0g',
        'bc1qr4r8vryfzexvhjrx5fh5uj0s2ead8awpqspqra',
    ]
    all_addresses.extend(bech32_addresses)
    data.db.add_blockchain_accounts(
        blockchain=SupportedBlockchain.BITCOIN,
        account_data=[BlockchainAccountData(x) for x in bech32_addresses],
    )
    data.db.ensure_xpub_mappings_exist(
        xpub=xpub,
        derivation_path=derivation_path,
        derived_addresses_data=[
            XpubDerivedAddressData(1, idx, address, ZERO)
            for idx, address in enumerate(bech32_addresses)
        ],
    )

    # Finally also add the same xpub as xpub1 with no derivation path
    xpub = 'xpub68V4ZQQ62mea7ZUKn2urQu47Bdn2Wr7SxrBxBDDwE3kjytj361YBGSKDT4WoBrE5htrSB8eAMe59NPnKrcAbiv2veN5GQUmfdjRddD1Hxrk'  # noqa: E501
    xpub_data3 = XpubData(
        xpub=HDKey.from_xpub(xpub=xpub, path='m'),
        derivation_path=None,
    )
    data.db.add_bitcoin_xpub(xpub_data3)
    return data.db, xpub_data1, xpub_data2, xpub_data3, all_addresses