def parse_output(self, x) -> bytes:
    """Parse a user-supplied output target into a raw output script.

    *x* is either a bitcoin address or a human-readable script (opcode
    names / hex pushes).  Addresses are tried first; anything that fails
    address parsing is treated as a script.

    Returns the serialized output script as bytes.
    """
    try:
        address = self.parse_address(x)
        return bfh(bitcoin.address_to_script(address))
    # was a bare `except:`, which also swallowed KeyboardInterrupt/SystemExit;
    # only parsing failures should trigger the script fallback
    except Exception:
        script = self.parse_script(x)
        return bfh(script)
def test_encode_decode_msg__missing_optional_field_will_not_appear_in_decoded_dict(self):
    # "channel_update": optional field "htlc_maximum_msat" missing -> does not get put into dict
    # encoding without the optional field must still produce a valid message
    self.assertEqual(
        bfh("010200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a0293e4eeb3da6e6f56f81ed595f57880d1a21569e13eefdd951284b5a62664900d43100006f00025e6ed0830100009000000000000000c8000001f400000023"),
        encode_msg(
            "channel_update",
            short_channel_id=ShortChannelID.from_components(54321, 111, 2),
            channel_flags=b'\x00',
            message_flags=b'\x01',
            cltv_expiry_delta=144,
            htlc_minimum_msat=200,
            fee_base_msat=500,
            fee_proportional_millionths=35,
            chain_hash=constants.net.rev_genesis_bytes(),
            timestamp=1584320643,
        ))
    # round-trip: the decoded dict carries all present fields but no
    # 'htlc_maximum_msat' key (the field was absent on the wire)
    self.assertEqual(
        ('channel_update', {
            'chain_hash': b'\xa0)>N\xeb=\xa6\xe6\xf5o\x81\xedY_W\x88\r\x1a!V\x9e\x13\xee\xfd\xd9Q(KZbfI',
            'channel_flags': b'\x00',
            'cltv_expiry_delta': 144,
            'fee_base_msat': 500,
            'fee_proportional_millionths': 35,
            'htlc_minimum_msat': 200,
            'message_flags': b'\x01',
            'short_channel_id': b'\x00\xd41\x00\x00o\x00\x02',
            'signature': bytes(64),
            'timestamp': 1584320643
        }),
        decode_msg(
            bfh("010200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a0293e4eeb3da6e6f56f81ed595f57880d1a21569e13eefdd951284b5a62664900d43100006f00025e6ed0830100009000000000000000c8000001f400000023")))
def test_verify_fail_f_tx_even(self):
    """A merkle proof whose inner node is itself a valid tx must be rejected.
    ('even' fake leaf position)"""
    # first 32 bytes of T, hash-encoded, play the role of the leaf tx hash
    leaf_hash = hash_encode(bfh(VALID_64_BYTE_TX[:64]))
    # last 32 bytes of T, hash-encoded, act as the forged inner branch node
    forged_inner_node = hash_encode(bfh(VALID_64_BYTE_TX[64:]))
    forged_branch = [forged_inner_node] + MERKLE_BRANCH
    with self.assertRaises(InnerNodeOfSpvProofIsValidTx):
        SPV.hash_merkle_root(forged_branch, leaf_hash, 6)
def parse_script(self, x):
    """Convert a human-readable script (opcode names / hex pushes) to a hex string."""
    pieces = []
    for token in x.split():
        if token.startswith('OP_'):
            op = opcodes[token]
            assert op < 256  # opcode is single-byte
            pieces.append(bitcoin.int_to_hex(op))
        else:
            bfh(token)  # raises if the token is not valid hex data
            pieces.append(push_script(token))
    return ''.join(pieces)
def test_decode_onion_error(self):
    # test vector from bolt-04
    payment_path_pubkeys = [
        bfh('02eec7245d6b7d2ccb30380bfbe2a3648cd7a942653f5aa340edcea1f283686619'),
        bfh('0324653eac434488002cc06bbfb7f10fe18991e35f9fe4302dbea6d2353dc0ab1c'),
        bfh('027f31ebc5462c1fdce1b737ecff52d37d75dea43ce11c74d25aa297165faa2007'),
        bfh('032c0b7cf95324a07d05398b240174dc0c2be444d96b159aa6c7f7b1e668680991'),
        bfh('02edabbd16b41c8371b92ef2f04c1185b4f03b6dcd52ba9b78d9d7c89c8f221145'),
    ]
    session_key = bfh(
        '4141414141414141414141414141414141414141414141414141414141414141')
    error_packet_for_node_0 = bfh(
        '9c5add3963fc7f6ed7f148623c84134b5647e1306419dbe2174e523fa9e2fbed3a06a19f899145610741c83ad40b7712aefaddec8c6baf7325d92ea4ca4d1df8bce517f7e54554608bf2bd8071a4f52a7a2f7ffbb1413edad81eeea5785aa9d990f2865dc23b4bc3c301a94eec4eabebca66be5cf638f693ec256aec514620cc28ee4a94bd9565bc4d4962b9d3641d4278fb319ed2b84de5b665f307a2db0f7fbb757366067d88c50f7e829138fde4f78d39b5b5802f1b92a8a820865af5cc79f9f30bc3f461c66af95d13e5e1f0381c184572a91dee1c849048a647a1158cf884064deddbf1b0b88dfe2f791428d0ba0f6fb2f04e14081f69165ae66d9297c118f0907705c9c4954a199bae0bb96fad763d690e7daa6cfda59ba7f2c8d11448b604d12d')
    # low-level helper: peels the error onion and reports the raw decrypted
    # error blob plus the index of the erring hop
    decoded_error, index_of_sender = _decode_onion_error(
        error_packet_for_node_0, payment_path_pubkeys, session_key)
    self.assertEqual(
        bfh('4c2fc8bc08510334b6833ad9c3e79cd1b52ae59dfe5c2a4b23ead50f09f7ee0b0002200200fe0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'),
        decoded_error)
    self.assertEqual(4, index_of_sender)
    # high-level wrapper: additionally parses the failure message
    failure_msg, index_of_sender = decode_onion_error(
        error_packet_for_node_0, payment_path_pubkeys, session_key)
    self.assertEqual(4, index_of_sender)
    self.assertEqual(OnionFailureCode.TEMPORARY_NODE_FAILURE, failure_msg.code)
    self.assertEqual(b'', failure_msg.data)
def get_noise_map(cls, versioned_seed: VersionedSeed) -> Dict[Tuple[int, int], int]:
    """Returns a map from (x,y) coordinate to pixel value 0/1, to be used as rawnoise."""
    w, h = cls.SIZE
    version = versioned_seed.version
    hex_seed = versioned_seed.seed
    checksum = versioned_seed.checksum
    noise_map = {}
    if version == '0':
        # legacy scheme: stdlib PRNG seeded with the integer value of the hex seed
        random.seed(int(hex_seed, 16))
        for x in range(w):
            for y in range(h):
                noise_map[(x, y)] = random.randint(0, 1)
    elif version == '1':
        # current scheme: deterministic bits from a DRBG keyed with
        # seed || version || checksum
        prng_seed = bfh(hex_seed + version + checksum)
        drbg = DRBG(prng_seed)
        num_noise_bytes = 1929  # ~ w*h
        noise_array = bin(int.from_bytes(drbg.generate(num_noise_bytes), 'big'))[2:]
        # there's an approx 1/1024 chance that the generated number is 'too small'
        # and we would get IndexError below. easiest backwards compat fix:
        noise_array += '0' * (w * h - len(noise_array))
        i = 0
        for x in range(w):
            for y in range(h):
                noise_map[(x, y)] = int(noise_array[i])
                i += 1
    else:
        raise Exception(f"unexpected revealer version: {version}")
    return noise_map
def test_encode_decode_msg__ints_can_be_passed_as_bytes(self):
    # integer-valued fields may be supplied pre-serialized as big-endian bytes;
    # the encoder must produce the same wire bytes as with plain ints
    self.assertEqual(
        bfh("010200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a0293e4eeb3da6e6f56f81ed595f57880d1a21569e13eefdd951284b5a62664900d43100006f00025e6ed0830100009000000000000000c8000001f400000023000000003b9aca00"),
        encode_msg(
            "channel_update",
            short_channel_id=ShortChannelID.from_components(54321, 111, 2),
            channel_flags=b'\x00',
            message_flags=b'\x01',
            cltv_expiry_delta=int.to_bytes(144, length=2, byteorder="big", signed=False),
            htlc_minimum_msat=int.to_bytes(200, length=8, byteorder="big", signed=False),
            htlc_maximum_msat=int.to_bytes(1_000_000_000, length=8, byteorder="big", signed=False),
            fee_base_msat=int.to_bytes(500, length=4, byteorder="big", signed=False),
            fee_proportional_millionths=int.to_bytes(35, length=4, byteorder="big", signed=False),
            chain_hash=constants.net.rev_genesis_bytes(),
            timestamp=int.to_bytes(1584320643, length=4, byteorder="big", signed=False),
        ))
def test_write_bigsize_int(self):
    """BigSize serialization picks the correct encoding at every size boundary."""
    vectors = [
        ("00", 0),
        ("fc", 252),                                 # largest 1-byte value
        ("fd00fd", 253),                             # smallest 0xfd-prefixed value
        ("fdffff", 65535),
        ("fe00010000", 65536),                       # smallest 0xfe-prefixed value
        ("feffffffff", 4294967295),
        ("ff0000000100000000", 4294967296),          # smallest 0xff-prefixed value
        ("ffffffffffffffffff", 18446744073709551615),
    ]
    for expected_hex, value in vectors:
        self.assertEqual(bfh(expected_hex), write_bigsize_int(value))
def write_kv(ktype, val, key=b''):
    # serialize helper: emit a var-int-length-prefixed (type byte + key),
    # then a var-int-length-prefixed value
    header = bytes([ktype]) + key
    out_fd.write(my_var_int(len(header)))
    out_fd.write(header)
    payload = bfh(val) if isinstance(val, str) else val
    out_fd.write(my_var_int(len(payload)))
    out_fd.write(payload)
def test_read_tlv_stream_tests3(self):
    # from https://github.com/lightningnetwork/lightning-rfc/blob/452a0eb916fedf4c954137b4fd0b61b5002b34ad/01-messaging.md#tlv-stream-decoding-failure
    lnser = LNSerializer()
    # each (raw hex, tlv stream name) pair must be rejected for bad field order
    failure_vectors = [
        ("0208000000000000022601012a", "n1"),
        ("0208000000000000023102080000000000000451", "n1"),
        ("1f000f012a", "n1"),
        ("1f001f012a", "n1"),
        ("ffffffffffffffffff000000", "n2"),
    ]
    for raw_hex, stream_name in failure_vectors:
        with self.assertRaises(MsgInvalidFieldOrder):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh(raw_hex)),
                                  tlv_stream_name=stream_name)
def test_encode_decode_msg__init(self):
    # "init" is interesting because it has TLVs optionally
    # features can be passed as an LnFeatures flag...
    self.assertEqual(
        bfh("00100000000220c2"),
        encode_msg(
            "init",
            gflen=0,
            flen=2,
            features=(LnFeatures.OPTION_STATIC_REMOTEKEY_OPT |
                      LnFeatures.GOSSIP_QUERIES_OPT |
                      LnFeatures.GOSSIP_QUERIES_REQ |
                      LnFeatures.OPTION_DATA_LOSS_PROTECT_OPT),
        ))
    # ...or as raw bytes; both must serialize identically
    self.assertEqual(
        bfh("00100000000220c2"),
        encode_msg("init", gflen=0, flen=2, features=bfh("20c2")))
    # with the optional 'networks' TLV appended
    self.assertEqual(
        bfh("00100000000220c20120a0293e4eeb3da6e6f56f81ed595f57880d1a21569e13eefdd951284b5a626649"),
        encode_msg(
            "init",
            gflen=0,
            flen=2,
            features=(LnFeatures.OPTION_STATIC_REMOTEKEY_OPT |
                      LnFeatures.GOSSIP_QUERIES_OPT |
                      LnFeatures.GOSSIP_QUERIES_REQ |
                      LnFeatures.OPTION_DATA_LOSS_PROTECT_OPT),
            init_tlvs={
                'networks': {
                    'chains': b'\xa0)>N\xeb=\xa6\xe6\xf5o\x81\xedY_W\x88\r\x1a!V\x9e\x13\xee\xfd\xd9Q(KZbfI'
                }
            }))
    # decoding without TLVs yields an empty init_tlvs dict
    self.assertEqual(('init', {
        'gflen': 2,
        'globalfeatures': b'"\x00',
        'flen': 3,
        'features': b'\x02\xa2\xa1',
        'init_tlvs': {}
    }), decode_msg(bfh("001000022200000302a2a1")))
    # decoding with a trailing 'networks' TLV populates init_tlvs
    self.assertEqual(
        ('init', {
            'gflen': 2,
            'globalfeatures': b'"\x00',
            'flen': 3,
            'features': b'\x02\xaa\xa2',
            'init_tlvs': {
                'networks': {
                    'chains': b'\xa0)>N\xeb=\xa6\xe6\xf5o\x81\xedY_W\x88\r\x1a!V\x9e\x13\xee\xfd\xd9Q(KZbfI'
                }
            }
        }),
        decode_msg(
            bfh("001000022200000302aaa20120a0293e4eeb3da6e6f56f81ed595f57880d1a21569e13eefdd951284b5a626649")))
def sign_transaction(self, keystore, tx: PartialTransaction, prev_tx):
    """Sign *tx* on the hardware device associated with *keystore*.

    prev_tx: dict of txid (hex str) -> previous transaction; the device
    needs the previous transactions to verify input amounts.
    Signatures are written back into *tx* in place.
    """
    # the device protocol wants prev txs keyed by raw txid bytes, converted
    # to its own transaction type
    prev_tx = {
        bfh(txhash): self.electrum_tx_to_txtype(tx)
        for txhash, tx in prev_tx.items()
    }
    client = self.get_client(keystore)
    inputs = self.tx_inputs(tx, for_sig=True, keystore=keystore)
    outputs = self.tx_outputs(tx, keystore=keystore)
    details = SignTx(lock_time=tx.locktime, version=tx.version)
    signatures, _ = client.sign_tx(self.get_coin_name(), inputs, outputs,
                                   details=details, prev_txes=prev_tx)
    # append the 0x01 sighash byte (SIGHASH_ALL) to each hex-encoded signature
    signatures = [(bh2u(x) + '01') for x in signatures]
    tx.update_signatures(signatures)
def test_encode_decode_msg__missing_mandatory_field_gets_set_to_zeroes(self):
    # "channel_update": "signature" missing -> gets set to zeroes
    # (note the 64 zero bytes at the start of the expected wire data)
    self.assertEqual(
        bfh("010200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a0293e4eeb3da6e6f56f81ed595f57880d1a21569e13eefdd951284b5a62664900d43100006f00025e6ed0830100009000000000000000c8000001f400000023000000003b9aca00"),
        encode_msg(
            "channel_update",
            short_channel_id=ShortChannelID.from_components(54321, 111, 2),
            channel_flags=b'\x00',
            message_flags=b'\x01',
            cltv_expiry_delta=144,
            htlc_minimum_msat=200,
            htlc_maximum_msat=1_000_000_000,
            fee_base_msat=500,
            fee_proportional_millionths=35,
            chain_hash=constants.net.rev_genesis_bytes(),
            timestamp=1584320643,
        ))
def show_address(self, wallet, address, keystore: 'Coldcard_KeyStore' = None):
    """Show *address* on the hardware device's screen so the user can verify it.

    Supports standard (single-sig) and multisig wallets; other wallet types
    get an error dialog.
    """
    if keystore is None:
        keystore = wallet.get_keystore()
    # bail out silently if the helper decides the address cannot/should not be shown
    if not self.show_address_helper(wallet, address, keystore):
        return
    txin_type = wallet.get_txin_type(address)
    # Standard_Wallet => not multisig, must be bip32
    if type(wallet) is Standard_Wallet:
        sequence = wallet.get_address_index(address)
        keystore.show_address(sequence, txin_type)
    elif type(wallet) is Multisig_Wallet:
        assert isinstance(wallet, Multisig_Wallet)  # only here for type-hints in IDE
        # More involved for P2SH/P2WSH addresses: need M, and all public keys, and their
        # derivation paths. Must construct script, and track fingerprints+paths for
        # all those keys
        pubkey_deriv_info = wallet.get_public_keys_with_deriv_info(address)
        pubkey_hexes = sorted([pk.hex() for pk in list(pubkey_deriv_info)])
        xfp_paths = []
        for pubkey in pubkey_deriv_info:
            ks, der_suffix = pubkey_deriv_info[pubkey]
            # full derivation path (not just the suffix) for each cosigner key
            fp_bytes, der_full = ks.get_fp_and_derivation_to_be_used_in_partial_tx(
                der_suffix, only_der_suffix=False)
            xfp_int = xfp_int_from_xfp_bytes(fp_bytes)
            xfp_paths.append([xfp_int] + list(der_full))
        script = bfh(wallet.pubkeys_to_scriptcode(pubkey_hexes))
        keystore.show_p2sh_address(wallet.m, script, xfp_paths, txin_type)
    else:
        keystore.handler.show_error(
            _('This function is only available for standard wallets when using {}.'
              ).format(self.device))
        return
def test_process_onion_packet_legacy(self):
    # this test is not from bolt-04, but is based on the one there;
    # except here we have the privkeys for these pubkeys
    payment_path_pubkeys = [
        bfh('03d75c0ee70f68d73d7d13aeb6261d8ace11416800860c7e59407afe4e2e2d42bb'),
        bfh('03960a0b830c7b8e76de745b819f252c62508346196b916f5e813cdb0773283cce'),
        bfh('0385620e0a571cbc3552620f8bf1bdcdab2d1a4a59c36fa10b8249114ccbdda40d'),
        bfh('02ee242cf6c38b7285f0152c33804ff777f5c51fd352ca8132e845e2cf23b3d8ba'),
        bfh('025c585fd2e174bf8245b2b4a119e52a417688904228643ea3edaa1728bf2a258e'),
    ]
    payment_path_privkeys = [
        bfh('3463a278617b3dd83f79bda7f97673f12609c54386e1f0d2b67b1c6354fda14e'),
        bfh('7e1255fddb52db1729fc3ceb21a46f95b8d9fe94cc83425e936a6c5223bb679d'),
        bfh('c7ce8c1462c311eec24dff9e2532ac6241e50ae57e7d1833af21942136972f23'),
        bfh('3d885f374d79a5e777459b083f7818cdc9493e5c4994ac9c7b843de8b70be661'),
        bfh('dd72ab44729527b7942e195e7a835e7c71f9c0ff61844eb21274d9c26166a8f8'),
    ]
    session_key = bfh(
        '4141414141414141414141414141414141414141414141414141414141414141')
    associated_data = bfh(
        '4242424242424242424242424242424242424242424242424242424242424242')
    # five hops, all legacy (non-TLV) payloads; per-hop values just count upwards
    hops_data = [
        OnionHopsDataSingle(is_tlv_payload=False, payload={
            "amt_to_forward": {"amt_to_forward": 0},
            "outgoing_cltv_value": {"outgoing_cltv_value": 0},
            "short_channel_id": {"short_channel_id": bfh('0000000000000000')},
        }),
        OnionHopsDataSingle(is_tlv_payload=False, payload={
            "amt_to_forward": {"amt_to_forward": 1},
            "outgoing_cltv_value": {"outgoing_cltv_value": 1},
            "short_channel_id": {"short_channel_id": bfh('0101010101010101')},
        }),
        OnionHopsDataSingle(is_tlv_payload=False, payload={
            "amt_to_forward": {"amt_to_forward": 2},
            "outgoing_cltv_value": {"outgoing_cltv_value": 2},
            "short_channel_id": {"short_channel_id": bfh('0202020202020202')},
        }),
        OnionHopsDataSingle(is_tlv_payload=False, payload={
            "amt_to_forward": {"amt_to_forward": 3},
            "outgoing_cltv_value": {"outgoing_cltv_value": 3},
            "short_channel_id": {"short_channel_id": bfh('0303030303030303')},
        }),
        OnionHopsDataSingle(is_tlv_payload=False, payload={
            "amt_to_forward": {"amt_to_forward": 4},
            "outgoing_cltv_value": {"outgoing_cltv_value": 4},
            "short_channel_id": {"short_channel_id": bfh('0404040404040404')},
        }),
    ]
    packet = new_onion_packet(payment_path_pubkeys, session_key, hops_data,
                              associated_data)
    # expected serialized onion packet
    self.assertEqual(
        bfh('0002eec7245d6b7d2ccb30380bfbe2a3648cd7a942653f5aa340edcea1f28368661954176cd9869da33d713aa219fcef1e5c806fef11e696bcc66844de8271c27974a049d041ffc5be934b8575c6ff4371f2f88d4edfd73e445534d3f6ae15b64b0d8308390bebf8d149002e31bdc283056477ba27c8054c248ad7306de31663a7c99ec65b251704041f7c4cc40a0016ba172fbf805ec59132a65a4c7eb1f41337931c5df0f840704535729262d30c6132d1b390f073edec8fa057176c6268b6ad06a82ff0229c3be444ee50b40686bc1306838b93c65771de1b6ca05dace1ff9814a6e58b2dd71e8244c83e28b2ed5a3b09e9e7df5c8c747e5765ba366a4f7407a6c6b0a32fb5521cce7cd668f7434c909c1be027d8595d85893e5f612c49a93eeeed80a78bab9c4a621ce0f6f5df7d64a9c8d435db19de192d9db522c7f7b4e201fc1b61a9bd3efd062ae24455d463818b01e2756c7d0691bc3ac4c017be34c9a8b2913bb1b94056bf7a21730afc3f254ffa41ca140a5d87ff470f536d08619e8004d50de2fe5954d6aa4a00570da397ba15ae9ea4d7d1f136256a9093f0a787a36cbb3520b6a3cf4d1b13b16bf399c4b0326da1382a90bd79cf92f4808c8c84eaa50a8ccf44acbde0e35b2e6b72858c8446d6a05f3ba70fb4adc70af27cea9bd1dc1ea35fb3cc236b8b9b69b614903db339b22ad5dc2ddda7ac65fd7de24e60b7dbba7aafc9d26c0f9fcb03f1bb85dfc21762f862620651db52ea703ae60aa7e07febf11caa95c4245a4b37eb9c233e1ab1604fb85849e7f49cb9f7c681c4d91b7c320eb4b89b9c6bcb636ceadda59f6ed47aa5b1ea0a946ea98f6ad2614e79e0d4ef96d6d65903adb0479709e03008bbdf3355dd87df7d68965fdf1ad5c68b6dc2761b96b10f8eb4c0329a646bf38cf4702002e61565231b4ac7a9cde63d23f7b24c9d42797b3c434558d71ed8bf9fcab2c2aee3e8b38c19f9edc3ad3dfe9ebba7387ce4764f97ed1c1a83552dff8315546761479a6f929c39bcca0891d4a967d1b77fa80feed6ae74ac82ed5fb7be225c3f2b0ebdc652afc2255c47bc318ac645bbf19c0819ff527ff6708a78e19c8ca3dc8087035e10d5ac976e84b71148586c8a5a7b26ed11b5b401ce7bb2ac532207eaa24d2f53aaa8024607da764d807c91489e82fcad04e6b8992a507119367f576ee5ffe6807d5723d60234d4c3f94adce0acfed9dba535ca375446a4e9b500b74ad2a66e1c6b0fc38933f282d3a4a877bceceeca52b46e731ca51a9534224a883c4a45587f973f73a22069a4154b1da03d307d8575c821bef0eef87165b9a1bbf902ecfca82ddd805d10fbb7147b496f6772f01e9bf542b00288f3a6efab32590c1f34535ece03a0587ca187d27a98d4c9aa7c044794baa43a81abbe307f51d0bda6e7b4cf62c4be553b176321777e7fd483d6cec16df137293aaf3ad53608e1c7831368675bb9608db04d5c859e7714edab3d2389837fa071f0795adfabc51507b1adbadc7f83e80bd4e4eb9ed1a89c9e0a6dc16f38d55181d5666b02150651961aab34faef97d80fa4e1960864dfec3b687fd4eadf7aa6c709cb4698ae86ae112f386f33731d996b9d41926a2e820c6ba483a61674a4bae03af37e872ffdc0a9a8a034327af17e13e9e7ac619c9188c2a5c12a6ebf887721455c0e2822e67a621ed49f1f50dfc38b71c29d0224954e84ced086c80de552cca3a14adbe43035901225bafc3db3b672c780e4fa12b59221f93690527efc16a28e7c63d1a99fc881f023b03a157076a7e999a715ed37521adb483e2477d75ba5a55d4abad22b024c5317334b6544f15971591c774d896229e4e668fc1c7958fbd76fa0b152a6f14c95692083badd066b6621367fd73d88ba8d860566e6d55b871d80c68296b80ae8847d'),
        packet.to_bytes())
    # each hop in turn can decrypt its own payload and produce the next packet
    for i, privkey in enumerate(payment_path_privkeys):
        processed_packet = process_onion_packet(packet, associated_data, privkey)
        self.assertEqual(hops_data[i].to_bytes(),
                         processed_packet.hop_data.to_bytes())
        packet = processed_packet.next_packet
def test_process_onion_packet_mixed_payloads(self):
    # this test is not from bolt-04, but is based on the one there;
    # here the TLV payloads are actually sane...
    payment_path_pubkeys = [
        bfh('02eec7245d6b7d2ccb30380bfbe2a3648cd7a942653f5aa340edcea1f283686619'),
        bfh('0324653eac434488002cc06bbfb7f10fe18991e35f9fe4302dbea6d2353dc0ab1c'),
        bfh('027f31ebc5462c1fdce1b737ecff52d37d75dea43ce11c74d25aa297165faa2007'),
        bfh('032c0b7cf95324a07d05398b240174dc0c2be444d96b159aa6c7f7b1e668680991'),
        bfh('02edabbd16b41c8371b92ef2f04c1185b4f03b6dcd52ba9b78d9d7c89c8f221145'),
    ]
    payment_path_privkeys = [
        bfh('4141414141414141414141414141414141414141414141414141414141414141'),
        bfh('4242424242424242424242424242424242424242424242424242424242424242'),
        bfh('4343434343434343434343434343434343434343434343434343434343434343'),
        bfh('4444444444444444444444444444444444444444444444444444444444444444'),
        bfh('4545454545454545454545454545454545454545454545454545454545454545'),
    ]
    session_key = bfh(
        '4141414141414141414141414141414141414141414141414141414141414141')
    associated_data = bfh(
        '4242424242424242424242424242424242424242424242424242424242424242')
    # first and last hops use legacy payloads, the middle three use TLV
    hops_data = [
        OnionHopsDataSingle(is_tlv_payload=False, payload={
            "amt_to_forward": {"amt_to_forward": 0},
            "outgoing_cltv_value": {"outgoing_cltv_value": 0},
            "short_channel_id": {"short_channel_id": bfh('0000000000000000')},
        }),
        OnionHopsDataSingle(is_tlv_payload=True, payload={
            "amt_to_forward": {"amt_to_forward": 1},
            "outgoing_cltv_value": {"outgoing_cltv_value": 1},
            "short_channel_id": {"short_channel_id": bfh('0101010101010101')},
        }),
        OnionHopsDataSingle(is_tlv_payload=True, payload={
            "amt_to_forward": {"amt_to_forward": 2},
            "outgoing_cltv_value": {"outgoing_cltv_value": 2},
            "short_channel_id": {"short_channel_id": bfh('0202020202020202')},
        }),
        OnionHopsDataSingle(is_tlv_payload=True, payload={
            "amt_to_forward": {"amt_to_forward": 3},
            "outgoing_cltv_value": {"outgoing_cltv_value": 3},
            "short_channel_id": {"short_channel_id": bfh('0303030303030303')},
        }),
        OnionHopsDataSingle(is_tlv_payload=False, payload={
            "amt_to_forward": {"amt_to_forward": 4},
            "outgoing_cltv_value": {"outgoing_cltv_value": 4},
            "short_channel_id": {"short_channel_id": bfh('0404040404040404')},
        }),
    ]
    packet = new_onion_packet(payment_path_pubkeys, session_key, hops_data,
                              associated_data)
    # expected serialized onion packet
    self.assertEqual(
        bfh('0002eec7245d6b7d2ccb30380bfbe2a3648cd7a942653f5aa340edcea1f283686619e5f14350c2a76fc232b5e46d421e9615471ab9e0bc887beff8c95fdb878f7b3a71bde5adfa90b337f34616d8673d09dd055937273045566ce537ffbe3f9d1f263dc10c7d61ae590536c609010079a232a247922a5395359a63dfbefb85f40317e23254f3023f7d4a98f746c9ab06647645ce55c67308e3c77dc87a1caeac51b03b23c60f05e536e1d757c8c1093e34accfc4f97b5920f6dd2069d5b9ddbb384c3ac575e999a92a4434470ab0aa040c4c3cace3162a405842a88be783e64fad54bd6727c23fc446b7ec0dc3eec5a03eb6c70ec2784911c9e6d274322ec465f0972eb8e771b149f319582ba64dbc2b8e56a3ea79002801c09354f1541cf79bd1dccf5d6bd6b6bacc87a0f24ce497e14e8037e5a79fb4d9ca63fe47f17765963e8f17468a5eaec19a6cca2bfc4e4a366fea3a92112a945856be55e45197ecbab523025e7589529c30cc8addc8fa39d23ef64fa2e51a219c3bd4d3c484832f8e5af16bc46cdba0403991f4fc1b74beef857acf15fefed82ac8678ca66d26262c681beddfdb485aa498813b1a6c5833f1339c1a35244ab76baa0ccaf681ec1f54004e387063335648a77b65d90dde74f1c4b0a729ca25fa53256f7db6d35818b4e5910ba78ec69cf3646bf248ef46cf9cc33062662de2afe4dcf005951b85fd759429fa1ae490b78b14132ccb791232a6c680f03634c0136817f51bf9603a0dba405e7b347830be4327fceccd4734456842b82cf6275393b279bc6ac93d743e00a2d6042960089f70c782ce554b9f73eeeefeea50df7f6f80de1c4e869a7b502f9a5df30d1175402fa780812d35c6d489a30bb0cea53a1088669a238cccf416ecb37f8d8e6ea1327b64979d48e937db69a44a902923a75113685a4aca4a8d9c62b388b48d9c9e2ab9c2df4d529223144de6e16f2dd95a063da79163b3fe006a80263cde4410648f7c3e1f4a7707f82eb0e209002d972c7e57b4ff8ce063fa7b4140f52f569f0cc8793a97a170613efb6b27ba3a0370f8ea74fc0d6aabba54e0ee967abc70e87b580d2aac244236b7752db9d83b159afc1faf6b44b697643235bf59e99f43428caff409d26b9139538865b1f5cf4699f9296088aca461209024ad1dd00e3566e4fde2117b7b3ffced6696b735816a00199890056de86dcbb1b930228143dbf04f07c0eb34370089ea55c43b2c4546cbe1ff0c3a6217d994af9b4225f4b5acb1e3129f5f5b98d381a4692a8561c670b2ee95869f9614e76bb07f623c5194e1c9d26334026f8f5437ec1cde526f914fa094a465f0adcea32b79bfa44d2562536b0d8366da9ee577666c1d5e39615444ca5c900b8199fafac002b8235688eaa0c6887475a913b37d9a4ed43a894ea4576102e5d475ae0b962240ea95fc367b7ac214a4f8682448a9c0d2eea35727bdedc235a975ecc8148a5b03d6291a051dbefe19c8b344d2713c6664dd94ced53c6be39a837fbf1169cca6a12b0a2710f443ba1afeecb51e94236b2a6ed1c2f365b595443b1515de86dcb8c67282807789b47c331cde2fdd721262bef165fa96b7919d11bc5f2022f5affffdd747c7dbe3de8add829a0a8913519fdf7dba4e8a7a25456d2d559746d39ea6ffa31c7b904792fb734bba30f2e1adf7457a994513a1807785fe7b22bf419d1f407f8e2db8b22c0512b078c0cfdfd599e6c4a9d0cc624b9e24b87f30541c3248cd6643df15d251775cc457df4ea6b4e4c5990d87541028c6f0eb28502db1c11a92797168d0b68cb0a0d345b3a3ad05fc4016862f403c64670c41a2c0c6d4e384f5f7da6a204a24530a51182fd7164f120e74a78decb1ab6cda6b9cfc68ac0a35f7a57e750ead65a8e0429cc16e733b9e4feaea25d06c1a4768'),
        packet.to_bytes())
    # each hop in turn can decrypt its own payload and produce the next packet
    for i, privkey in enumerate(payment_path_privkeys):
        processed_packet = process_onion_packet(packet, associated_data, privkey)
        self.assertEqual(hops_data[i].to_bytes(),
                         processed_packet.hop_data.to_bytes())
        packet = processed_packet.next_packet
def test_new_onion_packet_mixed_payloads(self):
    # test vector from bolt-04
    payment_path_pubkeys = [
        bfh('02eec7245d6b7d2ccb30380bfbe2a3648cd7a942653f5aa340edcea1f283686619'),
        bfh('0324653eac434488002cc06bbfb7f10fe18991e35f9fe4302dbea6d2353dc0ab1c'),
        bfh('027f31ebc5462c1fdce1b737ecff52d37d75dea43ce11c74d25aa297165faa2007'),
        bfh('032c0b7cf95324a07d05398b240174dc0c2be444d96b159aa6c7f7b1e668680991'),
        bfh('02edabbd16b41c8371b92ef2f04c1185b4f03b6dcd52ba9b78d9d7c89c8f221145'),
    ]
    session_key = bfh(
        '4141414141414141414141414141414141414141414141414141414141414141')
    associated_data = bfh(
        '4242424242424242424242424242424242424242424242424242424242424242')
    # hops 2-4 are TLV payloads whose raw bytes are set directly below
    hops_data = [
        OnionHopsDataSingle(is_tlv_payload=False, payload={
            "amt_to_forward": {"amt_to_forward": 0},
            "outgoing_cltv_value": {"outgoing_cltv_value": 0},
            "short_channel_id": {"short_channel_id": bfh('0000000000000000')},
        }),
        OnionHopsDataSingle(is_tlv_payload=True),
        OnionHopsDataSingle(is_tlv_payload=True),
        OnionHopsDataSingle(is_tlv_payload=True),
        OnionHopsDataSingle(is_tlv_payload=False, payload={
            "amt_to_forward": {"amt_to_forward": 4},
            "outgoing_cltv_value": {"outgoing_cltv_value": 4},
            "short_channel_id": {"short_channel_id": bfh('0404040404040404')},
        }),
    ]
    # raw TLV payload bytes taken verbatim from the bolt-04 vector
    hops_data[1]._raw_bytes_payload = bfh(
        "0101010101010101000000000000000100000001")
    hops_data[2]._raw_bytes_payload = bfh(
        "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff")
    hops_data[3]._raw_bytes_payload = bfh(
        "0303030303030303000000000000000300000003")
    packet = new_onion_packet(payment_path_pubkeys, session_key, hops_data,
                              associated_data)
    # expected serialized onion packet from the spec
    self.assertEqual(
        bfh('0002eec7245d6b7d2ccb30380bfbe2a3648cd7a942653f5aa340edcea1f283686619e5f14350c2a76fc232b5e46d421e9615471ab9e0bc887beff8c95fdb878f7b3a710f8eaf9ccc768f66bb5dec1f7827f33c43fe2ddd05614c8283aa78e9e7573f87c50f7d61ab590531cf08000178a333a347f8b4072e1cea42da7552402b10765adae3f581408f35ff0a71a34b78b1d8ecae77df96c6404bae9a8e8d7178977d7094a1ae549f89338c0777551f874159eb42d3a59fb9285ad4e24883f27de23942ec966611e99bee1cee503455be9e8e642cef6cef7b9864130f692283f8a973d47a8f1c1726b6e59969385975c766e35737c8d76388b64f748ee7943ffb0e2ee45c57a1abc40762ae598723d21bd184e2b338f68ebff47219357bd19cd7e01e2337b806ef4d717888e129e59cd3dc31e6201ccb2fd6d7499836f37a993262468bcb3a4dcd03a22818aca49c6b7b9b8e9e870045631d8e039b066ff86e0d1b7291f71cefa7264c70404a8e538b566c17ccc5feab231401e6c08a01bd5edfc1aa8e3e533b96e82d1f91118d508924b923531929aea889fcdf057f5995d9731c4bf796fb0e41c885d488dcbc68eb742e27f44310b276edc6f652658149e7e9ced4edde5d38c9b8f92e16f6b4ab13d710ee5c193921909bdd75db331cd9d7581a39fca50814ed8d9d402b86e7f8f6ac2f3bca8e6fe47eb45fbdd3be21a8a8d200797eae3c9a0497132f92410d804977408494dff49dd3d8bce248e0b74fd9e6f0f7102c25ddfa02bd9ad9f746abbfa3379834bc2380d58e9d23237821475a1874484783a15d68f47d3dc339f38d9bf925655d5c946778680fd6d1f062f84128895aff09d35d6c92cca63d3f95a9ee8f2a84f383b4d6a087533e65de12fc8dcaf85777736a2088ff4b22462265028695b37e70963c10df8ef2458756c73007dc3e544340927f9e9f5ea4816a9fd9832c311d122e9512739a6b4714bba590e31caa143ce83cb84b36c738c60c3190ff70cd9ac286a9fd2ab619399b68f1f7447be376ce884b5913c8496d01cbf7a44a60b6e6747513f69dc538f340bc1388e0fde5d0c1db50a4dcb9cc0576e0e2474e4853af9623212578d502757ffb2e0e749695ed70f61c116560d0d4154b64dcf3cbf3c91d89fb6dd004dc19588e3479fcc63c394a4f9e8a3b8b961fce8a532304f1337f1a697a1bb14b94d2953f39b73b6a3125d24f27fcd4f60437881185370bde68a5454d816e7a70d4cea582effab9a4f1b730437e35f7a5c4b769c7b72f0346887c1e63576b2f1e2b3706142586883f8cf3a23595cc8e35a52ad290afd8d2f8bcd5b4c1b891583a4159af7110ecde092079209c6ec46d2bda60b04c519bb8bc6dffb5c87f310814ef2f3003671b3c90ddf5d0173a70504c2280d31f17c061f4bb12a978122c8a2a618bb7d1edcf14f84bf0fa181798b826a254fca8b6d7c81e0beb01bd77f6461be3c8647301d02b04753b0771105986aa0cbc13f7718d64e1b3437e8eef1d319359914a7932548c91570ef3ea741083ca5be5ff43c6d9444d29df06f76ec3dc936e3d180f4b6d0fbc495487c7d44d7c8fe4a70d5ff1461d0d9593f3f898c919c363fa18341ce9dae54f898ccf3fe792136682272941563387263c51b2a2f32363b804672cc158c9230472b554090a661aa81525d11876eefdcc45442249e61e07284592f1606491de5c0324d3af4be035d7ede75b957e879e9770cdde2e1bbc1ef75d45fe555f1ff6ac296a2f648eeee59c7c08260226ea333c285bcf37a9bbfa57ba2ab8083c4be6fc2ebe279537d22da96a07392908cf22b233337a74fe5c603b51712b43c3ee55010ee3d44dd9ba82bba3145ec358f863e04bbfa53799a7a9216718fd5859da2f0deb77b8e315ad6868fdec9400f45a48e6dc8ddbaeb3'),
        packet.to_bytes())
def test_find_path_for_payment(self):
    """Build a small 5-node channel graph and check the cheapest a->e path is found."""
    class fake_network:
        config = self.config
        asyncio_loop = asyncio.get_event_loop()
        trigger_callback = lambda *args: None
        register_callback = lambda *args: None
        interface = None
    fake_network.channel_db = lnrouter.ChannelDB(fake_network())
    fake_network.channel_db.data_loaded.set()
    cdb = fake_network.channel_db
    path_finder = lnrouter.LNPathFinder(cdb)
    self.assertEqual(cdb.num_channels, 0)

    def node(letter):
        # 33-byte fake pubkey for the node named by a single letter
        return b'\x02' + letter.encode('ascii') * 32

    def announce_channel(scid_hex, n1, n2):
        # fake keys double as both node ids and bitcoin keys
        cdb.add_channel_announcement(
            {
                'node_id_1': node(n1),
                'node_id_2': node(n2),
                'bitcoin_key_1': node(n1),
                'bitcoin_key_2': node(n2),
                'short_channel_id': bfh(scid_hex),
                'chain_hash': BitcoinTestnet.rev_genesis_bytes(),
                'len': 0,
                'features': b''
            }, trusted=True)

    announce_channel('0000000000000001', 'b', 'c')
    self.assertEqual(cdb.num_channels, 1)
    announce_channel('0000000000000002', 'b', 'e')
    announce_channel('0000000000000003', 'a', 'b')
    announce_channel('0000000000000004', 'c', 'd')
    announce_channel('0000000000000005', 'd', 'e')
    announce_channel('0000000000000006', 'a', 'd')

    def update_channel(scid_hex, channel_flags, cltv_delta, fee_prop):
        cdb.add_channel_update({
            'short_channel_id': bfh(scid_hex),
            'message_flags': b'\x00',
            'channel_flags': channel_flags,
            'cltv_expiry_delta': cltv_delta,
            'htlc_minimum_msat': 250,
            'fee_base_msat': 100,
            'fee_proportional_millionths': fee_prop,
            'chain_hash': BitcoinTestnet.rev_genesis_bytes(),
            'timestamp': 0
        })

    # both directions of every channel; selected edges are made expensive
    # (high cltv / fee) so the cheapest route is a->b->e via channels 3 and 2
    update_channel('0000000000000001', b'\x00', 10, 150)
    update_channel('0000000000000001', b'\x01', 10, 150)
    update_channel('0000000000000002', b'\x00', 99, 150)
    update_channel('0000000000000002', b'\x01', 10, 150)
    update_channel('0000000000000003', b'\x01', 10, 150)
    update_channel('0000000000000003', b'\x00', 10, 150)
    update_channel('0000000000000004', b'\x01', 10, 150)
    update_channel('0000000000000004', b'\x00', 10, 150)
    update_channel('0000000000000005', b'\x01', 10, 150)
    update_channel('0000000000000005', b'\x00', 10, 999)
    update_channel('0000000000000006', b'\x00', 10, 99999999)
    update_channel('0000000000000006', b'\x01', 10, 150)

    path = path_finder.find_path_for_payment(node('a'), node('e'), 100000)
    self.assertEqual([
        PathEdge(node_id=node('b'), short_channel_id=bfh('0000000000000003')),
        PathEdge(node_id=node('e'), short_channel_id=bfh('0000000000000002')),
    ], path)
    route = path_finder.create_route_from_path(path, node('a'))
    self.assertEqual(node('b'), route[0].node_id)
    self.assertEqual(bfh('0000000000000003'), route[0].short_channel_id)
    # need to duplicate tear_down here, as we also need to wait for the sql thread to stop
    self.asyncio_loop.call_soon_threadsafe(self._stop_loop.set_result, 1)
    self._loop_thread.join(timeout=1)
    cdb.sql_thread.join(timeout=1)
def test_new_onion_packet_legacy(self):
    """Check construction of a legacy (non-TLV) onion packet against the BOLT-04 test vector.

    Builds a 5-hop route with fixed hop pubkeys, a fixed session key and
    associated data, then asserts the serialized packet matches the
    byte-exact expected output from the spec's test vector.
    """
    # test vector from bolt-04
    payment_path_pubkeys = [
        bfh('02eec7245d6b7d2ccb30380bfbe2a3648cd7a942653f5aa340edcea1f283686619'),
        bfh('0324653eac434488002cc06bbfb7f10fe18991e35f9fe4302dbea6d2353dc0ab1c'),
        bfh('027f31ebc5462c1fdce1b737ecff52d37d75dea43ce11c74d25aa297165faa2007'),
        bfh('032c0b7cf95324a07d05398b240174dc0c2be444d96b159aa6c7f7b1e668680991'),
        bfh('02edabbd16b41c8371b92ef2f04c1185b4f03b6dcd52ba9b78d9d7c89c8f221145'),
    ]
    session_key = bfh('4141414141414141414141414141414141414141414141414141414141414141')
    associated_data = bfh('4242424242424242424242424242424242424242424242424242424242424242')
    # Per-hop payloads: hop i forwards amount i with cltv i over scid 0x(ii..ii),
    # encoded in the legacy (fixed-size, non-TLV) hop data format.
    hops_data = [
        OnionHopsDataSingle(is_tlv_payload=False, payload={
            "amt_to_forward": {"amt_to_forward": 0},
            "outgoing_cltv_value": {"outgoing_cltv_value": 0},
            "short_channel_id": {"short_channel_id": bfh('0000000000000000')},
        }),
        OnionHopsDataSingle(is_tlv_payload=False, payload={
            "amt_to_forward": {"amt_to_forward": 1},
            "outgoing_cltv_value": {"outgoing_cltv_value": 1},
            "short_channel_id": {"short_channel_id": bfh('0101010101010101')},
        }),
        OnionHopsDataSingle(is_tlv_payload=False, payload={
            "amt_to_forward": {"amt_to_forward": 2},
            "outgoing_cltv_value": {"outgoing_cltv_value": 2},
            "short_channel_id": {"short_channel_id": bfh('0202020202020202')},
        }),
        OnionHopsDataSingle(is_tlv_payload=False, payload={
            "amt_to_forward": {"amt_to_forward": 3},
            "outgoing_cltv_value": {"outgoing_cltv_value": 3},
            "short_channel_id": {"short_channel_id": bfh('0303030303030303')},
        }),
        OnionHopsDataSingle(is_tlv_payload=False, payload={
            "amt_to_forward": {"amt_to_forward": 4},
            "outgoing_cltv_value": {"outgoing_cltv_value": 4},
            "short_channel_id": {"short_channel_id": bfh('0404040404040404')},
        }),
    ]
    packet = new_onion_packet(payment_path_pubkeys, session_key, hops_data, associated_data)
    # Expected serialization taken verbatim from the BOLT-04 test vector.
    self.assertEqual(
        bfh('0002eec7245d6b7d2ccb30380bfbe2a3648cd7a942653f5aa340edcea1f283686619e5f14350c2a76fc232b5e46d421e9615471ab9e0bc887beff8c95fdb878f7b3a71e87f9aab8f6378c6ff744c1f34b393ad28d065b535c1a8668d85d3b34a1b3befd10f7d61ab590531cf08000178a333a347f8b4072e216400406bdf3bf038659793a1f9e7abc789266cc861cabd95818c0fc8efbdfdc14e3f7c2bc7eb8d6a79ef75ce721caad69320c3a469a202f3e468c67eaf7a7cda226d0fd32f7b48084dca885d014698cf05d742557763d9cb743faeae65dcc79dddaecf27fe5942be5380d15e9a1ec866abe044a9ad635778ba61fc0776dc832b39451bd5d35072d2269cf9b040a2a2fba158a0d8085926dc2e44f0c88bf487da56e13ef2d5e676a8589881b4869ed4c7f0218ff8c6c7dd7221d189c65b3b9aaa71a01484b122846c7c7b57e02e679ea8469b70e14fe4f70fee4d87b910cf144be6fe48eef24da475c0b0bcc6565a9f99728426ce2380a9580e2a9442481ceae7679906c30b1a0e21a10f26150e0645ab6edfdab1ce8f8bea7b1dee511c5fd38ac0e702c1c15bb86b52bca1b71e15b96982d262a442024c33ceb7dd8f949063c2e5e613e873250e2f8708bd4e1924abd45f65c2fa5617bfb10ee9e4a42d6b5811acc8029c16274f937dac9e8817c7e579fdb767ffe277f26d413ced06b620ede8362081da21cf67c2ca9d6f15fe5bc05f82f5bb93f8916bad3d63338ca824f3bbc11b57ce94a5fa1bc239533679903d6fec92a8c792fd86e2960188c14f21e399cfd72a50c620e10aefc6249360b463df9a89bf6836f4f26359207b765578e5ed76ae9f31b1cc48324be576e3d8e44d217445dba466f9b6293fdf05448584eb64f61e02903f834518622b7d4732471c6e0e22e22d1f45e31f0509eab39cdea5980a492a1da2aaac55a98a01216cd4bfe7abaa682af0fbff2dfed030ba28f1285df750e4d3477190dd193f8643b61d8ac1c427d590badb1f61a05d480908fbdc7c6f0502dd0c4abb51d725e92f95da2a8facb79881a844e2026911adcc659d1fb20a2fce63787c8bb0d9f6789c4b231c76da81c3f0718eb7156565a081d2be6b4170c0e0bcebddd459f53db2590c974bca0d705c055dee8c629bf854a5d58edc85228499ec6dde80cce4c8910b81b1e9e8b0f43bd39c8d69c3a80672729b7dc952dd9448688b6bd06afc2d2819cda80b66c57b52ccf7ac1a86601410d18d0c732f69de792e0894a9541684ef174de766fd4ce55efea8f53812867be6a391ac865802dbc26d93959df327ec2667c7256aa5a1d3c45a69a6158f285d6c97c3b8eedb09527848500517995a9eae4cd911df531544c77f5a9a2f22313e3eb72ca7a07dba243476bc926992e0d1e58b4a2fc8c7b01e0cad726237933ea319bad7537d39f3ed635d1e6c1d29e97b3d2160a09e30ee2b65ac5bce00996a73c008bcf351cecb97b6833b6d121dcf4644260b2946ea204732ac9954b228f0beaa15071930fd9583dfc466d12b5f0eeeba6dcf23d5ce8ae62ee5796359d97a4a15955c778d868d0ef9991d9f2833b5bb66119c5f8b396fd108baed7906cbb3cc376d13551caed97fece6f42a4c908ee279f1127fda1dd3ee77d8de0a6f3c135fa3f1cffe38591b6738dc97b55f0acc52be9753ce53e64d7e497bb00ca6123758df3b68fad99e35c04389f7514a8e36039f541598a417275e77869989782325a15b5342ac5011ff07af698584b476b35d941a4981eac590a07a092bb50342da5d3341f901aa07964a8d02b623c7b106dd0ae50bfa007a22d46c8772fa55558176602946cb1d11ea5460db7586fb89c6d3bcd3ab6dd20df4a4db63d2e7d52380800ad812b8640887e027e946df96488b47fbc4a4fadaa8beda4abe446fafea5403fae2ef'),
        packet.to_bytes())
def sign_transaction(self, tx, password):
    """Sign *tx* with the Digital Bitbox hardware wallet.

    Collects per-input sighash preimages and change-output pubkeys, sends
    them to the device (in batches of at most ``self.maxInputs``), and fills
    the resulting signatures back into *tx*.  No-op if *tx* is already
    complete.  User abort on the device raises UserCancelled; any other
    failure is routed through ``self.give_error``.
    """
    if tx.is_complete():
        return
    try:
        p2pkhTransaction = True  # stays True only if every input is p2pkh
        inputhasharray = []      # raw sighash digests, needed later for pubkey recovery
        hasharray = []           # device-facing list of {hash, keypath}
        pubkeyarray = []         # change outputs for the device's "checkpub" verification

        # Build hasharray from inputs
        for i, txin in enumerate(tx.inputs()):
            if txin.is_coinbase_input():
                self.give_error("Coinbase not supported")  # should never happen
            if txin.script_type != 'p2pkh':
                p2pkhTransaction = False
            my_pubkey, inputPath = self.find_my_pubkey_in_txinout(txin)
            if not inputPath:
                self.give_error("No matching pubkey for sign_transaction")  # should never happen
            inputPath = convert_bip32_intpath_to_strpath(inputPath)
            inputHash = sha256d(bfh(tx.serialize_preimage(i)))
            hasharray_i = {'hash': to_hexstr(inputHash), 'keypath': inputPath}
            hasharray.append(hasharray_i)
            inputhasharray.append(inputHash)

        # Build pubkeyarray from outputs
        for txout in tx.outputs():
            assert txout.address
            if txout.is_change:
                changePubkey, changePath = self.find_my_pubkey_in_txinout(txout)
                assert changePath
                changePath = convert_bip32_intpath_to_strpath(changePath)
                changePubkey = changePubkey.hex()
                pubkeyarray_i = {'pubkey': changePubkey, 'keypath': changePath}
                pubkeyarray.append(pubkeyarray_i)

        # Special serialization of the unsigned transaction for
        # the mobile verification app.
        # At the moment, verification only works for p2pkh transactions.
        if p2pkhTransaction:
            tx_copy = copy.deepcopy(tx)
            # monkey-patch method of tx_copy instance to change serialization
            def input_script(self, txin: PartialTxInput, *, estimate_size=False):
                if txin.script_type == 'p2pkh':
                    return Transaction.get_preimage_script(txin)
                raise Exception("unsupported type %s" % txin.script_type)
            tx_copy.input_script = input_script.__get__(tx_copy, PartialTransaction)
            tx_dbb_serialized = tx_copy.serialize_to_network()
        else:
            # We only need this for the signing echo / verification.
            tx_dbb_serialized = None

        # Build sign command
        dbb_signatures = []
        # The device can only sign a limited number of hashes per request.
        steps = math.ceil(1.0 * len(hasharray) / self.maxInputs)
        for step in range(int(steps)):
            hashes = hasharray[step * self.maxInputs : (step + 1) * self.maxInputs]

            msg = {
                "sign": {
                    "data": hashes,
                    "checkpub": pubkeyarray,
                },
            }
            if tx_dbb_serialized is not None:
                msg["sign"]["meta"] = to_hexstr(sha256d(tx_dbb_serialized))
            msg = json.dumps(msg).encode('ascii')
            dbb_client = self.plugin.get_client(self)

            if not dbb_client.is_paired():
                raise Exception("Could not sign transaction.")

            # First send returns an "echo" token; the actual signing happens
            # on the second send below (device protocol requirement).
            reply = dbb_client.hid_send_encrypt(msg)
            if 'error' in reply:
                raise Exception(reply['error']['message'])

            if 'echo' not in reply:
                raise Exception("Could not sign transaction.")

            if self.plugin.is_mobile_paired() and tx_dbb_serialized is not None:
                # Push the echo to the paired mobile app for smart verification.
                reply['tx'] = tx_dbb_serialized
                self.plugin.comserver_post_notification(reply)

            if steps > 1:
                self.handler.show_message(_("Signing large transaction. Please be patient ...") + "\n\n" +
                                          _("To continue, touch the Digital Bitbox's blinking light for 3 seconds.") + " " +
                                          _("(Touch {} of {})").format((step + 1), steps) + "\n\n" +
                                          _("To cancel, briefly touch the blinking light or wait for the timeout.") + "\n\n")
            else:
                self.handler.show_message(_("Signing transaction...") + "\n\n" +
                                          _("To continue, touch the Digital Bitbox's blinking light for 3 seconds.") + "\n\n" +
                                          _("To cancel, briefly touch the blinking light or wait for the timeout."))

            # Send twice, first returns an echo for smart verification
            reply = dbb_client.hid_send_encrypt(msg)
            self.handler.finished()

            if 'error' in reply:
                if reply["error"].get('code') in (600, 601):
                    # aborted via LED short touch or timeout
                    raise UserCancelled()
                raise Exception(reply['error']['message'])

            if 'sign' not in reply:
                raise Exception("Could not sign transaction.")

            dbb_signatures.extend(reply['sign'])

        # Fill signatures
        if len(dbb_signatures) != len(tx.inputs()):
            raise Exception("Incorrect number of transactions signed.")  # Should never occur
        for i, txin in enumerate(tx.inputs()):
            for pubkey_bytes in txin.pubkeys:
                if txin.is_complete():
                    break
                signed = dbb_signatures[i]
                # NOTE(review): the reply is assumed to contain either 'recid'
                # or 'pubkey'; if neither were present, 'pk' below would be
                # unbound (or stale) — presumably the firmware guarantees one.
                if 'recid' in signed:
                    # firmware > v2.1.1
                    recid = int(signed['recid'], 16)
                    s = binascii.unhexlify(signed['sig'])
                    h = inputhasharray[i]
                    # Recover the signing pubkey from the recoverable signature.
                    pk = ecc.ECPubkey.from_sig_string(s, recid, h)
                    pk = pk.get_public_key_hex(compressed=True)
                elif 'pubkey' in signed:
                    # firmware <= v2.1.1
                    pk = signed['pubkey']
                if pk != pubkey_bytes.hex():
                    continue
                sig_r = int(signed['sig'][:64], 16)
                sig_s = int(signed['sig'][64:], 16)
                sig = ecc.der_sig_from_r_and_s(sig_r, sig_s)
                sig = to_hexstr(sig) + '01'  # append SIGHASH_ALL byte
                tx.add_signature_to_txin(txin_idx=i,
                                         signing_pubkey=pubkey_bytes.hex(),
                                         sig=sig)
    except UserCancelled:
        raise
    except BaseException as e:
        self.give_error(e, True)
    else:
        _logger.info(f"Transaction is_complete {tx.is_complete()}")
('channel_update', { 'chain_hash': b'\xa0)>N\xeb=\xa6\xe6\xf5o\x81\xedY_W\x88\r\x1a!V\x9e\x13\xee\xfd\xd9Q(KZbfI', 'channel_flags': b'\x00', 'cltv_expiry_delta': 144, 'fee_base_msat': 500, 'fee_proportional_millionths': 35, 'htlc_maximum_msat': 1000000000, 'htlc_minimum_msat': 200, 'message_flags': b'\x01', 'short_channel_id': b'\x00\xd41\x00\x00o\x00\x02', 'signature': bytes(64), 'timestamp': 1584320643 }), decode_msg( bfh("010200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a0293e4eeb3da6e6f56f81ed595f57880d1a21569e13eefdd951284b5a62664900d43100006f00025e6ed0830100009000000000000000c8000001f400000023000000003b9aca00" ))) def test_encode_decode_msg__missing_optional_field_will_not_appear_in_decoded_dict( self): # "channel_update": optional field "htlc_maximum_msat" missing -> does not get put into dict self.assertEqual( bfh("010200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a0293e4eeb3da6e6f56f81ed595f57880d1a21569e13eefdd951284b5a62664900d43100006f00025e6ed0830100009000000000000000c8000001f400000023" ), encode_msg( "channel_update", short_channel_id=ShortChannelID.from_components(54321, 111, 2), channel_flags=b'\x00', message_flags=b'\x01', cltv_expiry_delta=144, htlc_minimum_msat=200, fee_base_msat=500,
def test_read_tlv_stream_tests1(self):
    """TLV stream decoding failure cases.

    Vectors taken from the lightning-rfc "TLV decoding failures" table:
    truncated BigSize prefixes, non-minimal encodings, unknown *even*
    (mandatory) record types, and records with trailing garbage.
    The first group is valid for any TLV namespace ("n1" and "n2");
    the later cases are specific to the "n1" namespace.
    """
    # from https://github.com/lightningnetwork/lightning-rfc/blob/452a0eb916fedf4c954137b4fd0b61b5002b34ad/01-messaging.md#tlv-decoding-failures
    lnser = LNSerializer()
    for tlv_stream_name in ("n1", "n2"):
        with self.subTest(tlv_stream_name=tlv_stream_name):
            # truncated type / length prefixes
            with self.assertRaises(UnexpectedEndOfStream):
                lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd")), tlv_stream_name=tlv_stream_name)
            with self.assertRaises(UnexpectedEndOfStream):
                lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd01")), tlv_stream_name=tlv_stream_name)
            with self.assertRaises(FieldEncodingNotMinimal):
                lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd000100")), tlv_stream_name=tlv_stream_name)
            with self.assertRaises(UnexpectedEndOfStream):
                lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd0101")), tlv_stream_name=tlv_stream_name)
            # truncated / non-minimal length for type 0x0f
            with self.assertRaises(UnexpectedEndOfStream):
                lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd")), tlv_stream_name=tlv_stream_name)
            with self.assertRaises(UnexpectedEndOfStream):
                lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd26")), tlv_stream_name=tlv_stream_name)
            with self.assertRaises(UnexpectedEndOfStream):
                lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd2602")), tlv_stream_name=tlv_stream_name)
            with self.assertRaises(FieldEncodingNotMinimal):
                lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd000100")), tlv_stream_name=tlv_stream_name)
            # declared length (0x0201) exceeds the available payload
            with self.assertRaises(UnexpectedEndOfStream):
                lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd0201000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")), tlv_stream_name="n1")
            # unknown even record types are mandatory -> must fail
            with self.assertRaises(UnknownMandatoryTLVRecordType):
                lnser.read_tlv_stream(fd=io.BytesIO(bfh("1200")), tlv_stream_name=tlv_stream_name)
            with self.assertRaises(UnknownMandatoryTLVRecordType):
                lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd010200")), tlv_stream_name=tlv_stream_name)
            with self.assertRaises(UnknownMandatoryTLVRecordType):
                lnser.read_tlv_stream(fd=io.BytesIO(bfh("fe0100000200")), tlv_stream_name=tlv_stream_name)
            with self.assertRaises(UnknownMandatoryTLVRecordType):
                lnser.read_tlv_stream(fd=io.BytesIO(bfh("ff010000000000000200")), tlv_stream_name=tlv_stream_name)
    # the following cases are specific to the "n1" namespace
    with self.assertRaises(MsgTrailingGarbage):
        lnser.read_tlv_stream(fd=io.BytesIO(bfh("0109ffffffffffffffffff")), tlv_stream_name="n1")
    # tlv1 (tu64): leading zero bytes are non-minimal
    with self.assertRaises(FieldEncodingNotMinimal):
        lnser.read_tlv_stream(fd=io.BytesIO(bfh("010100")), tlv_stream_name="n1")
    with self.assertRaises(FieldEncodingNotMinimal):
        lnser.read_tlv_stream(fd=io.BytesIO(bfh("01020001")), tlv_stream_name="n1")
    with self.assertRaises(FieldEncodingNotMinimal):
        lnser.read_tlv_stream(fd=io.BytesIO(bfh("0103000100")), tlv_stream_name="n1")
    with self.assertRaises(FieldEncodingNotMinimal):
        lnser.read_tlv_stream(fd=io.BytesIO(bfh("010400010000")), tlv_stream_name="n1")
    with self.assertRaises(FieldEncodingNotMinimal):
        lnser.read_tlv_stream(fd=io.BytesIO(bfh("01050001000000")), tlv_stream_name="n1")
    with self.assertRaises(FieldEncodingNotMinimal):
        lnser.read_tlv_stream(fd=io.BytesIO(bfh("0106000100000000")), tlv_stream_name="n1")
    with self.assertRaises(FieldEncodingNotMinimal):
        lnser.read_tlv_stream(fd=io.BytesIO(bfh("010700010000000000")), tlv_stream_name="n1")
    with self.assertRaises(FieldEncodingNotMinimal):
        lnser.read_tlv_stream(fd=io.BytesIO(bfh("01080001000000000000")), tlv_stream_name="n1")
    # tlv2 (short_channel_id): wrong payload length
    with self.assertRaises(UnexpectedEndOfStream):
        lnser.read_tlv_stream(fd=io.BytesIO(bfh("020701010101010101")), tlv_stream_name="n1")
    with self.assertRaises(MsgTrailingGarbage):
        lnser.read_tlv_stream(fd=io.BytesIO(bfh("0209010101010101010101")), tlv_stream_name="n1")
    # tlv3 (node_id + two amounts): truncated / oversized payloads
    with self.assertRaises(UnexpectedEndOfStream):
        lnser.read_tlv_stream(fd=io.BytesIO(bfh("0321023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb")), tlv_stream_name="n1")
    with self.assertRaises(UnexpectedEndOfStream):
        lnser.read_tlv_stream(fd=io.BytesIO(bfh("0329023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb0000000000000001")), tlv_stream_name="n1")
    with self.assertRaises(UnexpectedEndOfStream):
        lnser.read_tlv_stream(fd=io.BytesIO(bfh("0330023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb000000000000000100000000000001")), tlv_stream_name="n1")
    # check if ECC point is valid?... skip for now.
    #with self.assertRaises(Exception):
    #    lnser.read_tlv_stream(fd=io.BytesIO(bfh("0331043da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb00000000000000010000000000000002")), tlv_stream_name="n1")
    with self.assertRaises(MsgTrailingGarbage):
        lnser.read_tlv_stream(fd=io.BytesIO(bfh("0332023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb0000000000000001000000000000000001")), tlv_stream_name="n1")
    # tlv4 (type 0x00fe, tu16): wrong payload lengths
    with self.assertRaises(UnexpectedEndOfStream):
        lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fe00")), tlv_stream_name="n1")
    with self.assertRaises(UnexpectedEndOfStream):
        lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fe0101")), tlv_stream_name="n1")
    with self.assertRaises(MsgTrailingGarbage):
        lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fe03010101")), tlv_stream_name="n1")
    # type 0 is even and unknown in "n1" -> mandatory
    with self.assertRaises(UnknownMandatoryTLVRecordType):
        lnser.read_tlv_stream(fd=io.BytesIO(bfh("0000")), tlv_stream_name="n1")
def get_xfp_int(self) -> int: xfp = self.get_root_fingerprint() assert xfp is not None return xfp_int_from_xfp_bytes(bfh(xfp))
def test_read_tlv_stream_tests2(self):
    """TLV stream decoding success cases.

    Vectors taken from the lightning-rfc "TLV decoding successes" table:
    unknown *odd* record types are skipped (empty result dict) for any
    namespace; then "n1"-specific records decode into the expected dicts.
    """
    # from https://github.com/lightningnetwork/lightning-rfc/blob/452a0eb916fedf4c954137b4fd0b61b5002b34ad/01-messaging.md#tlv-decoding-successes
    lnser = LNSerializer()
    for tlv_stream_name in ("n1", "n2"):
        with self.subTest(tlv_stream_name=tlv_stream_name):
            # empty stream and unknown odd types -> ignored, empty dict
            self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("")), tlv_stream_name=tlv_stream_name))
            self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("2100")), tlv_stream_name=tlv_stream_name))
            self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd020100")), tlv_stream_name=tlv_stream_name))
            self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fd00")), tlv_stream_name=tlv_stream_name))
            self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00ff00")), tlv_stream_name=tlv_stream_name))
            self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("fe0200000100")), tlv_stream_name=tlv_stream_name))
            self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("ff020000000000000100")), tlv_stream_name=tlv_stream_name))
    # tlv1 (tu64 amount_msat): minimally-encoded values of increasing width
    self.assertEqual({"tlv1": {"amount_msat": 0}},
                     lnser.read_tlv_stream(fd=io.BytesIO(bfh("0100")), tlv_stream_name="n1"))
    self.assertEqual({"tlv1": {"amount_msat": 1}},
                     lnser.read_tlv_stream(fd=io.BytesIO(bfh("010101")), tlv_stream_name="n1"))
    self.assertEqual({"tlv1": {"amount_msat": 256}},
                     lnser.read_tlv_stream(fd=io.BytesIO(bfh("01020100")), tlv_stream_name="n1"))
    self.assertEqual({"tlv1": {"amount_msat": 65536}},
                     lnser.read_tlv_stream(fd=io.BytesIO(bfh("0103010000")), tlv_stream_name="n1"))
    self.assertEqual({"tlv1": {"amount_msat": 16777216}},
                     lnser.read_tlv_stream(fd=io.BytesIO(bfh("010401000000")), tlv_stream_name="n1"))
    self.assertEqual({"tlv1": {"amount_msat": 4294967296}},
                     lnser.read_tlv_stream(fd=io.BytesIO(bfh("01050100000000")), tlv_stream_name="n1"))
    self.assertEqual({"tlv1": {"amount_msat": 1099511627776}},
                     lnser.read_tlv_stream(fd=io.BytesIO(bfh("0106010000000000")), tlv_stream_name="n1"))
    self.assertEqual({"tlv1": {"amount_msat": 281474976710656}},
                     lnser.read_tlv_stream(fd=io.BytesIO(bfh("010701000000000000")), tlv_stream_name="n1"))
    self.assertEqual({"tlv1": {"amount_msat": 72057594037927936}},
                     lnser.read_tlv_stream(fd=io.BytesIO(bfh("01080100000000000000")), tlv_stream_name="n1"))
    # tlv2: 8-byte short_channel_id
    self.assertEqual({"tlv2": {"scid": ShortChannelID.from_components(0, 0, 550)}},
                     lnser.read_tlv_stream(fd=io.BytesIO(bfh("02080000000000000226")), tlv_stream_name="n1"))
    # tlv3: 33-byte node_id followed by two u64 amounts
    self.assertEqual({"tlv3": {"node_id": bfh("023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb"),
                               "amount_msat_1": 1,
                               "amount_msat_2": 2}},
                     lnser.read_tlv_stream(fd=io.BytesIO(bfh("0331023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb00000000000000010000000000000002")), tlv_stream_name="n1"))
    # tlv4 (type 0x00fe): tu16 cltv_delta
    self.assertEqual({"tlv4": {"cltv_delta": 550}},
                     lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fe020226")), tlv_stream_name="n1"))
def test_read_bigsize_int(self): self.assertEqual(0, read_bigsize_int(io.BytesIO(bfh("00")))) self.assertEqual(252, read_bigsize_int(io.BytesIO(bfh("fc")))) self.assertEqual(253, read_bigsize_int(io.BytesIO(bfh("fd00fd")))) self.assertEqual(65535, read_bigsize_int(io.BytesIO(bfh("fdffff")))) self.assertEqual(65536, read_bigsize_int(io.BytesIO(bfh("fe00010000")))) self.assertEqual(4294967295, read_bigsize_int(io.BytesIO(bfh("feffffffff")))) self.assertEqual( 4294967296, read_bigsize_int(io.BytesIO(bfh("ff0000000100000000")))) self.assertEqual( 18446744073709551615, read_bigsize_int(io.BytesIO(bfh("ffffffffffffffffff")))) with self.assertRaises(FieldEncodingNotMinimal): read_bigsize_int(io.BytesIO(bfh("fd00fc"))) with self.assertRaises(FieldEncodingNotMinimal): read_bigsize_int(io.BytesIO(bfh("fe0000ffff"))) with self.assertRaises(FieldEncodingNotMinimal): read_bigsize_int(io.BytesIO(bfh("ff00000000ffffffff"))) with self.assertRaises(UnexpectedEndOfStream): read_bigsize_int(io.BytesIO(bfh("fd00"))) with self.assertRaises(UnexpectedEndOfStream): read_bigsize_int(io.BytesIO(bfh("feffff"))) with self.assertRaises(UnexpectedEndOfStream): read_bigsize_int(io.BytesIO(bfh("ffffffffff"))) self.assertEqual(None, read_bigsize_int(io.BytesIO(bfh("")))) with self.assertRaises(UnexpectedEndOfStream): read_bigsize_int(io.BytesIO(bfh("fd"))) with self.assertRaises(UnexpectedEndOfStream): read_bigsize_int(io.BytesIO(bfh("fe"))) with self.assertRaises(UnexpectedEndOfStream): read_bigsize_int(io.BytesIO(bfh("ff")))
def build_psbt(tx: Transaction, wallet: Abstract_Wallet):
    """Serialize *tx* as a PSBT (BIP-174) file for upload to a Coldcard.

    Builds the global / per-input / per-output sections byte by byte,
    including BIP32 derivation records for all our keys, redeem/witness
    scripts for multisig and p2wpkh-p2sh cases, and any partial signatures
    already present.  The result is cached on ``tx.raw_psbt`` and returned.
    """
    # Render a PSBT file, for possible upload to Coldcard.
    #
    # TODO this should be part of Wallet object, or maybe Transaction?
    if getattr(tx, 'raw_psbt', False):
        _logger.info('PSBT cache hit')
        return tx.raw_psbt

    inputs = tx.inputs()
    if 'prev_tx' not in inputs[0]:
        # fetch info about inputs, if needed?
        # - needed during export PSBT flow, not normal online signing
        wallet.add_hw_info(tx)

    # wallet.add_hw_info installs this attr
    assert tx.output_info is not None, 'need data about outputs'

    # Build a map of all pubkeys needed as derivation from master XFP, in PSBT binary format
    # 1) binary version of the common subpath for all keys
    #    m/ => fingerprint LE32
    #    a/b/c => ints
    #
    # 2) all used keys in transaction:
    #    - for all inputs and outputs (when its change back)
    #    - for all keystores, if multisig
    #
    subkeys = {}
    for ks in wallet.get_keystores():
        # XFP + fixed prefix for this keystore
        ks_prefix = packed_xfp_path_for_keystore(ks)
        # all pubkeys needed for input signing
        for xpubkey, derivation in ks.get_tx_derivations(tx).items():
            pubkey = xpubkey_to_pubkey(xpubkey)
            # assuming depth two, non-harded: change + index
            aa, bb = derivation
            assert 0 <= aa < 0x80000000 and 0 <= bb < 0x80000000
            subkeys[bfh(pubkey)] = ks_prefix + pack('<II', aa, bb)
        # all keys related to change outputs
        for o in tx.outputs():
            if o.address in tx.output_info:
                # this address "is_mine" but might not be change (if I send funds to myself)
                output_info = tx.output_info.get(o.address)
                if not output_info.is_change:
                    continue
                chg_path = output_info.address_index
                assert chg_path[0] == 1 and len(chg_path) == 2, f"unexpected change path: {chg_path}"
                pubkey = ks.derive_pubkey(True, chg_path[1])
                subkeys[bfh(pubkey)] = ks_prefix + pack('<II', *chg_path)

    # Sanity checks on inputs before serializing anything.
    for txin in inputs:
        assert txin['type'] != 'coinbase', _("Coinbase not supported")
        if txin['type'] in ['p2sh', 'p2wsh-p2sh', 'p2wsh']:
            assert type(wallet) is Multisig_Wallet

    # Construct PSBT from start to finish.
    out_fd = io.BytesIO()
    out_fd.write(b'psbt\xff')  # PSBT magic + separator

    def write_kv(ktype, val, key=b''):
        # serialize helper: write w/ size and key byte
        out_fd.write(my_var_int(1 + len(key)))
        out_fd.write(bytes([ktype]) + key)
        if isinstance(val, str):
            val = bfh(val)
        out_fd.write(my_var_int(len(val)))
        out_fd.write(val)

    # global section: just the unsigned txn
    class CustomTXSerialization(Transaction):
        # serialize with empty input scripts, as BIP-174 requires for the
        # global unsigned-transaction record
        @classmethod
        def input_script(cls, txin, estimate_size=False):
            return ''

    unsigned = bfh(CustomTXSerialization(tx.serialize()).serialize_to_network(witness=False))
    write_kv(PSBT_GLOBAL_UNSIGNED_TX, unsigned)

    if type(wallet) is Multisig_Wallet:
        # always put the xpubs into the PSBT, useful at least for checking
        for xp, ks in zip(wallet.get_master_public_keys(), wallet.get_keystores()):
            ks_prefix = packed_xfp_path_for_keystore(ks)
            write_kv(PSBT_GLOBAL_XPUB, ks_prefix, DecodeBase58Check(xp))

    # end globals section
    out_fd.write(b'\x00')

    # inputs section
    for txin in inputs:
        if Transaction.is_segwit_input(txin):
            # segwit inputs only need the spent output, not the whole prev tx
            utxo = txin['prev_tx'].outputs()[txin['prevout_n']]
            spendable = txin['prev_tx'].serialize_output(utxo)
            write_kv(PSBT_IN_WITNESS_UTXO, spendable)
        else:
            write_kv(PSBT_IN_NON_WITNESS_UTXO, str(txin['prev_tx']))

        pubkeys, x_pubkeys = tx.get_sorted_pubkeys(txin)
        pubkeys = [bfh(k) for k in pubkeys]

        if type(wallet) is Multisig_Wallet:
            # always need a redeem script for multisig
            scr = Transaction.get_preimage_script(txin)
            if Transaction.is_segwit_input(txin):
                # needed for both p2wsh-p2sh and p2wsh
                write_kv(PSBT_IN_WITNESS_SCRIPT, bfh(scr))
            else:
                write_kv(PSBT_IN_REDEEM_SCRIPT, bfh(scr))

        sigs = txin.get('signatures')

        for pk_pos, (pubkey, x_pubkey) in enumerate(zip(pubkeys, x_pubkeys)):
            if pubkey in subkeys:
                # faster? case ... calculated above
                write_kv(PSBT_IN_BIP32_DERIVATION, subkeys[pubkey], pubkey)
            else:
                # when an input is partly signed, tx.get_tx_derivations()
                # doesn't include that keystore's value and yet we need it
                # because we need to show a correct keypath...
                assert x_pubkey[0:2] == 'ff', x_pubkey
                for ks in wallet.get_keystores():
                    d = ks.get_pubkey_derivation(x_pubkey)
                    if d is not None:
                        ks_path = packed_xfp_path_for_keystore(ks, d)
                        write_kv(PSBT_IN_BIP32_DERIVATION, ks_path, pubkey)
                        break
                else:
                    raise AssertionError("no keystore for: %s" % x_pubkey)

            if txin['type'] == 'p2wpkh-p2sh':
                assert len(pubkeys) == 1, 'can be only one redeem script per input'
                pa = hash_160(pubkey)
                assert len(pa) == 20
                # redeem script is the p2wpkh witness program: OP_0 <20-byte hash>
                write_kv(PSBT_IN_REDEEM_SCRIPT, b'\x00\x14'+pa)

            # optional? insert (partial) signatures that we already have
            if sigs and sigs[pk_pos]:
                write_kv(PSBT_IN_PARTIAL_SIG, bfh(sigs[pk_pos]), pubkey)

        out_fd.write(b'\x00')

    # outputs section
    for o in tx.outputs():
        # can be empty, but must be present, and helpful to show change inputs
        # wallet.add_hw_info() adds some data about change outputs into tx.output_info
        if o.address in tx.output_info:
            # this address "is_mine" but might not be change (if I send funds to myself)
            output_info = tx.output_info.get(o.address)
            if output_info.is_change:
                pubkeys = [bfh(i) for i in wallet.get_public_keys(o.address)]

                # Add redeem/witness script?
                if type(wallet) is Multisig_Wallet:
                    # always need a redeem script for multisig cases
                    scr = bfh(multisig_script([bh2u(i) for i in sorted(pubkeys)], wallet.m))

                    if output_info.script_type == 'p2wsh-p2sh':
                        write_kv(PSBT_OUT_WITNESS_SCRIPT, scr)
                        # redeem script wraps the p2wsh program: OP_0 <32-byte sha256>
                        write_kv(PSBT_OUT_REDEEM_SCRIPT, b'\x00\x20' + sha256(scr))
                    elif output_info.script_type == 'p2wsh':
                        write_kv(PSBT_OUT_WITNESS_SCRIPT, scr)
                    elif output_info.script_type == 'p2sh':
                        write_kv(PSBT_OUT_REDEEM_SCRIPT, scr)
                    else:
                        raise ValueError(output_info.script_type)

                elif output_info.script_type == 'p2wpkh-p2sh':
                    # need a redeem script when P2SH is used to wrap p2wpkh
                    assert len(pubkeys) == 1
                    pa = hash_160(pubkeys[0])
                    write_kv(PSBT_OUT_REDEEM_SCRIPT, b'\x00\x14' + pa)

                # Document change output's bip32 derivation(s)
                for pubkey in pubkeys:
                    sk = subkeys[pubkey]
                    write_kv(PSBT_OUT_BIP32_DERIVATION, sk, pubkey)

        out_fd.write(b'\x00')

    # capture for later use
    tx.raw_psbt = out_fd.getvalue()

    return tx.raw_psbt
def test_encode_decode_msg__commitment_signed(self):
    """Round-trip encode/decode of "commitment_signed" messages.

    Interesting because the "htlc_signature" field is a concatenation of
    multiple ("num_htlcs") 64-byte signatures.  Covers five, one, and zero
    HTLC signatures, each checked in both directions (encode and decode)
    against byte-exact expected serializations.
    """
    # "commitment_signed" is interesting because of the "htlc_signature" field,
    # which is a concatenation of multiple ("num_htlcs") signatures.
    # 5 htlcs
    self.assertEqual(
        bfh("0084010101010101010101010101010101010101010101010101010101010101010106112951d0a6d7fc1dbca3bd1cdbda9acfee7f668b3c0a36bd944f7e2f305b274ba46a61279e15163b2d376c664bb3481d7c5e107a5b268301e39aebbda27d2d00056548bd093a2bd2f4f053f0c6eb2c5f541d55eb8a2ede4d35fe974e5d3cd0eec3138bfd4115f4483c3b14e7988b48811d2da75f29f5e6eee691251fb4fba5a2610ba8fe7007117fe1c9fa1a6b01805c84cfffbb0eba674b64342c7cac567dea50728c1bb1aadc6d23fc2f4145027eafca82d6072cc9ce6529542099f728a0521e4b2044df5d02f7f2cdf84404762b1979528aa689a3e060a2a90ba8ef9a83d24d31ffb0d95c71d9fb9049b24ecf2c949c1486e7eb3ae160d70d54e441dc785dc57f7f3c9901b9537398c66f546cfc1d65e0748895d14699342c407fe119ac17db079b103720124a5ba22d4ba14c12832324dea9cb60c61ee74376ee7dcffdd1836e354aa8838ce3b37854fa91465cc40c73b702915e3580bfebaace805d52373b57ac755ebe4a8fe97e5fc21669bea124b809c79968479148f7174f39b8014542"),
        encode_msg(
            "commitment_signed",
            channel_id=b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
            signature=b"\x06\x11)Q\xd0\xa6\xd7\xfc\x1d\xbc\xa3\xbd\x1c\xdb\xda\x9a\xcf\xee\x7ff\x8b<\n6\xbd\x94O~/0['K\xa4ja'\x9e\x15\x16;-7lfK\xb3H\x1d|^\x10z[&\x83\x01\xe3\x9a\xeb\xbd\xa2}-",
            num_htlcs=5,
            # 5 concatenated 64-byte signatures
            htlc_signature=bfh("6548bd093a2bd2f4f053f0c6eb2c5f541d55eb8a2ede4d35fe974e5d3cd0eec3138bfd4115f4483c3b14e7988b48811d2da75f29f5e6eee691251fb4fba5a2610ba8fe7007117fe1c9fa1a6b01805c84cfffbb0eba674b64342c7cac567dea50728c1bb1aadc6d23fc2f4145027eafca82d6072cc9ce6529542099f728a0521e4b2044df5d02f7f2cdf84404762b1979528aa689a3e060a2a90ba8ef9a83d24d31ffb0d95c71d9fb9049b24ecf2c949c1486e7eb3ae160d70d54e441dc785dc57f7f3c9901b9537398c66f546cfc1d65e0748895d14699342c407fe119ac17db079b103720124a5ba22d4ba14c12832324dea9cb60c61ee74376ee7dcffdd1836e354aa8838ce3b37854fa91465cc40c73b702915e3580bfebaace805d52373b57ac755ebe4a8fe97e5fc21669bea124b809c79968479148f7174f39b8014542"),
        ))
    self.assertEqual(('commitment_signed',
                      {'channel_id': b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
                       'signature': b"\x06\x11)Q\xd0\xa6\xd7\xfc\x1d\xbc\xa3\xbd\x1c\xdb\xda\x9a\xcf\xee\x7ff\x8b<\n6\xbd\x94O~/0['K\xa4ja'\x9e\x15\x16;-7lfK\xb3H\x1d|^\x10z[&\x83\x01\xe3\x9a\xeb\xbd\xa2}-",
                       'num_htlcs': 5,
                       'htlc_signature': bfh("6548bd093a2bd2f4f053f0c6eb2c5f541d55eb8a2ede4d35fe974e5d3cd0eec3138bfd4115f4483c3b14e7988b48811d2da75f29f5e6eee691251fb4fba5a2610ba8fe7007117fe1c9fa1a6b01805c84cfffbb0eba674b64342c7cac567dea50728c1bb1aadc6d23fc2f4145027eafca82d6072cc9ce6529542099f728a0521e4b2044df5d02f7f2cdf84404762b1979528aa689a3e060a2a90ba8ef9a83d24d31ffb0d95c71d9fb9049b24ecf2c949c1486e7eb3ae160d70d54e441dc785dc57f7f3c9901b9537398c66f546cfc1d65e0748895d14699342c407fe119ac17db079b103720124a5ba22d4ba14c12832324dea9cb60c61ee74376ee7dcffdd1836e354aa8838ce3b37854fa91465cc40c73b702915e3580bfebaace805d52373b57ac755ebe4a8fe97e5fc21669bea124b809c79968479148f7174f39b8014542")}),
                     decode_msg(bfh("0084010101010101010101010101010101010101010101010101010101010101010106112951d0a6d7fc1dbca3bd1cdbda9acfee7f668b3c0a36bd944f7e2f305b274ba46a61279e15163b2d376c664bb3481d7c5e107a5b268301e39aebbda27d2d00056548bd093a2bd2f4f053f0c6eb2c5f541d55eb8a2ede4d35fe974e5d3cd0eec3138bfd4115f4483c3b14e7988b48811d2da75f29f5e6eee691251fb4fba5a2610ba8fe7007117fe1c9fa1a6b01805c84cfffbb0eba674b64342c7cac567dea50728c1bb1aadc6d23fc2f4145027eafca82d6072cc9ce6529542099f728a0521e4b2044df5d02f7f2cdf84404762b1979528aa689a3e060a2a90ba8ef9a83d24d31ffb0d95c71d9fb9049b24ecf2c949c1486e7eb3ae160d70d54e441dc785dc57f7f3c9901b9537398c66f546cfc1d65e0748895d14699342c407fe119ac17db079b103720124a5ba22d4ba14c12832324dea9cb60c61ee74376ee7dcffdd1836e354aa8838ce3b37854fa91465cc40c73b702915e3580bfebaace805d52373b57ac755ebe4a8fe97e5fc21669bea124b809c79968479148f7174f39b8014542")))
    # single htlc
    self.assertEqual(
        bfh("008401010101010101010101010101010101010101010101010101010101010101013b14af0c549dfb1fb287ff57c012371b3932996db5929eda5f251704751fb49d0dc2dcb88e5021575cb572fb71693758543f97d89e9165f913bfb7488d7cc26500012d31103b9f6e71131e4fee86fdfbdeba90e52b43fcfd11e8e53811cd4d59b2575ae6c3c82f85bea144c88cc35e568f1e6bdd0c57337e86de0b5da7cd9994067a"),
        encode_msg(
            "commitment_signed",
            channel_id=b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
            signature=b';\x14\xaf\x0cT\x9d\xfb\x1f\xb2\x87\xffW\xc0\x127\x1b92\x99m\xb5\x92\x9e\xda_%\x17\x04u\x1f\xb4\x9d\r\xc2\xdc\xb8\x8eP!W\\\xb5r\xfbqi7XT?\x97\xd8\x9e\x91e\xf9\x13\xbf\xb7H\x8d|\xc2e',
            num_htlcs=1,
            htlc_signature=bfh("2d31103b9f6e71131e4fee86fdfbdeba90e52b43fcfd11e8e53811cd4d59b2575ae6c3c82f85bea144c88cc35e568f1e6bdd0c57337e86de0b5da7cd9994067a"),
        ))
    self.assertEqual(('commitment_signed',
                      {'channel_id': b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
                       'signature': b';\x14\xaf\x0cT\x9d\xfb\x1f\xb2\x87\xffW\xc0\x127\x1b92\x99m\xb5\x92\x9e\xda_%\x17\x04u\x1f\xb4\x9d\r\xc2\xdc\xb8\x8eP!W\\\xb5r\xfbqi7XT?\x97\xd8\x9e\x91e\xf9\x13\xbf\xb7H\x8d|\xc2e',
                       'num_htlcs': 1,
                       'htlc_signature': bfh("2d31103b9f6e71131e4fee86fdfbdeba90e52b43fcfd11e8e53811cd4d59b2575ae6c3c82f85bea144c88cc35e568f1e6bdd0c57337e86de0b5da7cd9994067a")}),
                     decode_msg(bfh("008401010101010101010101010101010101010101010101010101010101010101013b14af0c549dfb1fb287ff57c012371b3932996db5929eda5f251704751fb49d0dc2dcb88e5021575cb572fb71693758543f97d89e9165f913bfb7488d7cc26500012d31103b9f6e71131e4fee86fdfbdeba90e52b43fcfd11e8e53811cd4d59b2575ae6c3c82f85bea144c88cc35e568f1e6bdd0c57337e86de0b5da7cd9994067a")))
    # zero htlcs
    self.assertEqual(
        bfh("008401010101010101010101010101010101010101010101010101010101010101014e206ecf904d9237b1c5b4e08513555e9a5932c45b5f68be8764ce998df635ae04f6ce7bbcd3b4fd08e2daab7f9059b287ecab4155367b834682633497173f450000"),
        encode_msg(
            "commitment_signed",
            channel_id=b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
            signature=b'N n\xcf\x90M\x927\xb1\xc5\xb4\xe0\x85\x13U^\x9aY2\xc4[_h\xbe\x87d\xce\x99\x8d\xf65\xae\x04\xf6\xce{\xbc\xd3\xb4\xfd\x08\xe2\xda\xab\x7f\x90Y\xb2\x87\xec\xabAU6{\x83F\x82c4\x97\x17?E',
            num_htlcs=0,
            htlc_signature=bfh(""),  # empty concatenation when num_htlcs == 0
        ))
    self.assertEqual(('commitment_signed',
                      {'channel_id': b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
                       'signature': b'N n\xcf\x90M\x927\xb1\xc5\xb4\xe0\x85\x13U^\x9aY2\xc4[_h\xbe\x87d\xce\x99\x8d\xf65\xae\x04\xf6\xce{\xbc\xd3\xb4\xfd\x08\xe2\xda\xab\x7f\x90Y\xb2\x87\xec\xabAU6{\x83F\x82c4\x97\x17?E',
                       'num_htlcs': 0,
                       'htlc_signature': bfh("")}),
                     decode_msg(bfh("008401010101010101010101010101010101010101010101010101010101010101014e206ecf904d9237b1c5b4e08513555e9a5932c45b5f68be8764ce998df635ae04f6ce7bbcd3b4fd08e2daab7f9059b287ecab4155367b834682633497173f450000")))
def xfp_from_xpub(xpub):
    """Return the BIP32 fingerprint of *xpub* as an unsigned 32-bit int.

    The fingerprint is the first 4 bytes of ripemd160(sha256(pubkey)),
    interpreted here as a little-endian integer (the convention this
    plugin's device firmware expects — verify against callers).
    """
    pubkey_bytes = bfh(Xpub.get_pubkey_from_xpub(xpub, []))
    # a compressed secp256k1 public key is always 33 bytes
    assert len(pubkey_bytes) == 33
    return int.from_bytes(hash_160(pubkey_bytes)[:4], byteorder='little')
def sign_transaction(self, tx, password):
    """Sign *tx* in place using the Ledger device.

    Flow: collect per-input data, build the serialized outputs blob,
    detect at most one change output, then drive the device through
    btchip's startUntrustedTransaction/finalizeInput/untrustedHashSign
    APIs — one pass per input.  Signatures are written back into *tx*
    via tx.add_signature_to_txin.  Returns None; already-complete
    transactions are left untouched.  `password` is unused here
    (device handles its own PIN).
    """
    if tx.is_complete():
        return
    inputs = []          # per input: [prev_tx_raw, out_idx, redeemScript, prev_txid_hex, my_pubkey, nsequence, value_sats]
    inputsPaths = []     # BIP32 derivation path string per input (without leading "m/")
    chipInputs = []      # device-formatted input descriptors
    redeemScripts = []
    changePath = ""
    output = None        # address shown to the user for confirmation, if needed
    p2shTransaction = False
    segwitTransaction = False
    pin = ""
    client_ledger = self.get_client()  # prompt for the PIN before displaying the dialog if necessary
    client_electrum = self.get_client_electrum()
    assert client_electrum

    # Fetch inputs of the transaction to sign
    for txin in tx.inputs():
        if txin.is_coinbase_input():
            self.give_error("Coinbase not supported")  # should never happen
        if txin.script_type in ['p2sh']:
            p2shTransaction = True
        if txin.script_type in ['p2wpkh-p2sh', 'p2wsh-p2sh']:
            # wrapped-segwit needs newer firmware than plain legacy
            if not client_electrum.supports_segwit():
                self.give_error(MSG_NEEDS_FW_UPDATE_SEGWIT)
            segwitTransaction = True
        if txin.script_type in ['p2wpkh', 'p2wsh']:
            if not client_electrum.supports_native_segwit():
                self.give_error(MSG_NEEDS_FW_UPDATE_SEGWIT)
            segwitTransaction = True
        my_pubkey, full_path = self.find_my_pubkey_in_txinout(txin)
        if not full_path:
            self.give_error("No matching pubkey for sign_transaction")  # should never happen
        # strip the leading "m/" — the device wants the bare path
        full_path = convert_bip32_intpath_to_strpath(full_path)[2:]
        redeemScript = Transaction.get_preimage_script(txin)
        txin_prev_tx = txin.utxo
        # legacy inputs require the full previous tx to build a trusted input
        if txin_prev_tx is None and not Transaction.is_segwit_input(txin):
            raise UserFacingException(_('Missing previous tx for legacy input.'))
        txin_prev_tx_raw = txin_prev_tx.serialize() if txin_prev_tx else None
        inputs.append([txin_prev_tx_raw,
                       txin.prevout.out_idx,
                       redeemScript,
                       txin.prevout.txid.hex(),
                       my_pubkey,
                       txin.nsequence,
                       txin.value_sats()])
        inputsPaths.append(full_path)

    # Sanity check
    if p2shTransaction:
        for txin in tx.inputs():
            if txin.script_type != 'p2sh':
                self.give_error("P2SH / regular input mixed in same transaction not supported")  # should never happen

    # Serialize all outputs (count + value/script per output) as the
    # device-side "outputData" blob; built as a hex string, then bytes.
    txOutput = var_int(len(tx.outputs()))
    for o in tx.outputs():
        txOutput += int_to_hex(o.value, 8)
        script = o.scriptpubkey.hex()
        txOutput += var_int(len(script) // 2)
        txOutput += script
    txOutput = bfh(txOutput)

    if not client_electrum.supports_multi_output():
        if len(tx.outputs()) > 2:
            self.give_error("Transaction with more than 2 outputs not supported")
    for txout in tx.outputs():
        if not txout.address:
            if client_electrum.is_hw1():
                self.give_error(_("Only address outputs are supported by {}").format(self.device))
            # note: max_size based on https://github.com/LedgerHQ/ledger-app-btc/commit/3a78dee9c0484821df58975803e40d58fbfc2c38#diff-c61ccd96a6d8b54d48f54a3bc4dfa7e2R26
            validate_op_return_output(txout, max_size=190)

    # Output "change" detection
    # - only one output and one change is authorized (for hw.1 and nano)
    # - at most one output can bypass confirmation (~change) (for all)
    if not p2shTransaction:
        has_change = False
        any_output_on_change_branch = is_any_tx_output_on_change_branch(tx)
        for txout in tx.outputs():
            if txout.is_mine and len(tx.outputs()) > 1 \
                    and not has_change:
                # prioritise hiding outputs on the 'change' branch from user
                # because no more than one change address allowed
                if txout.is_change == any_output_on_change_branch:
                    my_pubkey, changePath = self.find_my_pubkey_in_txinout(txout)
                    assert changePath
                    changePath = convert_bip32_intpath_to_strpath(changePath)[2:]
                    has_change = True
                else:
                    output = txout.address
            else:
                output = txout.address

    self.handler.show_message(_("Confirm Transaction on your Ledger device..."))
    try:
        # Get trusted inputs from the original transactions
        for utxo in inputs:
            sequence = int_to_hex(utxo[5], 4)
            if segwitTransaction and not client_electrum.supports_segwit_trustedInputs():
                # older segwit firmware: synthesize the input descriptor
                # from reversed txid + vout + amount instead of a trusted input
                tmp = bfh(utxo[3])[::-1]
                tmp += bfh(int_to_hex(utxo[1], 4))
                tmp += bfh(int_to_hex(utxo[6], 8))  # txin['value']
                chipInputs.append({'value': tmp, 'witness': True, 'sequence': sequence})
                redeemScripts.append(bfh(utxo[2]))
            elif (not p2shTransaction) or client_electrum.supports_multi_output():
                # let the device derive a trusted input from the full prev tx
                txtmp = bitcoinTransaction(bfh(utxo[0]))
                trustedInput = client_ledger.getTrustedInput(txtmp, utxo[1])
                trustedInput['sequence'] = sequence
                if segwitTransaction:
                    trustedInput['witness'] = True
                chipInputs.append(trustedInput)
                if p2shTransaction or segwitTransaction:
                    redeemScripts.append(bfh(utxo[2]))
                else:
                    redeemScripts.append(txtmp.outputs[utxo[1]].script)
            else:
                # fallback: untrusted input (reversed txid + vout)
                tmp = bfh(utxo[3])[::-1]
                tmp += bfh(int_to_hex(utxo[1], 4))
                chipInputs.append({'value': tmp, 'sequence': sequence})
                redeemScripts.append(bfh(utxo[2]))

        # Sign all inputs
        firstTransaction = True
        inputIndex = 0
        rawTx = tx.serialize_to_network()
        client_ledger.enableAlternate2fa(False)
        if segwitTransaction:
            # segwit: one upfront confirmation, then sign each input
            # against its own single-input pseudo-transaction
            client_ledger.startUntrustedTransaction(True, inputIndex,
                                                    chipInputs, redeemScripts[inputIndex],
                                                    version=tx.version)
            # we don't set meaningful outputAddress, amount and fees
            # as we only care about the alternateEncoding==True branch
            outputData = client_ledger.finalizeInput(b'', 0, 0, changePath, bfh(rawTx))
            outputData['outputData'] = txOutput
            if outputData['confirmationNeeded']:
                outputData['address'] = output
                self.handler.finished()
                pin = self.handler.get_auth(outputData)  # does the authenticate dialog and returns pin
                if not pin:
                    # UserWarning doubles as the "user cancelled" signal; caught below
                    raise UserWarning()
                self.handler.show_message(_("Confirmed. Signing Transaction..."))
            while inputIndex < len(inputs):
                singleInput = [chipInputs[inputIndex]]
                client_ledger.startUntrustedTransaction(False, 0,
                                                        singleInput, redeemScripts[inputIndex],
                                                        version=tx.version)
                inputSignature = client_ledger.untrustedHashSign(inputsPaths[inputIndex],
                                                                 pin, lockTime=tx.locktime)
                inputSignature[0] = 0x30  # force for 1.4.9+
                my_pubkey = inputs[inputIndex][4]
                tx.add_signature_to_txin(txin_idx=inputIndex,
                                         signing_pubkey=my_pubkey.hex(),
                                         sig=inputSignature.hex())
                inputIndex = inputIndex + 1
        else:
            # legacy: replay the whole transaction for every input; the
            # first pass may require on-device/2FA confirmation
            while inputIndex < len(inputs):
                client_ledger.startUntrustedTransaction(firstTransaction, inputIndex,
                                                        chipInputs, redeemScripts[inputIndex],
                                                        version=tx.version)
                # we don't set meaningful outputAddress, amount and fees
                # as we only care about the alternateEncoding==True branch
                outputData = client_ledger.finalizeInput(b'', 0, 0, changePath, bfh(rawTx))
                outputData['outputData'] = txOutput
                if outputData['confirmationNeeded']:
                    outputData['address'] = output
                    self.handler.finished()
                    pin = self.handler.get_auth(outputData)  # does the authenticate dialog and returns pin
                    if not pin:
                        raise UserWarning()
                    self.handler.show_message(_("Confirmed. Signing Transaction..."))
                else:
                    # Sign input with the provided PIN
                    inputSignature = client_ledger.untrustedHashSign(inputsPaths[inputIndex],
                                                                     pin, lockTime=tx.locktime)
                    inputSignature[0] = 0x30  # force for 1.4.9+
                    my_pubkey = inputs[inputIndex][4]
                    tx.add_signature_to_txin(txin_idx=inputIndex,
                                             signing_pubkey=my_pubkey.hex(),
                                             sig=inputSignature.hex())
                    inputIndex = inputIndex + 1
                firstTransaction = False
    except UserWarning:
        self.handler.show_error(_('Cancelled by user'))
        return
    except BTChipException as e:
        if e.sw in (0x6985, 0x6d00):  # cancelled by user
            return
        elif e.sw == 0x6982:
            raise  # pin lock. decorator will catch it
        else:
            self.logger.exception('')
            self.give_error(e, True)
    except BaseException as e:
        self.logger.exception('')
        self.give_error(e, True)
    finally:
        # always dismiss the "Confirm Transaction" dialog
        self.handler.finished()