Example #1
 def test_encode_decode_msg__missing_optional_field_will_not_appear_in_decoded_dict(
         self):
     # "channel_update": optional field "htlc_maximum_msat" missing -> does not get put into dict
     self.assertEqual(
         bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023"
             ),
         encode_msg(
             "channel_update",
             short_channel_id=ShortChannelID.from_components(54321, 111, 2),
             channel_flags=b'\x00',
             message_flags=b'\x01',
             cltv_expiry_delta=144,
             htlc_minimum_msat=200,
             fee_base_msat=500,
             fee_proportional_millionths=35,
             chain_hash=constants.net.rev_genesis_bytes(),
             timestamp=1584320643,
         ))
     self.assertEqual(
         ('channel_update', {
             'chain_hash':
             b'CI\x7f\xd7\xf8&\x95q\x08\xf4\xa3\x0f\xd9\xce\xc3\xae\xbay\x97 \x84\xe9\x0e\xad\x01\xea3\t\x00\x00\x00\x00',
             'channel_flags': b'\x00',
             'cltv_expiry_delta': 144,
             'fee_base_msat': 500,
             'fee_proportional_millionths': 35,
             'htlc_minimum_msat': 200,
             'message_flags': b'\x01',
             'short_channel_id': b'\x00\xd41\x00\x00o\x00\x02',
             'signature': bytes(64),
             'timestamp': 1584320643
         }),
         decode_msg(
             bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023"
                 )))
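
A quick check (standalone sketch, not part of the original test) of how the short_channel_id above packs into the 8 bytes seen in the decoded dict: 3-byte block height, 3-byte transaction position and 2-byte output index, all big-endian.

scid = (54321).to_bytes(3, "big") + (111).to_bytes(3, "big") + (2).to_bytes(2, "big")
assert scid == b'\x00\xd41\x00\x00o\x00\x02'    # value in the decoded dict above
assert scid.hex() == "00d43100006f0002"         # substring visible in the raw message hex
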
Example #2
 def test_verify_fail_f_tx_even(self):
     """Raise if inner node of merkle branch is valid tx. ('even' fake leaf position)"""
     # last 32 bytes of T encoded as hash
     fake_branch_node = hash_encode(bfh(VALID_64_BYTE_TX[64:]))
     fake_mbranch = [fake_branch_node] + MERKLE_BRANCH
     # first 32 bytes of T encoded as hash
     f_tx_hash = hash_encode(bfh(VALID_64_BYTE_TX[:64]))
     with self.assertRaises(InnerNodeOfSpvProofIsValidTx):
         SPV.hash_merkle_root(fake_mbranch, f_tx_hash, 6)
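
Background for what this test guards against (a standalone sketch, not the library's code): an inner node of a Bitcoin-style merkle tree is sha256d(left || right) over two 32-byte children, so a transaction that is exactly 64 bytes long hashes to the same value whether it is read as a leaf or as a pair of child hashes. hash_merkle_root therefore has to reject 64-byte "transactions" explicitly.

import hashlib

def sha256d(b: bytes) -> bytes:
    return hashlib.sha256(hashlib.sha256(b).digest()).digest()

tx64 = bytes(range(64))                        # any 64-byte blob, standing in for VALID_64_BYTE_TX
left, right = tx64[:32], tx64[32:]             # read as the two children of an inner node
assert sha256d(left + right) == sha256d(tx64)  # inner-node hash and "txid" coincide
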
Example #3
 def parse_script(self, x):
     script = ''
     for word in x.split():
         if word[0:3] == 'OP_':
             opcode_int = opcodes[word]
             script += construct_script([opcode_int])
         else:
             bfh(word)  # to test it is hex data
             script += construct_script([word])
     return script
Example #4
 def parse_output(self, x) -> bytes:
     try:
         address = self.parse_address(x)
         return bfh(syscoin.address_to_script(address))
     except Exception:
         pass
     try:
         script = self.parse_script(x)
         return bfh(script)
     except Exception:
         pass
     raise Exception("Invalid address or script.")
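
A hedged usage sketch for the two helpers above. It assumes construct_script encodes data pushes of 1-75 bytes with a single length byte (the usual minimal-push rule); the concrete input strings are illustrative, not taken from the source.

# parse_script("OP_RETURN deadbeef") is expected to yield "6a04deadbeef", so
# parse_output would return the corresponding bytes via its script branch:
OP_RETURN = 0x6a
data = bytes.fromhex("deadbeef")
expected_script = bytes([OP_RETURN, len(data)]) + data   # 0x6a, push-4, data
assert expected_script.hex() == "6a04deadbeef"
# parse_output("definitely not an address or hex") would raise "Invalid address or script."
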
Example #5
 def get_noise_map(
         cls, versioned_seed: VersionedSeed) -> Dict[Tuple[int, int], int]:
     """Returns a map from (x,y) coordinate to pixel value 0/1, to be used as rawnoise."""
     w, h = cls.SIZE
     version = versioned_seed.version
     hex_seed = versioned_seed.seed
     checksum = versioned_seed.checksum
     noise_map = {}
     if version == '0':
         random.seed(int(hex_seed, 16))
         for x in range(w):
             for y in range(h):
                 noise_map[(x, y)] = random.randint(0, 1)
     elif version == '1':
         prng_seed = bfh(hex_seed + version + checksum)
         drbg = DRBG(prng_seed)
         num_noise_bytes = 1929  # ~ w*h
         noise_array = bin(
             int.from_bytes(drbg.generate(num_noise_bytes), 'big'))[2:]
         # there's an approx 1/1024 chance that the generated number is 'too small'
         # and we would get IndexError below. easiest backwards compat fix:
         noise_array += '0' * (w * h - len(noise_array))
         i = 0
         for x in range(w):
             for y in range(h):
                 noise_map[(x, y)] = int(noise_array[i])
                 i += 1
     else:
         raise Exception(f"unexpected revealer version: {version}")
     return noise_map
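
Why the '0'-padding in the version '1' branch is needed (standalone illustration): bin() drops leading zero bits, so when the DRBG output happens to start with zero bits the bit string comes out shorter than 8 * num_noise_bytes.

demo = bytes.fromhex("00ff")                     # 2 bytes from a hypothetical DRBG
bits = bin(int.from_bytes(demo, "big"))[2:]      # '11111111' -- 8 chars instead of 16
assert len(bits) == 8
bits += "0" * (16 - len(bits))                   # the same right-padding used above
assert len(bits) == 16
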
Example #6
 def test_encode_decode_msg__ints_can_be_passed_as_bytes(self):
     self.assertEqual(
         bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023000000003b9aca00"
             ),
         encode_msg(
             "channel_update",
             short_channel_id=ShortChannelID.from_components(54321, 111, 2),
             channel_flags=b'\x00',
             message_flags=b'\x01',
             cltv_expiry_delta=int.to_bytes(144,
                                            length=2,
                                            byteorder="big",
                                            signed=False),
             htlc_minimum_msat=int.to_bytes(200,
                                            length=8,
                                            byteorder="big",
                                            signed=False),
             htlc_maximum_msat=int.to_bytes(1_000_000_000,
                                            length=8,
                                            byteorder="big",
                                            signed=False),
             fee_base_msat=int.to_bytes(500,
                                        length=4,
                                        byteorder="big",
                                        signed=False),
             fee_proportional_millionths=int.to_bytes(35,
                                                      length=4,
                                                      byteorder="big",
                                                      signed=False),
             chain_hash=constants.net.rev_genesis_bytes(),
             timestamp=int.to_bytes(1584320643,
                                    length=4,
                                    byteorder="big",
                                    signed=False),
         ))
Example #7
 def test_write_bigsize_int(self):
     self.assertEqual(bfh("00"), write_bigsize_int(0))
     self.assertEqual(bfh("fc"), write_bigsize_int(252))
     self.assertEqual(bfh("fd00fd"), write_bigsize_int(253))
     self.assertEqual(bfh("fdffff"), write_bigsize_int(65535))
     self.assertEqual(bfh("fe00010000"), write_bigsize_int(65536))
     self.assertEqual(bfh("feffffffff"), write_bigsize_int(4294967295))
     self.assertEqual(bfh("ff0000000100000000"),
                      write_bigsize_int(4294967296))
     self.assertEqual(bfh("ffffffffffffffffff"),
                      write_bigsize_int(18446744073709551615))
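
For reference, a minimal sketch of the BigSize encoding these vectors exercise (BOLT-1: values below 0xfd take one byte, otherwise a 0xfd/0xfe/0xff marker followed by 2/4/8 big-endian bytes). This is a sketch consistent with the vectors, not the library's write_bigsize_int:

def write_bigsize_int_sketch(i: int) -> bytes:
    assert 0 <= i < 1 << 64
    if i < 0xfd:
        return i.to_bytes(1, "big")
    elif i < 1 << 16:
        return b"\xfd" + i.to_bytes(2, "big")
    elif i < 1 << 32:
        return b"\xfe" + i.to_bytes(4, "big")
    else:
        return b"\xff" + i.to_bytes(8, "big")

assert write_bigsize_int_sketch(252).hex() == "fc"
assert write_bigsize_int_sketch(253).hex() == "fd00fd"
assert write_bigsize_int_sketch(65536).hex() == "fe00010000"
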
Example #8
 def test_read_tlv_stream_tests3(self):
     # from https://github.com/lightningnetwork/lightning-rfc/blob/452a0eb916fedf4c954137b4fd0b61b5002b34ad/01-messaging.md#tlv-stream-decoding-failure
     lnser = LNSerializer()
     with self.assertRaises(MsgInvalidFieldOrder):
         lnser.read_tlv_stream(fd=io.BytesIO(
             bfh("0208000000000000022601012a")),
                               tlv_stream_name="n1")
     with self.assertRaises(MsgInvalidFieldOrder):
         lnser.read_tlv_stream(fd=io.BytesIO(
             bfh("0208000000000000023102080000000000000451")),
                               tlv_stream_name="n1")
     with self.assertRaises(MsgInvalidFieldOrder):
         lnser.read_tlv_stream(fd=io.BytesIO(bfh("1f000f012a")),
                               tlv_stream_name="n1")
     with self.assertRaises(MsgInvalidFieldOrder):
         lnser.read_tlv_stream(fd=io.BytesIO(bfh("1f001f012a")),
                               tlv_stream_name="n1")
     with self.assertRaises(MsgInvalidFieldOrder):
         lnser.read_tlv_stream(fd=io.BytesIO(
             bfh("ffffffffffffffffff000000")),
                               tlv_stream_name="n2")
Example #9
 def test_encode_decode_msg__init(self):
     # "init" is interesting because it has TLVs optionally
     self.assertEqual(
         bfh("00100000000220c2"),
         encode_msg(
             "init",
             gflen=0,
             flen=2,
             features=(LnFeatures.OPTION_STATIC_REMOTEKEY_OPT
                       | LnFeatures.GOSSIP_QUERIES_OPT
                       | LnFeatures.GOSSIP_QUERIES_REQ
                       | LnFeatures.OPTION_DATA_LOSS_PROTECT_OPT),
         ))
     self.assertEqual(
         bfh("00100000000220c2"),
         encode_msg("init", gflen=0, flen=2, features=bfh("20c2")))
     self.assertEqual(
         bfh("00100000000220c2012043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000"
             ),
         encode_msg(
             "init",
             gflen=0,
             flen=2,
             features=(LnFeatures.OPTION_STATIC_REMOTEKEY_OPT
                       | LnFeatures.GOSSIP_QUERIES_OPT
                       | LnFeatures.GOSSIP_QUERIES_REQ
                       | LnFeatures.OPTION_DATA_LOSS_PROTECT_OPT),
             init_tlvs={
                 'networks': {
                     'chains':
                     b'CI\x7f\xd7\xf8&\x95q\x08\xf4\xa3\x0f\xd9\xce\xc3\xae\xbay\x97 \x84\xe9\x0e\xad\x01\xea3\t\x00\x00\x00\x00'
                 }
             }))
     self.assertEqual(('init', {
         'gflen': 2,
         'globalfeatures': b'"\x00',
         'flen': 3,
         'features': b'\x02\xa2\xa1',
         'init_tlvs': {}
     }), decode_msg(bfh("001000022200000302a2a1")))
     self.assertEqual(
         ('init', {
             'gflen': 2,
             'globalfeatures': b'"\x00',
             'flen': 3,
             'features': b'\x02\xaa\xa2',
             'init_tlvs': {
                 'networks': {
                     'chains':
                     b'CI\x7f\xd7\xf8&\x95q\x08\xf4\xa3\x0f\xd9\xce\xc3\xae\xbay\x97 \x84\xe9\x0e\xad\x01\xea3\t\x00\x00\x00\x00'
                 }
             }
         }),
         decode_msg(
             bfh("001000022200000302aaa2012043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000"
                 )))
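
A worked check (standalone, not part of the test) of the feature bitmask used above, assuming the standard BOLT-9 bit positions: option_data_loss_protect_opt = bit 1, gossip_queries_req = bit 6, gossip_queries_opt = bit 7, option_static_remotekey_opt = bit 13.

features = (1 << 1) | (1 << 6) | (1 << 7) | (1 << 13)
assert features == 0x20c2                            # the "20c2" tail of "00100000000220c2"
assert features.to_bytes(2, "big").hex() == "20c2"   # matches flen=2 in the encoded message
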
Example #10
    def deserialize_with_auxpow(data_hex: str, **kwargs):
        """Deserializes a block header given as hex string

        This makes sure that the data is always deserialised as full
        block header with AuxPoW.

        The keyword-arguments expect_trailing_data and start_position can be
        set and will be passed on to deserialize_header."""

        # We pass a height beyond the last checkpoint, because
        # deserialize_header expects checkpointed headers to be truncated
        # by ElectrumX (i.e. not contain an AuxPoW).
        return blockchain.deserialize_header(
            bfh(data_hex),
            constants.net.max_checkpoint() + 1, **kwargs)
Example #11
    def test_deserialize_auxpow_header_explicit_coinbase(self):
        header = self.deserialize_with_auxpow(namecoin_header_37174)
        header_auxpow = header['auxpow']

        self.assertEqual(constants.net.AUXPOW_CHAIN_ID,
                         header_auxpow['chain_id'])

        coinbase_tx = header_auxpow['parent_coinbase_tx']
        expected_coinbase_txid = '8a3164be45a621f85318647d425fe9f45837b8e42ec4fdd902d7f64daf61ff4a'
        observed_coinbase_txid = auxpow.fast_txid(coinbase_tx)

        self.assertEqual(expected_coinbase_txid, observed_coinbase_txid)

        self.assertEqual(header_auxpow['coinbase_merkle_branch'], [
            "f8f27314022a5165ae122642babb28dd44191dd36f99dad80b4f16b75197dde0",
            "c8a9dc420e17dee7b04bc0174c7a37ed9e5bc3f0ea0fdfe0b5d24bfc19ecedb0",
            "0ce9c5b98e212527e4aa7b9298435dc4e8f4dfc4dc63b7c89c06300637c33620",
            "3b6d0c4122a5b047cb879a440461839f0446f6bd451f01c6f0b14b6624e84136",
            "458500be38a68b215112df5e52d9c08fdd52034fb2005ce15d2a42be28e436cb",
        ])

        coinbase_merkle_index = header_auxpow['coinbase_merkle_index']
        self.assertEqual(0, coinbase_merkle_index)

        self.assertEqual(header_auxpow['chain_merkle_branch'], [
            "000000000000000000000000000000000000000000000000000000000000000a",
            "65bd8eb2c7e3a3646507977e8659e5396b197f197fbb51e7158927a263798302",
            "5f961bb13289d705abb28376a01f7097535c95f87b9e719b9ec39d8eb20d72e9",
            "7cb5fdcc41120d6135a40a6753bddc0c9b675ba2936d2e0cd78cdcb02e6beb50",
        ])

        chain_merkle_index = header_auxpow['chain_merkle_index']
        self.assertEqual(11, chain_merkle_index)

        expected_parent_header = blockchain.deserialize_pure_header(
            bfh('0100000055a7bc918827dbe7d8027781d803f4b418589b7b9fc03e718a03000000000000625a3d6dc4dfb0ab25f450cd202ff3bdb074f2edde1ddb4af5217e10c9dbafb9639a0a4fd7690d1a25aeaa97'
                ), None)

        expected_parent_hash = blockchain.hash_header(expected_parent_header)
        observed_parent_hash = blockchain.hash_header(
            header_auxpow['parent_header'])
        self.assertEqual(expected_parent_hash, observed_parent_hash)

        expected_parent_merkle_root = expected_parent_header['merkle_root']
        observed_parent_merkle_root = header_auxpow['parent_header'][
            'merkle_root']
        self.assertEqual(expected_parent_merkle_root,
                         observed_parent_merkle_root)
Example #12
    def test_decode_onion_error(self):
        orf = OnionRoutingFailure.from_bytes(
            bfh("400f0000000017d2d8b0001d9458"))
        self.assertEqual(('incorrect_or_unknown_payment_details', {
            'htlc_msat': 399694000,
            'height': 1938520
        }), OnionWireSerializer.decode_msg(orf.to_bytes()))
        self.assertEqual({
            'htlc_msat': 399694000,
            'height': 1938520
        }, orf.decode_data())

        orf2 = OnionRoutingFailure(26399,
                                   bytes.fromhex("0000000017d2d8b0001d9458"))
        with self.assertRaises(UnknownMsgType):
            OnionWireSerializer.decode_msg(orf2.to_bytes())
        self.assertEqual(None, orf2.decode_data())
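
A worked decomposition (standalone, not part of the test) of the raw failure payload above, per the BOLT-4 layout: a 2-byte failure code, then an 8-byte htlc_msat and a 4-byte height, big-endian. 0x400f is PERM|15, i.e. incorrect_or_unknown_payment_details; 26399 (0x671f) is not a defined code, which is why decoding orf2 raises UnknownMsgType.

PERM = 0x4000
payload = bytes.fromhex("400f0000000017d2d8b0001d9458")
assert int.from_bytes(payload[0:2], "big") == PERM | 15     # failure code
assert int.from_bytes(payload[2:10], "big") == 399694000    # htlc_msat
assert int.from_bytes(payload[10:14], "big") == 1938520     # height
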
Example #13
 def sign_transaction(self, keystore, tx: PartialTransaction, prev_tx):
     prev_tx = {
         bfh(txhash): self.electrumsys_tx_to_txtype(tx)
         for txhash, tx in prev_tx.items()
     }
     client = self.get_client(keystore)
     inputs = self.tx_inputs(tx, for_sig=True, keystore=keystore)
     outputs = self.tx_outputs(tx, keystore=keystore)
     signatures, _ = client.sign_tx(
         self.get_coin_name(),
         inputs,
         outputs,
         lock_time=tx.locktime,
         version=tx.version,
         amount_unit=self.get_trezor_amount_unit(),
         prev_txes=prev_tx)
     signatures = [(bh2u(x) + '01') for x in signatures]
     tx.update_signatures(signatures)
Example #14
 def test_encode_decode_msg__missing_mandatory_field_gets_set_to_zeroes(
         self):
     # "channel_update": "signature" missing -> gets set to zeroes
     self.assertEqual(
         bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023000000003b9aca00"
             ),
         encode_msg(
             "channel_update",
             short_channel_id=ShortChannelID.from_components(54321, 111, 2),
             channel_flags=b'\x00',
             message_flags=b'\x01',
             cltv_expiry_delta=144,
             htlc_minimum_msat=200,
             htlc_maximum_msat=1_000_000_000,
             fee_base_msat=500,
             fee_proportional_millionths=35,
             chain_hash=constants.net.rev_genesis_bytes(),
             timestamp=1584320643,
         ))
Example #15
    def test_should_reject_coinbase_root_too_late(self):
        header = self.deserialize_with_auxpow(namecoin_header_19414)

        input_script = header['auxpow']['parent_coinbase_tx'].inputs(
        )[0].script_sig

        padded_script = bfh('00') * (
            auxpow.MAX_INDEX_PC_BACKWARDS_COMPATIBILITY + 4)
        padded_script += input_script[8:]

        header['auxpow']['parent_coinbase_tx']._inputs[
            0].script_sig = padded_script

        self.clear_coinbase_outputs(header['auxpow'])

        with self.assertRaises(auxpow.AuxPoWCoinbaseRootTooLate):
            blockchain.Blockchain.verify_header(header,
                                                namecoin_prev_hash_19414,
                                                namecoin_target_19414)
Example #16
    def show_address(self,
                     wallet,
                     address,
                     keystore: 'Coldcard_KeyStore' = None):
        if keystore is None:
            keystore = wallet.get_keystore()
        if not self.show_address_helper(wallet, address, keystore):
            return

        txin_type = wallet.get_txin_type(address)

        # Standard_Wallet => not multisig, must be bip32
        if type(wallet) is Standard_Wallet:
            sequence = wallet.get_address_index(address)
            keystore.show_address(sequence, txin_type)
        elif type(wallet) is Multisig_Wallet:
            assert isinstance(
                wallet, Multisig_Wallet)  # only here for type-hints in IDE
            # More involved for P2SH/P2WSH addresses: need M, and all public keys, and their
            # derivation paths. Must construct script, and track fingerprints+paths for
            # all those keys

            pubkey_deriv_info = wallet.get_public_keys_with_deriv_info(address)
            pubkey_hexes = sorted([pk.hex() for pk in list(pubkey_deriv_info)])
            xfp_paths = []
            for pubkey_hex in pubkey_hexes:
                pubkey = bytes.fromhex(pubkey_hex)
                ks, der_suffix = pubkey_deriv_info[pubkey]
                fp_bytes, der_full = ks.get_fp_and_derivation_to_be_used_in_partial_tx(
                    der_suffix, only_der_suffix=False)
                xfp_int = xfp_int_from_xfp_bytes(fp_bytes)
                xfp_paths.append([xfp_int] + list(der_full))

            script = bfh(wallet.pubkeys_to_scriptcode(pubkey_hexes))

            keystore.show_p2sh_address(wallet.m, script, xfp_paths, txin_type)

        else:
            keystore.handler.show_error(
                _('This function is only available for standard wallets when using {}.'
                  ).format(self.device))
            return
Example #17
    def clear_coinbase_outputs(auxpow_header: dict,
                               fix_merkle_root=True) -> None:
        """Clears the auxpow coinbase outputs

        Set the outputs of the auxpow coinbase to an empty list.  This is
        necessary when the coinbase has been modified and needs to be
        re-serialised, since any outputs still present are invalid due to the
        fast_tx_deserialize optimisation."""

        auxpow_header['parent_coinbase_tx']._outputs = []

        # Clear the cached raw serialization
        auxpow_header['parent_coinbase_tx'].invalidate_ser_cache()

        # Re-serialize.  Note that our AuxPoW library won't do this for us,
        # because it optimizes via fast_txid.
        auxpow_header['parent_coinbase_tx']._cached_network_ser_bytes = bfh(
            auxpow_header['parent_coinbase_tx'].serialize_to_network(
                force_legacy=True))

        # Correct the coinbase Merkle root.
        if fix_merkle_root:
            update_merkle_root_to_match_coinbase(auxpow_header)
Example #18
            ('channel_update', {
                'chain_hash':
                b'CI\x7f\xd7\xf8&\x95q\x08\xf4\xa3\x0f\xd9\xce\xc3\xae\xbay\x97 \x84\xe9\x0e\xad\x01\xea3\t\x00\x00\x00\x00',
                'channel_flags': b'\x00',
                'cltv_expiry_delta': 144,
                'fee_base_msat': 500,
                'fee_proportional_millionths': 35,
                'htlc_maximum_msat': 1000000000,
                'htlc_minimum_msat': 200,
                'message_flags': b'\x01',
                'short_channel_id': b'\x00\xd41\x00\x00o\x00\x02',
                'signature': bytes(64),
                'timestamp': 1584320643
            }),
            decode_msg(
                bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023000000003b9aca00"
                    )))

    def test_encode_decode_msg__missing_optional_field_will_not_appear_in_decoded_dict(
            self):
        # "channel_update": optional field "htlc_maximum_msat" missing -> does not get put into dict
        self.assertEqual(
            bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023"
                ),
            encode_msg(
                "channel_update",
                short_channel_id=ShortChannelID.from_components(54321, 111, 2),
                channel_flags=b'\x00',
                message_flags=b'\x01',
                cltv_expiry_delta=144,
                htlc_minimum_msat=200,
                fee_base_msat=500,
                fee_proportional_millionths=35,
                chain_hash=constants.net.rev_genesis_bytes(),
                timestamp=1584320643,
            ))
Example #19
class TestBlockchain(ElectrumSysTestCase):

    HEADERS = {
        'A':
        deserialize_header(
            bfh("0100000000000000000000000000000000000000000000000000000000000000000000003ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4adae5494dffff7f2002000000"
                ), 0),
        'B':
        deserialize_header(
            bfh("0000002006226e46111a0b59caaf126043eb5bbf28c34f3a5e332a1fc7b2b73cf188910f186c8dfd970a4545f79916bc1d75c9d00432f57c89209bf3bb115b7612848f509c25f45bffff7f2000000000"
                ), 1),
        'C':
        deserialize_header(
            bfh("00000020686bdfc6a3db73d5d93e8c9663a720a26ecb1ef20eb05af11b36cdbc57c19f7ebf2cbf153013a1c54abaf70e95198fcef2f3059cc6b4d0f7e876808e7d24d11cc825f45bffff7f2000000000"
                ), 2),
        'D':
        deserialize_header(
            bfh("00000020122baa14f3ef54985ae546d1611559e3f487bd2a0f46e8dbb52fbacc9e237972e71019d7feecd9b8596eca9a67032c5f4641b23b5d731dc393e37de7f9c2f299e725f45bffff7f2000000000"
                ), 3),
        'E':
        deserialize_header(
            bfh("00000020f8016f7ef3a17d557afe05d4ea7ab6bde1b2247b7643896c1b63d43a1598b747a3586da94c71753f27c075f57f44faf913c31177a0957bbda42e7699e3a2141aed25f45bffff7f2001000000"
                ), 4),
        'F':
        deserialize_header(
            bfh("000000201d589c6643c1d121d73b0573e5ee58ab575b8fdf16d507e7e915c5fbfbbfd05e7aee1d692d1615c3bdf52c291032144ce9e3b258a473c17c745047f3431ff8e2ee25f45bffff7f2000000000"
                ), 5),
        'O':
        deserialize_header(
            bfh("00000020b833ed46eea01d4c980f59feee44a66aa1162748b6801029565d1466790c405c3a141ce635cbb1cd2b3a4fcdd0a3380517845ba41736c82a79cab535d31128066526f45bffff7f2001000000"
                ), 6),
        'P':
        deserialize_header(
            bfh("00000020abe8e119d1877c9dc0dc502d1a253fb9a67967c57732d2f71ee0280e8381ff0a9690c2fe7c1a4450c74dc908fe94dd96c3b0637d51475e9e06a78e944a0c7fe28126f45bffff7f2000000000"
                ), 7),
        'Q':
        deserialize_header(
            bfh("000000202ce41d94eb70e1518bc1f72523f84a903f9705d967481e324876e1f8cf4d3452148be228a4c3f2061bafe7efdfc4a8d5a94759464b9b5c619994d45dfcaf49e1a126f45bffff7f2000000000"
                ), 8),
        'R':
        deserialize_header(
            bfh("00000020552755b6c59f3d51e361d16281842a4e166007799665b5daed86a063dd89857415681cb2d00ff889193f6a68a93f5096aeb2d84ca0af6185a462555822552221a626f45bffff7f2000000000"
                ), 9),
        'S':
        deserialize_header(
            bfh("00000020a13a491cbefc93cd1bb1938f19957e22a134faf14c7dee951c45533e2c750f239dc087fc977b06c24a69c682d1afd1020e6dc1f087571ccec66310a786e1548fab26f45bffff7f2000000000"
                ), 10),
        'T':
        deserialize_header(
            bfh("00000020dbf3a9b55dfefbaf8b6e43a89cf833fa2e208bbc0c1c5d76c0d71b9e4a65337803b243756c25053253aeda309604363460a3911015929e68705bd89dff6fe064b026f45bffff7f2002000000"
                ), 11),
        'U':
        deserialize_header(
            bfh("000000203d0932b3b0c78eccb39a595a28ae4a7c966388648d7783fd1305ec8d40d4fe5fd67cb902a7d807cee7676cb543feec3e053aa824d5dfb528d5b94f9760313d9db726f45bffff7f2001000000"
                ), 12),
        'G':
        deserialize_header(
            bfh("00000020b833ed46eea01d4c980f59feee44a66aa1162748b6801029565d1466790c405c3a141ce635cbb1cd2b3a4fcdd0a3380517845ba41736c82a79cab535d31128066928f45bffff7f2001000000"
                ), 6),
        'H':
        deserialize_header(
            bfh("00000020e19e687f6e7f83ca394c114144dbbbc4f3f9c9450f66331a125413702a2e1a719690c2fe7c1a4450c74dc908fe94dd96c3b0637d51475e9e06a78e944a0c7fe26a28f45bffff7f2002000000"
                ), 7),
        'I':
        deserialize_header(
            bfh("0000002009dcb3b158293c89d7cf7ceeb513add122ebc3880a850f47afbb2747f5e48c54148be228a4c3f2061bafe7efdfc4a8d5a94759464b9b5c619994d45dfcaf49e16a28f45bffff7f2000000000"
                ), 8),
        'J':
        deserialize_header(
            bfh("000000206a65f3bdd3374a5a6c4538008ba0b0a560b8566291f9ef4280ab877627a1742815681cb2d00ff889193f6a68a93f5096aeb2d84ca0af6185a462555822552221c928f45bffff7f2000000000"
                ), 9),
        'K':
        deserialize_header(
            bfh("00000020bb3b421653548991998f96f8ba486b652fdb07ca16e9cee30ece033547cd1a6e9dc087fc977b06c24a69c682d1afd1020e6dc1f087571ccec66310a786e1548fca28f45bffff7f2000000000"
                ), 10),
        'L':
        deserialize_header(
            bfh("00000020c391d74d37c24a130f4bf4737932bdf9e206dd4fad22860ec5408978eb55d46303b243756c25053253aeda309604363460a3911015929e68705bd89dff6fe064ca28f45bffff7f2000000000"
                ), 11),
        'M':
        deserialize_header(
            bfh("000000206a65f3bdd3374a5a6c4538008ba0b0a560b8566291f9ef4280ab877627a1742815681cb2d00ff889193f6a68a93f5096aeb2d84ca0af6185a4625558225522214229f45bffff7f2000000000"
                ), 9),
        'N':
        deserialize_header(
            bfh("00000020383dab38b57f98aa9b4f0d5ff868bc674b4828d76766bf048296f4c45fff680a9dc087fc977b06c24a69c682d1afd1020e6dc1f087571ccec66310a786e1548f4329f45bffff7f2003000000"
                ), 10),
        'X':
        deserialize_header(
            bfh("0000002067f1857f54b7fef732cb4940f7d1b339472b3514660711a820330fd09d8fba6b03b243756c25053253aeda309604363460a3911015929e68705bd89dff6fe0649b29f45bffff7f2002000000"
                ), 11),
        'Y':
        deserialize_header(
            bfh("00000020db33c9768a9e5f7c37d0f09aad88d48165946c87d08f7d63793f07b5c08c527fd67cb902a7d807cee7676cb543feec3e053aa824d5dfb528d5b94f9760313d9d9b29f45bffff7f2000000000"
                ), 12),
        'Z':
        deserialize_header(
            bfh("0000002047822b67940e337fda38be6f13390b3596e4dea2549250256879722073824e7f0f2596c29203f8a0f71ae94193092dc8f113be3dbee4579f1e649fa3d6dcc38c622ef45bffff7f2003000000"
                ), 13),
    }
    # tree of headers:
    #                                            - M <- N <- X <- Y <- Z
    #                                          /
    #                             - G <- H <- I <- J <- K <- L
    #                           /
    # A <- B <- C <- D <- E <- F <- O <- P <- Q <- R <- S <- T <- U

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        constants.set_regtest()

    @classmethod
    def tearDownClass(cls):
        super().tearDownClass()
        constants.set_mainnet()

    def setUp(self):
        super().setUp()
        self.data_dir = self.electrumsys_path
        make_dir(os.path.join(self.data_dir, 'forks'))
        self.config = SimpleConfig({'electrumsys_path': self.data_dir})
        blockchain.blockchains = {}

    def _append_header(self, chain: Blockchain, header: dict):
        self.assertTrue(chain.can_connect(header))
        chain.save_header(header)

    def test_get_height_of_last_common_block_with_chain(self):
        blockchain.blockchains[constants.net.GENESIS] = chain_u = Blockchain(
            config=self.config,
            forkpoint=0,
            parent=None,
            forkpoint_hash=constants.net.GENESIS,
            prev_hash=None)
        open(chain_u.path(), 'w+').close()
        self._append_header(chain_u, self.HEADERS['A'])
        self._append_header(chain_u, self.HEADERS['B'])
        self._append_header(chain_u, self.HEADERS['C'])
        self._append_header(chain_u, self.HEADERS['D'])
        self._append_header(chain_u, self.HEADERS['E'])
        self._append_header(chain_u, self.HEADERS['F'])
        self._append_header(chain_u, self.HEADERS['O'])
        self._append_header(chain_u, self.HEADERS['P'])
        self._append_header(chain_u, self.HEADERS['Q'])

        chain_l = chain_u.fork(self.HEADERS['G'])
        self._append_header(chain_l, self.HEADERS['H'])
        self._append_header(chain_l, self.HEADERS['I'])
        self._append_header(chain_l, self.HEADERS['J'])
        self._append_header(chain_l, self.HEADERS['K'])
        self._append_header(chain_l, self.HEADERS['L'])

        self.assertEqual({
            chain_u: 8,
            chain_l: 5
        }, chain_u.get_parent_heights())
        self.assertEqual({chain_l: 11}, chain_l.get_parent_heights())

        chain_z = chain_l.fork(self.HEADERS['M'])
        self._append_header(chain_z, self.HEADERS['N'])
        self._append_header(chain_z, self.HEADERS['X'])
        self._append_header(chain_z, self.HEADERS['Y'])
        self._append_header(chain_z, self.HEADERS['Z'])

        self.assertEqual({
            chain_u: 8,
            chain_z: 5
        }, chain_u.get_parent_heights())
        self.assertEqual({
            chain_l: 11,
            chain_z: 8
        }, chain_l.get_parent_heights())
        self.assertEqual({chain_z: 13}, chain_z.get_parent_heights())
        self.assertEqual(
            5, chain_u.get_height_of_last_common_block_with_chain(chain_l))
        self.assertEqual(
            5, chain_l.get_height_of_last_common_block_with_chain(chain_u))
        self.assertEqual(
            5, chain_u.get_height_of_last_common_block_with_chain(chain_z))
        self.assertEqual(
            5, chain_z.get_height_of_last_common_block_with_chain(chain_u))
        self.assertEqual(
            8, chain_l.get_height_of_last_common_block_with_chain(chain_z))
        self.assertEqual(
            8, chain_z.get_height_of_last_common_block_with_chain(chain_l))

        self._append_header(chain_u, self.HEADERS['R'])
        self._append_header(chain_u, self.HEADERS['S'])
        self._append_header(chain_u, self.HEADERS['T'])
        self._append_header(chain_u, self.HEADERS['U'])

        self.assertEqual({
            chain_u: 12,
            chain_z: 5
        }, chain_u.get_parent_heights())
        self.assertEqual({
            chain_l: 11,
            chain_z: 8
        }, chain_l.get_parent_heights())
        self.assertEqual({chain_z: 13}, chain_z.get_parent_heights())
        self.assertEqual(
            5, chain_u.get_height_of_last_common_block_with_chain(chain_l))
        self.assertEqual(
            5, chain_l.get_height_of_last_common_block_with_chain(chain_u))
        self.assertEqual(
            5, chain_u.get_height_of_last_common_block_with_chain(chain_z))
        self.assertEqual(
            5, chain_z.get_height_of_last_common_block_with_chain(chain_u))
        self.assertEqual(
            8, chain_l.get_height_of_last_common_block_with_chain(chain_z))
        self.assertEqual(
            8, chain_z.get_height_of_last_common_block_with_chain(chain_l))

    def test_parents_after_forking(self):
        blockchain.blockchains[constants.net.GENESIS] = chain_u = Blockchain(
            config=self.config,
            forkpoint=0,
            parent=None,
            forkpoint_hash=constants.net.GENESIS,
            prev_hash=None)
        open(chain_u.path(), 'w+').close()
        self._append_header(chain_u, self.HEADERS['A'])
        self._append_header(chain_u, self.HEADERS['B'])
        self._append_header(chain_u, self.HEADERS['C'])
        self._append_header(chain_u, self.HEADERS['D'])
        self._append_header(chain_u, self.HEADERS['E'])
        self._append_header(chain_u, self.HEADERS['F'])
        self._append_header(chain_u, self.HEADERS['O'])
        self._append_header(chain_u, self.HEADERS['P'])
        self._append_header(chain_u, self.HEADERS['Q'])

        self.assertEqual(None, chain_u.parent)

        chain_l = chain_u.fork(self.HEADERS['G'])
        self._append_header(chain_l, self.HEADERS['H'])
        self._append_header(chain_l, self.HEADERS['I'])
        self._append_header(chain_l, self.HEADERS['J'])
        self._append_header(chain_l, self.HEADERS['K'])
        self._append_header(chain_l, self.HEADERS['L'])

        self.assertEqual(None, chain_l.parent)
        self.assertEqual(chain_l, chain_u.parent)

        chain_z = chain_l.fork(self.HEADERS['M'])
        self._append_header(chain_z, self.HEADERS['N'])
        self._append_header(chain_z, self.HEADERS['X'])
        self._append_header(chain_z, self.HEADERS['Y'])
        self._append_header(chain_z, self.HEADERS['Z'])

        self.assertEqual(chain_z, chain_u.parent)
        self.assertEqual(chain_z, chain_l.parent)
        self.assertEqual(None, chain_z.parent)

        self._append_header(chain_u, self.HEADERS['R'])
        self._append_header(chain_u, self.HEADERS['S'])
        self._append_header(chain_u, self.HEADERS['T'])
        self._append_header(chain_u, self.HEADERS['U'])

        self.assertEqual(chain_z, chain_u.parent)
        self.assertEqual(chain_z, chain_l.parent)
        self.assertEqual(None, chain_z.parent)

    def test_forking_and_swapping(self):
        blockchain.blockchains[constants.net.GENESIS] = chain_u = Blockchain(
            config=self.config,
            forkpoint=0,
            parent=None,
            forkpoint_hash=constants.net.GENESIS,
            prev_hash=None)
        open(chain_u.path(), 'w+').close()

        self._append_header(chain_u, self.HEADERS['A'])
        self._append_header(chain_u, self.HEADERS['B'])
        self._append_header(chain_u, self.HEADERS['C'])
        self._append_header(chain_u, self.HEADERS['D'])
        self._append_header(chain_u, self.HEADERS['E'])
        self._append_header(chain_u, self.HEADERS['F'])
        self._append_header(chain_u, self.HEADERS['O'])
        self._append_header(chain_u, self.HEADERS['P'])
        self._append_header(chain_u, self.HEADERS['Q'])
        self._append_header(chain_u, self.HEADERS['R'])

        chain_l = chain_u.fork(self.HEADERS['G'])
        self._append_header(chain_l, self.HEADERS['H'])
        self._append_header(chain_l, self.HEADERS['I'])
        self._append_header(chain_l, self.HEADERS['J'])

        # do checks
        self.assertEqual(2, len(blockchain.blockchains))
        self.assertEqual(1,
                         len(os.listdir(os.path.join(self.data_dir, "forks"))))
        self.assertEqual(0, chain_u.forkpoint)
        self.assertEqual(None, chain_u.parent)
        self.assertEqual(constants.net.GENESIS, chain_u._forkpoint_hash)
        self.assertEqual(None, chain_u._prev_hash)
        self.assertEqual(os.path.join(self.data_dir, "blockchain_headers"),
                         chain_u.path())
        self.assertEqual(10 * 80, os.stat(chain_u.path()).st_size)
        self.assertEqual(6, chain_l.forkpoint)
        self.assertEqual(chain_u, chain_l.parent)
        self.assertEqual(hash_header(self.HEADERS['G']),
                         chain_l._forkpoint_hash)
        self.assertEqual(hash_header(self.HEADERS['F']), chain_l._prev_hash)
        self.assertEqual(
            os.path.join(
                self.data_dir, "forks",
                "fork2_6_5c400c7966145d56291080b6482716a16aa644eefe590f984c1da0ee46ed33b8_711a2e2a701354121a33660f45c9f9f3c4bbdb4441114c39ca837f6e7f689ee1"
            ), chain_l.path())
        self.assertEqual(4 * 80, os.stat(chain_l.path()).st_size)

        self._append_header(chain_l, self.HEADERS['K'])

        # chains were swapped, do checks
        self.assertEqual(2, len(blockchain.blockchains))
        self.assertEqual(1,
                         len(os.listdir(os.path.join(self.data_dir, "forks"))))
        self.assertEqual(6, chain_u.forkpoint)
        self.assertEqual(chain_l, chain_u.parent)
        self.assertEqual(hash_header(self.HEADERS['O']),
                         chain_u._forkpoint_hash)
        self.assertEqual(hash_header(self.HEADERS['F']), chain_u._prev_hash)
        self.assertEqual(
            os.path.join(
                self.data_dir, "forks",
                "fork2_6_5c400c7966145d56291080b6482716a16aa644eefe590f984c1da0ee46ed33b8_aff81830e28e01ef7d23277c56779a6b93f251a2d50dcc09d7c87d119e1e8ab"
            ), chain_u.path())
        self.assertEqual(4 * 80, os.stat(chain_u.path()).st_size)
        self.assertEqual(0, chain_l.forkpoint)
        self.assertEqual(None, chain_l.parent)
        self.assertEqual(constants.net.GENESIS, chain_l._forkpoint_hash)
        self.assertEqual(None, chain_l._prev_hash)
        self.assertEqual(os.path.join(self.data_dir, "blockchain_headers"),
                         chain_l.path())
        self.assertEqual(11 * 80, os.stat(chain_l.path()).st_size)
        for b in (chain_u, chain_l):
            self.assertTrue(
                all([
                    b.can_connect(b.read_header(i), False)
                    for i in range(b.height())
                ]))

        self._append_header(chain_u, self.HEADERS['S'])
        self._append_header(chain_u, self.HEADERS['T'])
        self._append_header(chain_u, self.HEADERS['U'])
        self._append_header(chain_l, self.HEADERS['L'])

        chain_z = chain_l.fork(self.HEADERS['M'])
        self._append_header(chain_z, self.HEADERS['N'])
        self._append_header(chain_z, self.HEADERS['X'])
        self._append_header(chain_z, self.HEADERS['Y'])
        self._append_header(chain_z, self.HEADERS['Z'])

        # chain_z became best chain, do checks
        self.assertEqual(3, len(blockchain.blockchains))
        self.assertEqual(2,
                         len(os.listdir(os.path.join(self.data_dir, "forks"))))
        self.assertEqual(0, chain_z.forkpoint)
        self.assertEqual(None, chain_z.parent)
        self.assertEqual(constants.net.GENESIS, chain_z._forkpoint_hash)
        self.assertEqual(None, chain_z._prev_hash)
        self.assertEqual(os.path.join(self.data_dir, "blockchain_headers"),
                         chain_z.path())
        self.assertEqual(14 * 80, os.stat(chain_z.path()).st_size)
        self.assertEqual(9, chain_l.forkpoint)
        self.assertEqual(chain_z, chain_l.parent)
        self.assertEqual(hash_header(self.HEADERS['J']),
                         chain_l._forkpoint_hash)
        self.assertEqual(hash_header(self.HEADERS['I']), chain_l._prev_hash)
        self.assertEqual(
            os.path.join(
                self.data_dir, "forks",
                "fork2_9_2874a1277687ab8042eff9916256b860a5b0a08b0038456c5a4a37d3bdf3656a_6e1acd473503ce0ee3cee916ca07db2f656b48baf8968f999189545316423bbb"
            ), chain_l.path())
        self.assertEqual(3 * 80, os.stat(chain_l.path()).st_size)
        self.assertEqual(6, chain_u.forkpoint)
        self.assertEqual(chain_z, chain_u.parent)
        self.assertEqual(hash_header(self.HEADERS['O']),
                         chain_u._forkpoint_hash)
        self.assertEqual(hash_header(self.HEADERS['F']), chain_u._prev_hash)
        self.assertEqual(
            os.path.join(
                self.data_dir, "forks",
                "fork2_6_5c400c7966145d56291080b6482716a16aa644eefe590f984c1da0ee46ed33b8_aff81830e28e01ef7d23277c56779a6b93f251a2d50dcc09d7c87d119e1e8ab"
            ), chain_u.path())
        self.assertEqual(7 * 80, os.stat(chain_u.path()).st_size)
        for b in (chain_u, chain_l, chain_z):
            self.assertTrue(
                all([
                    b.can_connect(b.read_header(i), False)
                    for i in range(b.height())
                ]))

        self.assertEqual(constants.net.GENESIS, chain_z.get_hash(0))
        self.assertEqual(hash_header(self.HEADERS['F']), chain_z.get_hash(5))
        self.assertEqual(hash_header(self.HEADERS['G']), chain_z.get_hash(6))
        self.assertEqual(hash_header(self.HEADERS['I']), chain_z.get_hash(8))
        self.assertEqual(hash_header(self.HEADERS['M']), chain_z.get_hash(9))
        self.assertEqual(hash_header(self.HEADERS['Z']), chain_z.get_hash(13))

    def test_doing_multiple_swaps_after_single_new_header(self):
        blockchain.blockchains[constants.net.GENESIS] = chain_u = Blockchain(
            config=self.config,
            forkpoint=0,
            parent=None,
            forkpoint_hash=constants.net.GENESIS,
            prev_hash=None)
        open(chain_u.path(), 'w+').close()

        self._append_header(chain_u, self.HEADERS['A'])
        self._append_header(chain_u, self.HEADERS['B'])
        self._append_header(chain_u, self.HEADERS['C'])
        self._append_header(chain_u, self.HEADERS['D'])
        self._append_header(chain_u, self.HEADERS['E'])
        self._append_header(chain_u, self.HEADERS['F'])
        self._append_header(chain_u, self.HEADERS['O'])
        self._append_header(chain_u, self.HEADERS['P'])
        self._append_header(chain_u, self.HEADERS['Q'])
        self._append_header(chain_u, self.HEADERS['R'])
        self._append_header(chain_u, self.HEADERS['S'])

        self.assertEqual(1, len(blockchain.blockchains))
        self.assertEqual(0,
                         len(os.listdir(os.path.join(self.data_dir, "forks"))))

        chain_l = chain_u.fork(self.HEADERS['G'])
        self._append_header(chain_l, self.HEADERS['H'])
        self._append_header(chain_l, self.HEADERS['I'])
        self._append_header(chain_l, self.HEADERS['J'])
        self._append_header(chain_l, self.HEADERS['K'])
        # now chain_u is best chain, but it's tied with chain_l

        self.assertEqual(2, len(blockchain.blockchains))
        self.assertEqual(1,
                         len(os.listdir(os.path.join(self.data_dir, "forks"))))

        chain_z = chain_l.fork(self.HEADERS['M'])
        self._append_header(chain_z, self.HEADERS['N'])
        self._append_header(chain_z, self.HEADERS['X'])

        self.assertEqual(3, len(blockchain.blockchains))
        self.assertEqual(2,
                         len(os.listdir(os.path.join(self.data_dir, "forks"))))

        # chain_z became best chain, do checks
        self.assertEqual(0, chain_z.forkpoint)
        self.assertEqual(None, chain_z.parent)
        self.assertEqual(constants.net.GENESIS, chain_z._forkpoint_hash)
        self.assertEqual(None, chain_z._prev_hash)
        self.assertEqual(os.path.join(self.data_dir, "blockchain_headers"),
                         chain_z.path())
        self.assertEqual(12 * 80, os.stat(chain_z.path()).st_size)
        self.assertEqual(9, chain_l.forkpoint)
        self.assertEqual(chain_z, chain_l.parent)
        self.assertEqual(hash_header(self.HEADERS['J']),
                         chain_l._forkpoint_hash)
        self.assertEqual(hash_header(self.HEADERS['I']), chain_l._prev_hash)
        self.assertEqual(
            os.path.join(
                self.data_dir, "forks",
                "fork2_9_2874a1277687ab8042eff9916256b860a5b0a08b0038456c5a4a37d3bdf3656a_6e1acd473503ce0ee3cee916ca07db2f656b48baf8968f999189545316423bbb"
            ), chain_l.path())
        self.assertEqual(2 * 80, os.stat(chain_l.path()).st_size)
        self.assertEqual(6, chain_u.forkpoint)
        self.assertEqual(chain_z, chain_u.parent)
        self.assertEqual(hash_header(self.HEADERS['O']),
                         chain_u._forkpoint_hash)
        self.assertEqual(hash_header(self.HEADERS['F']), chain_u._prev_hash)
        self.assertEqual(
            os.path.join(
                self.data_dir, "forks",
                "fork2_6_5c400c7966145d56291080b6482716a16aa644eefe590f984c1da0ee46ed33b8_aff81830e28e01ef7d23277c56779a6b93f251a2d50dcc09d7c87d119e1e8ab"
            ), chain_u.path())
        self.assertEqual(5 * 80, os.stat(chain_u.path()).st_size)

        self.assertEqual(constants.net.GENESIS, chain_z.get_hash(0))
        self.assertEqual(hash_header(self.HEADERS['F']), chain_z.get_hash(5))
        self.assertEqual(hash_header(self.HEADERS['G']), chain_z.get_hash(6))
        self.assertEqual(hash_header(self.HEADERS['I']), chain_z.get_hash(8))
        self.assertEqual(hash_header(self.HEADERS['M']), chain_z.get_hash(9))
        self.assertEqual(hash_header(self.HEADERS['X']), chain_z.get_hash(11))

        for b in (chain_u, chain_l, chain_z):
            self.assertTrue(
                all([
                    b.can_connect(b.read_header(i), False)
                    for i in range(b.height())
                ]))

    def get_chains_that_contain_header_helper(self, header: dict):
        height = header['block_height']
        header_hash = hash_header(header)
        return blockchain.get_chains_that_contain_header(height, header_hash)

    def test_get_chains_that_contain_header(self):
        blockchain.blockchains[constants.net.GENESIS] = chain_u = Blockchain(
            config=self.config,
            forkpoint=0,
            parent=None,
            forkpoint_hash=constants.net.GENESIS,
            prev_hash=None)
        open(chain_u.path(), 'w+').close()
        self._append_header(chain_u, self.HEADERS['A'])
        self._append_header(chain_u, self.HEADERS['B'])
        self._append_header(chain_u, self.HEADERS['C'])
        self._append_header(chain_u, self.HEADERS['D'])
        self._append_header(chain_u, self.HEADERS['E'])
        self._append_header(chain_u, self.HEADERS['F'])
        self._append_header(chain_u, self.HEADERS['O'])
        self._append_header(chain_u, self.HEADERS['P'])
        self._append_header(chain_u, self.HEADERS['Q'])

        chain_l = chain_u.fork(self.HEADERS['G'])
        self._append_header(chain_l, self.HEADERS['H'])
        self._append_header(chain_l, self.HEADERS['I'])
        self._append_header(chain_l, self.HEADERS['J'])
        self._append_header(chain_l, self.HEADERS['K'])
        self._append_header(chain_l, self.HEADERS['L'])

        chain_z = chain_l.fork(self.HEADERS['M'])

        self.assertEqual([chain_l, chain_z, chain_u],
                         self.get_chains_that_contain_header_helper(
                             self.HEADERS['A']))
        self.assertEqual([chain_l, chain_z, chain_u],
                         self.get_chains_that_contain_header_helper(
                             self.HEADERS['C']))
        self.assertEqual([chain_l, chain_z, chain_u],
                         self.get_chains_that_contain_header_helper(
                             self.HEADERS['F']))
        self.assertEqual([chain_l, chain_z],
                         self.get_chains_that_contain_header_helper(
                             self.HEADERS['G']))
        self.assertEqual([chain_l, chain_z],
                         self.get_chains_that_contain_header_helper(
                             self.HEADERS['I']))
        self.assertEqual([chain_z],
                         self.get_chains_that_contain_header_helper(
                             self.HEADERS['M']))
        self.assertEqual([chain_l],
                         self.get_chains_that_contain_header_helper(
                             self.HEADERS['K']))

        self._append_header(chain_z, self.HEADERS['N'])
        self._append_header(chain_z, self.HEADERS['X'])
        self._append_header(chain_z, self.HEADERS['Y'])
        self._append_header(chain_z, self.HEADERS['Z'])

        self.assertEqual([chain_z, chain_l, chain_u],
                         self.get_chains_that_contain_header_helper(
                             self.HEADERS['A']))
        self.assertEqual([chain_z, chain_l, chain_u],
                         self.get_chains_that_contain_header_helper(
                             self.HEADERS['C']))
        self.assertEqual([chain_z, chain_l, chain_u],
                         self.get_chains_that_contain_header_helper(
                             self.HEADERS['F']))
        self.assertEqual([chain_u],
                         self.get_chains_that_contain_header_helper(
                             self.HEADERS['O']))
        self.assertEqual([chain_z, chain_l],
                         self.get_chains_that_contain_header_helper(
                             self.HEADERS['I']))

    def test_target_to_bits(self):
        # https://github.com/syscoin/syscoin/blob/7fcf53f7b4524572d1d0c9a5fdc388e87eb02416/src/arith_uint256.h#L269
        self.assertEqual(0x05123456, Blockchain.target_to_bits(0x1234560000))
        self.assertEqual(0x0600c0de, Blockchain.target_to_bits(0xc0de000000))

        # tests from https://github.com/syscoin/syscoin/blob/a7d17daa5cd8bf6398d5f8d7e77290009407d6ea/src/test/arith_uint256_tests.cpp#L411
        tuples = (
            (0,
             0x0000000000000000000000000000000000000000000000000000000000000000,
             0),
            (0x00123456,
             0x0000000000000000000000000000000000000000000000000000000000000000,
             0),
            (0x01003456,
             0x0000000000000000000000000000000000000000000000000000000000000000,
             0),
            (0x02000056,
             0x0000000000000000000000000000000000000000000000000000000000000000,
             0),
            (0x03000000,
             0x0000000000000000000000000000000000000000000000000000000000000000,
             0),
            (0x04000000,
             0x0000000000000000000000000000000000000000000000000000000000000000,
             0),
            (0x00923456,
             0x0000000000000000000000000000000000000000000000000000000000000000,
             0),
            (0x01803456,
             0x0000000000000000000000000000000000000000000000000000000000000000,
             0),
            (0x02800056,
             0x0000000000000000000000000000000000000000000000000000000000000000,
             0),
            (0x03800000,
             0x0000000000000000000000000000000000000000000000000000000000000000,
             0),
            (0x04800000,
             0x0000000000000000000000000000000000000000000000000000000000000000,
             0),
            (0x01123456,
             0x0000000000000000000000000000000000000000000000000000000000000012,
             0x01120000),
            (0x02123456,
             0x0000000000000000000000000000000000000000000000000000000000001234,
             0x02123400),
            (0x03123456,
             0x0000000000000000000000000000000000000000000000000000000000123456,
             0x03123456),
            (0x04123456,
             0x0000000000000000000000000000000000000000000000000000000012345600,
             0x04123456),
            (0x05009234,
             0x0000000000000000000000000000000000000000000000000000000092340000,
             0x05009234),
            (0x20123456,
             0x1234560000000000000000000000000000000000000000000000000000000000,
             0x20123456),
        )
        for nbits1, target, nbits2 in tuples:
            with self.subTest(original_compact_nbits=nbits1.to_bytes(
                    length=4, byteorder="big").hex()):
                num = Blockchain.bits_to_target(nbits1)
                self.assertEqual(target, num)
                self.assertEqual(nbits2, Blockchain.target_to_bits(num))

        # Make sure that we don't generate compacts with the 0x00800000 bit set
        self.assertEqual(0x02008000, Blockchain.target_to_bits(0x80))

        with self.assertRaises(Exception):  # target cannot be negative
            Blockchain.bits_to_target(0x01fedcba)
        with self.assertRaises(Exception):  # target cannot be negative
            Blockchain.bits_to_target(0x04923456)
        with self.assertRaises(Exception):  # overflow
            Blockchain.bits_to_target(0xff123456)
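
For reference, a minimal sketch of the compact 'nBits' encoding these vectors exercise: the high byte is a base-256 exponent, the low three bytes are a mantissa, and bit 0x00800000 is the sign bit that must never end up set. A sketch consistent with the vectors above, not the Blockchain class's code:

def bits_to_target_sketch(nbits: int) -> int:
    size = nbits >> 24
    word = nbits & 0x007fffff                  # mantissa without the sign bit
    if size <= 3:
        word >>= 8 * (3 - size)
        target = word
    else:
        target = word << (8 * (size - 3))
    if word != 0 and (nbits & 0x00800000):
        raise Exception("target cannot be negative")
    if target >= 1 << 256:
        raise Exception("target overflow")
    return target

def target_to_bits_sketch(target: int) -> int:
    size = (target.bit_length() + 7) // 8
    mantissa = target >> (8 * (size - 3)) if size > 3 else target << (8 * (3 - size))
    if mantissa & 0x00800000:                  # would look negative: move into a larger exponent
        mantissa >>= 8
        size += 1
    return (size << 24) | mantissa

assert bits_to_target_sketch(0x05009234) == 0x92340000
assert target_to_bits_sketch(0x1234560000) == 0x05123456
assert target_to_bits_sketch(0x80) == 0x02008000
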
Example #20
 def test_read_tlv_stream_tests1(self):
     # from https://github.com/lightningnetwork/lightning-rfc/blob/452a0eb916fedf4c954137b4fd0b61b5002b34ad/01-messaging.md#tlv-decoding-failures
     lnser = LNSerializer()
     for tlv_stream_name in ("n1", "n2"):
         with self.subTest(tlv_stream_name=tlv_stream_name):
             with self.assertRaises(UnexpectedEndOfStream):
                 lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd")),
                                       tlv_stream_name=tlv_stream_name)
             with self.assertRaises(UnexpectedEndOfStream):
                 lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd01")),
                                       tlv_stream_name=tlv_stream_name)
             with self.assertRaises(FieldEncodingNotMinimal):
                 lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd000100")),
                                       tlv_stream_name=tlv_stream_name)
             with self.assertRaises(UnexpectedEndOfStream):
                 lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd0101")),
                                       tlv_stream_name=tlv_stream_name)
             with self.assertRaises(UnexpectedEndOfStream):
                 lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd")),
                                       tlv_stream_name=tlv_stream_name)
             with self.assertRaises(UnexpectedEndOfStream):
                 lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd26")),
                                       tlv_stream_name=tlv_stream_name)
             with self.assertRaises(UnexpectedEndOfStream):
                 lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd2602")),
                                       tlv_stream_name=tlv_stream_name)
             with self.assertRaises(FieldEncodingNotMinimal):
                 lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd000100")),
                                       tlv_stream_name=tlv_stream_name)
             with self.assertRaises(UnexpectedEndOfStream):
                 lnser.read_tlv_stream(fd=io.BytesIO(
                     bfh("0ffd0201000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
                         )),
                                       tlv_stream_name="n1")
             with self.assertRaises(UnknownMandatoryTLVRecordType):
                 lnser.read_tlv_stream(fd=io.BytesIO(bfh("1200")),
                                       tlv_stream_name=tlv_stream_name)
             with self.assertRaises(UnknownMandatoryTLVRecordType):
                 lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd010200")),
                                       tlv_stream_name=tlv_stream_name)
             with self.assertRaises(UnknownMandatoryTLVRecordType):
                 lnser.read_tlv_stream(fd=io.BytesIO(bfh("fe0100000200")),
                                       tlv_stream_name=tlv_stream_name)
             with self.assertRaises(UnknownMandatoryTLVRecordType):
                 lnser.read_tlv_stream(fd=io.BytesIO(
                     bfh("ff010000000000000200")),
                                       tlv_stream_name=tlv_stream_name)
     with self.assertRaises(MsgTrailingGarbage):
         lnser.read_tlv_stream(fd=io.BytesIO(bfh("0109ffffffffffffffffff")),
                               tlv_stream_name="n1")
     with self.assertRaises(FieldEncodingNotMinimal):
         lnser.read_tlv_stream(fd=io.BytesIO(bfh("010100")),
                               tlv_stream_name="n1")
     with self.assertRaises(FieldEncodingNotMinimal):
         lnser.read_tlv_stream(fd=io.BytesIO(bfh("01020001")),
                               tlv_stream_name="n1")
     with self.assertRaises(FieldEncodingNotMinimal):
         lnser.read_tlv_stream(fd=io.BytesIO(bfh("0103000100")),
                               tlv_stream_name="n1")
     with self.assertRaises(FieldEncodingNotMinimal):
         lnser.read_tlv_stream(fd=io.BytesIO(bfh("010400010000")),
                               tlv_stream_name="n1")
     with self.assertRaises(FieldEncodingNotMinimal):
         lnser.read_tlv_stream(fd=io.BytesIO(bfh("01050001000000")),
                               tlv_stream_name="n1")
     with self.assertRaises(FieldEncodingNotMinimal):
         lnser.read_tlv_stream(fd=io.BytesIO(bfh("0106000100000000")),
                               tlv_stream_name="n1")
     with self.assertRaises(FieldEncodingNotMinimal):
         lnser.read_tlv_stream(fd=io.BytesIO(bfh("010700010000000000")),
                               tlv_stream_name="n1")
     with self.assertRaises(FieldEncodingNotMinimal):
         lnser.read_tlv_stream(fd=io.BytesIO(bfh("01080001000000000000")),
                               tlv_stream_name="n1")
     with self.assertRaises(UnexpectedEndOfStream):
         lnser.read_tlv_stream(fd=io.BytesIO(bfh("020701010101010101")),
                               tlv_stream_name="n1")
     with self.assertRaises(MsgTrailingGarbage):
         lnser.read_tlv_stream(fd=io.BytesIO(bfh("0209010101010101010101")),
                               tlv_stream_name="n1")
     with self.assertRaises(UnexpectedEndOfStream):
         lnser.read_tlv_stream(fd=io.BytesIO(
             bfh("0321023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb"
                 )),
                               tlv_stream_name="n1")
     with self.assertRaises(UnexpectedEndOfStream):
         lnser.read_tlv_stream(fd=io.BytesIO(
             bfh("0329023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb0000000000000001"
                 )),
                               tlv_stream_name="n1")
     with self.assertRaises(UnexpectedEndOfStream):
         lnser.read_tlv_stream(fd=io.BytesIO(
             bfh("0330023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb000000000000000100000000000001"
                 )),
                               tlv_stream_name="n1")
     # check if ECC point is valid?... skip for now.
     #with self.assertRaises(Exception):
     #    lnser.read_tlv_stream(fd=io.BytesIO(bfh("0331043da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb00000000000000010000000000000002")), tlv_stream_name="n1")
     with self.assertRaises(MsgTrailingGarbage):
         lnser.read_tlv_stream(fd=io.BytesIO(
             bfh("0332023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb0000000000000001000000000000000001"
                 )),
                               tlv_stream_name="n1")
     with self.assertRaises(UnexpectedEndOfStream):
         lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fe00")),
                               tlv_stream_name="n1")
     with self.assertRaises(UnexpectedEndOfStream):
         lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fe0101")),
                               tlv_stream_name="n1")
     with self.assertRaises(MsgTrailingGarbage):
         lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fe03010101")),
                               tlv_stream_name="n1")
     with self.assertRaises(UnknownMandatoryTLVRecordType):
         lnser.read_tlv_stream(fd=io.BytesIO(bfh("0000")),
                               tlv_stream_name="n1")
Exemplo n.º 21
0
 def test_encode_decode_msg__commitment_signed(self):
     # "commitment_signed" is interesting because of the "htlc_signature" field,
     #  which is a concatenation of multiple ("num_htlcs") signatures.
     # 5 htlcs
     self.assertEqual(
         bfh("0084010101010101010101010101010101010101010101010101010101010101010106112951d0a6d7fc1dbca3bd1cdbda9acfee7f668b3c0a36bd944f7e2f305b274ba46a61279e15163b2d376c664bb3481d7c5e107a5b268301e39aebbda27d2d00056548bd093a2bd2f4f053f0c6eb2c5f541d55eb8a2ede4d35fe974e5d3cd0eec3138bfd4115f4483c3b14e7988b48811d2da75f29f5e6eee691251fb4fba5a2610ba8fe7007117fe1c9fa1a6b01805c84cfffbb0eba674b64342c7cac567dea50728c1bb1aadc6d23fc2f4145027eafca82d6072cc9ce6529542099f728a0521e4b2044df5d02f7f2cdf84404762b1979528aa689a3e060a2a90ba8ef9a83d24d31ffb0d95c71d9fb9049b24ecf2c949c1486e7eb3ae160d70d54e441dc785dc57f7f3c9901b9537398c66f546cfc1d65e0748895d14699342c407fe119ac17db079b103720124a5ba22d4ba14c12832324dea9cb60c61ee74376ee7dcffdd1836e354aa8838ce3b37854fa91465cc40c73b702915e3580bfebaace805d52373b57ac755ebe4a8fe97e5fc21669bea124b809c79968479148f7174f39b8014542"
             ),
         encode_msg(
             "commitment_signed",
             channel_id=
             b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
             signature=
             b"\x06\x11)Q\xd0\xa6\xd7\xfc\x1d\xbc\xa3\xbd\x1c\xdb\xda\x9a\xcf\xee\x7ff\x8b<\n6\xbd\x94O~/0['K\xa4ja'\x9e\x15\x16;-7lfK\xb3H\x1d|^\x10z[&\x83\x01\xe3\x9a\xeb\xbd\xa2}-",
             num_htlcs=5,
             htlc_signature=bfh(
                 "6548bd093a2bd2f4f053f0c6eb2c5f541d55eb8a2ede4d35fe974e5d3cd0eec3138bfd4115f4483c3b14e7988b48811d2da75f29f5e6eee691251fb4fba5a2610ba8fe7007117fe1c9fa1a6b01805c84cfffbb0eba674b64342c7cac567dea50728c1bb1aadc6d23fc2f4145027eafca82d6072cc9ce6529542099f728a0521e4b2044df5d02f7f2cdf84404762b1979528aa689a3e060a2a90ba8ef9a83d24d31ffb0d95c71d9fb9049b24ecf2c949c1486e7eb3ae160d70d54e441dc785dc57f7f3c9901b9537398c66f546cfc1d65e0748895d14699342c407fe119ac17db079b103720124a5ba22d4ba14c12832324dea9cb60c61ee74376ee7dcffdd1836e354aa8838ce3b37854fa91465cc40c73b702915e3580bfebaace805d52373b57ac755ebe4a8fe97e5fc21669bea124b809c79968479148f7174f39b8014542"
             ),
         ))
     self.assertEqual(('commitment_signed', {
         'channel_id':
         b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
         'signature':
         b"\x06\x11)Q\xd0\xa6\xd7\xfc\x1d\xbc\xa3\xbd\x1c\xdb\xda\x9a\xcf\xee\x7ff\x8b<\n6\xbd\x94O~/0['K\xa4ja'\x9e\x15\x16;-7lfK\xb3H\x1d|^\x10z[&\x83\x01\xe3\x9a\xeb\xbd\xa2}-",
         'num_htlcs':
         5,
         'htlc_signature':
         bfh("6548bd093a2bd2f4f053f0c6eb2c5f541d55eb8a2ede4d35fe974e5d3cd0eec3138bfd4115f4483c3b14e7988b48811d2da75f29f5e6eee691251fb4fba5a2610ba8fe7007117fe1c9fa1a6b01805c84cfffbb0eba674b64342c7cac567dea50728c1bb1aadc6d23fc2f4145027eafca82d6072cc9ce6529542099f728a0521e4b2044df5d02f7f2cdf84404762b1979528aa689a3e060a2a90ba8ef9a83d24d31ffb0d95c71d9fb9049b24ecf2c949c1486e7eb3ae160d70d54e441dc785dc57f7f3c9901b9537398c66f546cfc1d65e0748895d14699342c407fe119ac17db079b103720124a5ba22d4ba14c12832324dea9cb60c61ee74376ee7dcffdd1836e354aa8838ce3b37854fa91465cc40c73b702915e3580bfebaace805d52373b57ac755ebe4a8fe97e5fc21669bea124b809c79968479148f7174f39b8014542"
             )
     }),
                      decode_msg(
                          bfh("0084010101010101010101010101010101010101010101010101010101010101010106112951d0a6d7fc1dbca3bd1cdbda9acfee7f668b3c0a36bd944f7e2f305b274ba46a61279e15163b2d376c664bb3481d7c5e107a5b268301e39aebbda27d2d00056548bd093a2bd2f4f053f0c6eb2c5f541d55eb8a2ede4d35fe974e5d3cd0eec3138bfd4115f4483c3b14e7988b48811d2da75f29f5e6eee691251fb4fba5a2610ba8fe7007117fe1c9fa1a6b01805c84cfffbb0eba674b64342c7cac567dea50728c1bb1aadc6d23fc2f4145027eafca82d6072cc9ce6529542099f728a0521e4b2044df5d02f7f2cdf84404762b1979528aa689a3e060a2a90ba8ef9a83d24d31ffb0d95c71d9fb9049b24ecf2c949c1486e7eb3ae160d70d54e441dc785dc57f7f3c9901b9537398c66f546cfc1d65e0748895d14699342c407fe119ac17db079b103720124a5ba22d4ba14c12832324dea9cb60c61ee74376ee7dcffdd1836e354aa8838ce3b37854fa91465cc40c73b702915e3580bfebaace805d52373b57ac755ebe4a8fe97e5fc21669bea124b809c79968479148f7174f39b8014542"
                              )))
     # single htlc
     self.assertEqual(
         bfh("008401010101010101010101010101010101010101010101010101010101010101013b14af0c549dfb1fb287ff57c012371b3932996db5929eda5f251704751fb49d0dc2dcb88e5021575cb572fb71693758543f97d89e9165f913bfb7488d7cc26500012d31103b9f6e71131e4fee86fdfbdeba90e52b43fcfd11e8e53811cd4d59b2575ae6c3c82f85bea144c88cc35e568f1e6bdd0c57337e86de0b5da7cd9994067a"
             ),
         encode_msg(
             "commitment_signed",
             channel_id=
             b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
             signature=
             b';\x14\xaf\x0cT\x9d\xfb\x1f\xb2\x87\xffW\xc0\x127\x1b92\x99m\xb5\x92\x9e\xda_%\x17\x04u\x1f\xb4\x9d\r\xc2\xdc\xb8\x8eP!W\\\xb5r\xfbqi7XT?\x97\xd8\x9e\x91e\xf9\x13\xbf\xb7H\x8d|\xc2e',
             num_htlcs=1,
             htlc_signature=bfh(
                 "2d31103b9f6e71131e4fee86fdfbdeba90e52b43fcfd11e8e53811cd4d59b2575ae6c3c82f85bea144c88cc35e568f1e6bdd0c57337e86de0b5da7cd9994067a"
             ),
         ))
     self.assertEqual(('commitment_signed', {
         'channel_id':
         b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
         'signature':
         b';\x14\xaf\x0cT\x9d\xfb\x1f\xb2\x87\xffW\xc0\x127\x1b92\x99m\xb5\x92\x9e\xda_%\x17\x04u\x1f\xb4\x9d\r\xc2\xdc\xb8\x8eP!W\\\xb5r\xfbqi7XT?\x97\xd8\x9e\x91e\xf9\x13\xbf\xb7H\x8d|\xc2e',
         'num_htlcs':
         1,
         'htlc_signature':
         bfh("2d31103b9f6e71131e4fee86fdfbdeba90e52b43fcfd11e8e53811cd4d59b2575ae6c3c82f85bea144c88cc35e568f1e6bdd0c57337e86de0b5da7cd9994067a"
             )
     }),
                      decode_msg(
                          bfh("008401010101010101010101010101010101010101010101010101010101010101013b14af0c549dfb1fb287ff57c012371b3932996db5929eda5f251704751fb49d0dc2dcb88e5021575cb572fb71693758543f97d89e9165f913bfb7488d7cc26500012d31103b9f6e71131e4fee86fdfbdeba90e52b43fcfd11e8e53811cd4d59b2575ae6c3c82f85bea144c88cc35e568f1e6bdd0c57337e86de0b5da7cd9994067a"
                              )))
     # zero htlcs
     self.assertEqual(
         bfh("008401010101010101010101010101010101010101010101010101010101010101014e206ecf904d9237b1c5b4e08513555e9a5932c45b5f68be8764ce998df635ae04f6ce7bbcd3b4fd08e2daab7f9059b287ecab4155367b834682633497173f450000"
             ),
         encode_msg(
             "commitment_signed",
             channel_id=
             b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
             signature=
             b'N n\xcf\x90M\x927\xb1\xc5\xb4\xe0\x85\x13U^\x9aY2\xc4[_h\xbe\x87d\xce\x99\x8d\xf65\xae\x04\xf6\xce{\xbc\xd3\xb4\xfd\x08\xe2\xda\xab\x7f\x90Y\xb2\x87\xec\xabAU6{\x83F\x82c4\x97\x17?E',
             num_htlcs=0,
             htlc_signature=bfh(""),
         ))
     self.assertEqual(('commitment_signed', {
         'channel_id':
         b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
         'signature':
         b'N n\xcf\x90M\x927\xb1\xc5\xb4\xe0\x85\x13U^\x9aY2\xc4[_h\xbe\x87d\xce\x99\x8d\xf65\xae\x04\xf6\xce{\xbc\xd3\xb4\xfd\x08\xe2\xda\xab\x7f\x90Y\xb2\x87\xec\xabAU6{\x83F\x82c4\x97\x17?E',
         'num_htlcs': 0,
         'htlc_signature': bfh("")
     }),
                      decode_msg(
                          bfh("008401010101010101010101010101010101010101010101010101010101010101014e206ecf904d9237b1c5b4e08513555e9a5932c45b5f68be8764ce998df635ae04f6ce7bbcd3b4fd08e2daab7f9059b287ecab4155367b834682633497173f450000"
                              )))
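The three cases above (5, 1, and 0 htlcs) exercise the fact that "htlc_signature" is not a structured list on the wire: it is simply num_htlcs compact 64-byte signatures concatenated back to back. A small sketch of that relationship (the helper names here are illustrative, not electrum APIs):

def split_htlc_signatures(htlc_signature: bytes, num_htlcs: int) -> list:
    # the field length must be exactly num_htlcs * 64 bytes
    assert len(htlc_signature) == 64 * num_htlcs
    return [htlc_signature[i * 64:(i + 1) * 64] for i in range(num_htlcs)]

def join_htlc_signatures(sigs) -> bytes:
    assert all(len(sig) == 64 for sig in sigs)
    return b''.join(sigs)

# 5 htlcs -> 320 bytes, 1 htlc -> 64 bytes, 0 htlcs -> empty field
assert split_htlc_signatures(bytes(320), 5) == [bytes(64)] * 5
assert join_htlc_signatures([]) == b''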
Exemplo n.º 22
0
 def setUp(self):
     super().setUp()
     self.header = deserialize_header(bfh(self.valid_header), 100)
Exemplo n.º 23
0
    def sign_transaction(self, tx, password):
        if tx.is_complete():
            return
        inputs = []
        inputsPaths = []
        chipInputs = []
        redeemScripts = []
        changePath = ""
        output = None
        p2shTransaction = False
        segwitTransaction = False
        pin = ""
        # prompt for the PIN before displaying the dialog if necessary
        client_ledger = self.get_client()
        client_electrumsys = self.get_client_electrumsys()
        assert client_electrumsys

        # Fetch inputs of the transaction to sign
        for txin in tx.inputs():
            if txin.is_coinbase_input():
                self.give_error(
                    "Coinbase not supported")  # should never happen

            if txin.script_type in ['p2sh']:
                p2shTransaction = True

            if txin.script_type in ['p2wpkh-p2sh', 'p2wsh-p2sh']:
                if not client_electrumsys.supports_segwit():
                    self.give_error(MSG_NEEDS_FW_UPDATE_SEGWIT)
                segwitTransaction = True

            if txin.script_type in ['p2wpkh', 'p2wsh']:
                if not client_electrumsys.supports_native_segwit():
                    self.give_error(MSG_NEEDS_FW_UPDATE_SEGWIT)
                segwitTransaction = True

            my_pubkey, full_path = self.find_my_pubkey_in_txinout(txin)
            if not full_path:
                self.give_error("No matching pubkey for sign_transaction"
                                )  # should never happen
            full_path = convert_bip32_intpath_to_strpath(full_path)[2:]

            redeemScript = Transaction.get_preimage_script(txin)
            txin_prev_tx = txin.utxo
            if txin_prev_tx is None and not txin.is_segwit():
                raise UserFacingException(
                    _('Missing previous tx for legacy input.'))
            txin_prev_tx_raw = txin_prev_tx.serialize() if txin_prev_tx else None
            inputs.append([
                txin_prev_tx_raw, txin.prevout.out_idx, redeemScript,
                txin.prevout.txid.hex(), my_pubkey, txin.nsequence,
                txin.value_sats()
            ])
            inputsPaths.append(full_path)

        # Sanity check
        if p2shTransaction:
            for txin in tx.inputs():
                if txin.script_type != 'p2sh':
                    self.give_error(
                        "P2SH / regular input mixed in same transaction not supported"
                    )  # should never happen

        txOutput = var_int(len(tx.outputs()))
        for o in tx.outputs():
            txOutput += int_to_hex(o.value, 8)
            script = o.scriptpubkey.hex()
            txOutput += var_int(len(script) // 2)
            txOutput += script
        txOutput = bfh(txOutput)

        if not client_electrumsys.supports_multi_output():
            if len(tx.outputs()) > 2:
                self.give_error(
                    "Transaction with more than 2 outputs not supported")
        for txout in tx.outputs():
            if (client_electrumsys.is_hw1() and txout.address
                    and not is_b58_address(txout.address)):
                self.give_error(
                    _("This {} device can only send to base58 addresses.").
                    format(self.device))
            if not txout.address:
                if client_electrumsys.is_hw1():
                    self.give_error(
                        _("Only address outputs are supported by {}").format(
                            self.device))
                # note: max_size based on https://github.com/LedgerHQ/ledger-app-btc/commit/3a78dee9c0484821df58975803e40d58fbfc2c38#diff-c61ccd96a6d8b54d48f54a3bc4dfa7e2R26
                validate_op_return_output(txout, max_size=190)

        # Output "change" detection
        # - only one output and one change is authorized (for hw.1 and nano)
        # - at most one output can bypass confirmation (~change) (for all)
        if not p2shTransaction:
            has_change = False
            any_output_on_change_branch = is_any_tx_output_on_change_branch(tx)
            for txout in tx.outputs():
                if txout.is_mine and len(tx.outputs()) > 1 \
                        and not has_change:
                    # prioritise hiding outputs on the 'change' branch from user
                    # because no more than one change address allowed
                    if txout.is_change == any_output_on_change_branch:
                        my_pubkey, changePath = self.find_my_pubkey_in_txinout(
                            txout)
                        assert changePath
                        changePath = convert_bip32_intpath_to_strpath(
                            changePath)[2:]
                        has_change = True
                    else:
                        output = txout.address
                else:
                    output = txout.address

        self.handler.show_message(
            _("Confirm Transaction on your Ledger device..."))
        try:
            # Get trusted inputs from the original transactions
            for utxo in inputs:
                sequence = int_to_hex(utxo[5], 4)
                if segwitTransaction and not client_electrumsys.supports_segwit_trustedInputs():
                    tmp = bfh(utxo[3])[::-1]
                    tmp += bfh(int_to_hex(utxo[1], 4))
                    tmp += bfh(int_to_hex(utxo[6], 8))  # txin['value']
                    chipInputs.append({
                        'value': tmp,
                        'witness': True,
                        'sequence': sequence
                    })
                    redeemScripts.append(bfh(utxo[2]))
                elif (not p2shTransaction) or client_electrumsys.supports_multi_output():
                    txtmp = bitcoinTransaction(bfh(utxo[0]))
                    trustedInput = client_ledger.getTrustedInput(
                        txtmp, utxo[1])
                    trustedInput['sequence'] = sequence
                    if segwitTransaction:
                        trustedInput['witness'] = True
                    chipInputs.append(trustedInput)
                    if p2shTransaction or segwitTransaction:
                        redeemScripts.append(bfh(utxo[2]))
                    else:
                        redeemScripts.append(txtmp.outputs[utxo[1]].script)
                else:
                    tmp = bfh(utxo[3])[::-1]
                    tmp += bfh(int_to_hex(utxo[1], 4))
                    chipInputs.append({'value': tmp, 'sequence': sequence})
                    redeemScripts.append(bfh(utxo[2]))

            # Sign all inputs
            firstTransaction = True
            inputIndex = 0
            rawTx = tx.serialize_to_network()
            client_ledger.enableAlternate2fa(False)
            if segwitTransaction:
                client_ledger.startUntrustedTransaction(
                    True,
                    inputIndex,
                    chipInputs,
                    redeemScripts[inputIndex],
                    version=tx.version)
                # we don't set meaningful outputAddress, amount and fees
                # as we only care about the alternateEncoding==True branch
                outputData = client_ledger.finalizeInput(
                    b'', 0, 0, changePath, bfh(rawTx))
                outputData['outputData'] = txOutput
                if outputData['confirmationNeeded']:
                    outputData['address'] = output
                    self.handler.finished()
                    # do the authenticate dialog and get pin:
                    pin = self.handler.get_auth(outputData,
                                                client=client_electrumsys)
                    if not pin:
                        raise UserWarning()
                    self.handler.show_message(
                        _("Confirmed. Signing Transaction..."))
                while inputIndex < len(inputs):
                    singleInput = [chipInputs[inputIndex]]
                    client_ledger.startUntrustedTransaction(
                        False,
                        0,
                        singleInput,
                        redeemScripts[inputIndex],
                        version=tx.version)
                    inputSignature = client_ledger.untrustedHashSign(
                        inputsPaths[inputIndex], pin, lockTime=tx.locktime)
                    inputSignature[0] = 0x30  # force for 1.4.9+
                    my_pubkey = inputs[inputIndex][4]
                    tx.add_signature_to_txin(txin_idx=inputIndex,
                                             signing_pubkey=my_pubkey.hex(),
                                             sig=inputSignature.hex())
                    inputIndex = inputIndex + 1
            else:
                while inputIndex < len(inputs):
                    client_ledger.startUntrustedTransaction(
                        firstTransaction,
                        inputIndex,
                        chipInputs,
                        redeemScripts[inputIndex],
                        version=tx.version)
                    # we don't set meaningful outputAddress, amount and fees
                    # as we only care about the alternateEncoding==True branch
                    outputData = client_ledger.finalizeInput(
                        b'', 0, 0, changePath, bfh(rawTx))
                    outputData['outputData'] = txOutput
                    if outputData['confirmationNeeded']:
                        outputData['address'] = output
                        self.handler.finished()
                        # do the authenticate dialog and get pin:
                        pin = self.handler.get_auth(outputData,
                                                    client=client_electrumsys)
                        if not pin:
                            raise UserWarning()
                        self.handler.show_message(
                            _("Confirmed. Signing Transaction..."))
                    else:
                        # Sign input with the provided PIN
                        inputSignature = client_ledger.untrustedHashSign(
                            inputsPaths[inputIndex], pin, lockTime=tx.locktime)
                        inputSignature[0] = 0x30  # force for 1.4.9+
                        my_pubkey = inputs[inputIndex][4]
                        tx.add_signature_to_txin(
                            txin_idx=inputIndex,
                            signing_pubkey=my_pubkey.hex(),
                            sig=inputSignature.hex())
                        inputIndex = inputIndex + 1
                    firstTransaction = False
        except UserWarning:
            self.handler.show_error(_('Cancelled by user'))
            return
        except BTChipException as e:
            if e.sw in (0x6985, 0x6d00):  # cancelled by user
                return
            elif e.sw == 0x6982:
                raise  # pin lock. decorator will catch it
            else:
                self.logger.exception('')
                self.give_error(e, True)
        except BaseException as e:
            self.logger.exception('')
            self.give_error(e, True)
        finally:
            self.handler.finished()
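The change-detection block in the example above ("only one output and one change is authorized") can be hard to follow amid the Ledger plumbing. A simplified, standalone sketch of the same rule — attribute names mirror the txout objects, but this is illustrative, not the plugin code — shows that at most one of our own outputs is treated as change and hidden from on-device confirmation, preferring outputs on the change branch when any exist:

def pick_hidden_change(outputs):
    """Return the single output to treat as 'change' (skipped on-device), or None."""
    if len(outputs) <= 1:
        return None
    any_on_change_branch = any(o.is_change for o in outputs)
    for o in outputs:
        # hide an output only if it is ours, and prefer the change branch
        # whenever at least one output lives on it
        if o.is_mine and o.is_change == any_on_change_branch:
            return o
    return None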
Exemplo n.º 24
0
    def sign_transaction(self, tx, password):
        if tx.is_complete():
            return

        try:
            p2pkhTransaction = True
            inputhasharray = []
            hasharray = []
            pubkeyarray = []

            # Build hasharray from inputs
            for i, txin in enumerate(tx.inputs()):
                if txin.is_coinbase_input():
                    self.give_error(
                        "Coinbase not supported")  # should never happen

                if txin.script_type != 'p2pkh':
                    p2pkhTransaction = False

                my_pubkey, inputPath = self.find_my_pubkey_in_txinout(txin)
                if not inputPath:
                    self.give_error("No matching pubkey for sign_transaction"
                                    )  # should never happen
                inputPath = convert_bip32_intpath_to_strpath(inputPath)
                inputHash = sha256d(bfh(tx.serialize_preimage(i)))
                hasharray_i = {
                    'hash': to_hexstr(inputHash),
                    'keypath': inputPath
                }
                hasharray.append(hasharray_i)
                inputhasharray.append(inputHash)

            # Build pubkeyarray from outputs
            for txout in tx.outputs():
                assert txout.address
                if txout.is_change:
                    changePubkey, changePath = self.find_my_pubkey_in_txinout(
                        txout)
                    assert changePath
                    changePath = convert_bip32_intpath_to_strpath(changePath)
                    changePubkey = changePubkey.hex()
                    pubkeyarray_i = {
                        'pubkey': changePubkey,
                        'keypath': changePath
                    }
                    pubkeyarray.append(pubkeyarray_i)

            # Special serialization of the unsigned transaction for
            # the mobile verification app.
            # At the moment, verification only works for p2pkh transactions.
            if p2pkhTransaction:
                tx_copy = copy.deepcopy(tx)

                # monkey-patch method of tx_copy instance to change serialization
                def input_script(self,
                                 txin: PartialTxInput,
                                 *,
                                 estimate_size=False):
                    if txin.script_type == 'p2pkh':
                        return Transaction.get_preimage_script(txin)
                    raise Exception("unsupported type %s" % txin.script_type)

                tx_copy.input_script = input_script.__get__(
                    tx_copy, PartialTransaction)
                tx_dbb_serialized = tx_copy.serialize_to_network()
            else:
                # We only need this for the signing echo / verification.
                tx_dbb_serialized = None

            # Build sign command
            dbb_signatures = []
            steps = math.ceil(1.0 * len(hasharray) / self.maxInputs)
            for step in range(int(steps)):
                hashes = hasharray[step * self.maxInputs:(step + 1) *
                                   self.maxInputs]

                msg = {
                    "sign": {
                        "data": hashes,
                        "checkpub": pubkeyarray,
                    },
                }
                if tx_dbb_serialized is not None:
                    msg["sign"]["meta"] = to_hexstr(sha256d(tx_dbb_serialized))
                msg = json.dumps(msg).encode('ascii')
                dbb_client = self.plugin.get_client(self)

                if not dbb_client.is_paired():
                    raise Exception("Could not sign transaction.")

                reply = dbb_client.hid_send_encrypt(msg)
                if 'error' in reply:
                    raise Exception(reply['error']['message'])

                if 'echo' not in reply:
                    raise Exception("Could not sign transaction.")

                if self.plugin.is_mobile_paired() and tx_dbb_serialized is not None:
                    reply['tx'] = tx_dbb_serialized
                    self.plugin.comserver_post_notification(reply)

                if steps > 1:
                    self.handler.show_message(
                        _("Signing large transaction. Please be patient ...") +
                        "\n\n" +
                        _("To continue, touch the Digital Bitbox's blinking light for 3 seconds."
                          ) + " " +
                        _("(Touch {} of {})").format((step + 1), steps) +
                        "\n\n" +
                        _("To cancel, briefly touch the blinking light or wait for the timeout."
                          ) + "\n\n")
                else:
                    self.handler.show_message(
                        _("Signing transaction...") + "\n\n" +
                        _("To continue, touch the Digital Bitbox's blinking light for 3 seconds."
                          ) + "\n\n" +
                        _("To cancel, briefly touch the blinking light or wait for the timeout."
                          ))

                # Send twice, first returns an echo for smart verification
                reply = dbb_client.hid_send_encrypt(msg)
                self.handler.finished()

                if 'error' in reply:
                    if reply["error"].get('code') in (600, 601):
                        # aborted via LED short touch or timeout
                        raise UserCancelled()
                    raise Exception(reply['error']['message'])

                if 'sign' not in reply:
                    raise Exception("Could not sign transaction.")

                dbb_signatures.extend(reply['sign'])

            # Fill signatures
            if len(dbb_signatures) != len(tx.inputs()):
                raise Exception("Incorrect number of transactions signed."
                                )  # Should never occur
            for i, txin in enumerate(tx.inputs()):
                for pubkey_bytes in txin.pubkeys:
                    if txin.is_complete():
                        break
                    signed = dbb_signatures[i]
                    if 'recid' in signed:
                        # firmware > v2.1.1
                        recid = int(signed['recid'], 16)
                        s = binascii.unhexlify(signed['sig'])
                        h = inputhasharray[i]
                        pk = ecc.ECPubkey.from_sig_string(s, recid, h)
                        pk = pk.get_public_key_hex(compressed=True)
                    elif 'pubkey' in signed:
                        # firmware <= v2.1.1
                        pk = signed['pubkey']
                    if pk != pubkey_bytes.hex():
                        continue
                    sig_r = int(signed['sig'][:64], 16)
                    sig_s = int(signed['sig'][64:], 16)
                    sig = ecc.der_sig_from_r_and_s(sig_r, sig_s)
                    sig = to_hexstr(sig) + '01'
                    tx.add_signature_to_txin(txin_idx=i,
                                             signing_pubkey=pubkey_bytes.hex(),
                                             sig=sig)
        except UserCancelled:
            raise
        except BaseException as e:
            self.give_error(e, True)
        else:
            _logger.info(f"Transaction is_complete {tx.is_complete()}")
Exemplo n.º 25
0
 def get_xfp_int(self) -> int:
     xfp = self.get_root_fingerprint()
     assert xfp is not None
     return xfp_int_from_xfp_bytes(bfh(xfp))
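For context, the root fingerprint returned by get_root_fingerprint() is a 4-byte BIP-32 value in hex, and xfp_int_from_xfp_bytes turns it into an unsigned integer. A sketch of what that conversion is assumed to look like — the little-endian byte order is an assumption (it matches what Coldcard-style devices expect), not a claim about the actual helper:

def xfp_int_from_xfp_bytes_sketch(fp_bytes: bytes) -> int:
    assert len(fp_bytes) == 4  # BIP-32 fingerprints are 4 bytes
    # assumption: interpreted little-endian, as Coldcard-style devices expect
    return int.from_bytes(fp_bytes, byteorder='little', signed=False)

assert xfp_int_from_xfp_bytes_sketch(bytes.fromhex("0f056943")) == 0x4369050f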
Exemplo n.º 26
0
    def test_read_bigsize_int(self):
        self.assertEqual(0, read_bigsize_int(io.BytesIO(bfh("00"))))
        self.assertEqual(252, read_bigsize_int(io.BytesIO(bfh("fc"))))
        self.assertEqual(253, read_bigsize_int(io.BytesIO(bfh("fd00fd"))))
        self.assertEqual(65535, read_bigsize_int(io.BytesIO(bfh("fdffff"))))
        self.assertEqual(65536,
                         read_bigsize_int(io.BytesIO(bfh("fe00010000"))))
        self.assertEqual(4294967295,
                         read_bigsize_int(io.BytesIO(bfh("feffffffff"))))
        self.assertEqual(
            4294967296,
            read_bigsize_int(io.BytesIO(bfh("ff0000000100000000"))))
        self.assertEqual(
            18446744073709551615,
            read_bigsize_int(io.BytesIO(bfh("ffffffffffffffffff"))))

        with self.assertRaises(FieldEncodingNotMinimal):
            read_bigsize_int(io.BytesIO(bfh("fd00fc")))
        with self.assertRaises(FieldEncodingNotMinimal):
            read_bigsize_int(io.BytesIO(bfh("fe0000ffff")))
        with self.assertRaises(FieldEncodingNotMinimal):
            read_bigsize_int(io.BytesIO(bfh("ff00000000ffffffff")))
        with self.assertRaises(UnexpectedEndOfStream):
            read_bigsize_int(io.BytesIO(bfh("fd00")))
        with self.assertRaises(UnexpectedEndOfStream):
            read_bigsize_int(io.BytesIO(bfh("feffff")))
        with self.assertRaises(UnexpectedEndOfStream):
            read_bigsize_int(io.BytesIO(bfh("ffffffffff")))
        self.assertEqual(None, read_bigsize_int(io.BytesIO(bfh(""))))
        with self.assertRaises(UnexpectedEndOfStream):
            read_bigsize_int(io.BytesIO(bfh("fd")))
        with self.assertRaises(UnexpectedEndOfStream):
            read_bigsize_int(io.BytesIO(bfh("fe")))
        with self.assertRaises(UnexpectedEndOfStream):
            read_bigsize_int(io.BytesIO(bfh("ff")))
Exemplo n.º 27
0
    def test_read_tlv_stream_tests2(self):
        # from https://github.com/lightningnetwork/lightning-rfc/blob/452a0eb916fedf4c954137b4fd0b61b5002b34ad/01-messaging.md#tlv-decoding-successes
        lnser = LNSerializer()
        for tlv_stream_name in ("n1", "n2"):
            with self.subTest(tlv_stream_name=tlv_stream_name):
                self.assertEqual({},
                                 lnser.read_tlv_stream(
                                     fd=io.BytesIO(bfh("")),
                                     tlv_stream_name=tlv_stream_name))
                self.assertEqual({},
                                 lnser.read_tlv_stream(
                                     fd=io.BytesIO(bfh("2100")),
                                     tlv_stream_name=tlv_stream_name))
                self.assertEqual({},
                                 lnser.read_tlv_stream(
                                     fd=io.BytesIO(bfh("fd020100")),
                                     tlv_stream_name=tlv_stream_name))
                self.assertEqual({},
                                 lnser.read_tlv_stream(
                                     fd=io.BytesIO(bfh("fd00fd00")),
                                     tlv_stream_name=tlv_stream_name))
                self.assertEqual({},
                                 lnser.read_tlv_stream(
                                     fd=io.BytesIO(bfh("fd00ff00")),
                                     tlv_stream_name=tlv_stream_name))
                self.assertEqual({},
                                 lnser.read_tlv_stream(
                                     fd=io.BytesIO(bfh("fe0200000100")),
                                     tlv_stream_name=tlv_stream_name))
                self.assertEqual(
                    {},
                    lnser.read_tlv_stream(fd=io.BytesIO(
                        bfh("ff020000000000000100")),
                                          tlv_stream_name=tlv_stream_name))

        self.assertEqual({"tlv1": {
            "amount_msat": 0
        }},
                         lnser.read_tlv_stream(fd=io.BytesIO(bfh("0100")),
                                               tlv_stream_name="n1"))
        self.assertEqual({"tlv1": {
            "amount_msat": 1
        }},
                         lnser.read_tlv_stream(fd=io.BytesIO(bfh("010101")),
                                               tlv_stream_name="n1"))
        self.assertEqual({"tlv1": {
            "amount_msat": 256
        }},
                         lnser.read_tlv_stream(fd=io.BytesIO(bfh("01020100")),
                                               tlv_stream_name="n1"))
        self.assertEqual({"tlv1": {
            "amount_msat": 65536
        }},
                         lnser.read_tlv_stream(fd=io.BytesIO(
                             bfh("0103010000")),
                                               tlv_stream_name="n1"))
        self.assertEqual({"tlv1": {
            "amount_msat": 16777216
        }},
                         lnser.read_tlv_stream(fd=io.BytesIO(
                             bfh("010401000000")),
                                               tlv_stream_name="n1"))
        self.assertEqual({"tlv1": {
            "amount_msat": 4294967296
        }},
                         lnser.read_tlv_stream(fd=io.BytesIO(
                             bfh("01050100000000")),
                                               tlv_stream_name="n1"))
        self.assertEqual({"tlv1": {
            "amount_msat": 1099511627776
        }},
                         lnser.read_tlv_stream(fd=io.BytesIO(
                             bfh("0106010000000000")),
                                               tlv_stream_name="n1"))
        self.assertEqual({"tlv1": {
            "amount_msat": 281474976710656
        }},
                         lnser.read_tlv_stream(fd=io.BytesIO(
                             bfh("010701000000000000")),
                                               tlv_stream_name="n1"))
        self.assertEqual({"tlv1": {
            "amount_msat": 72057594037927936
        }},
                         lnser.read_tlv_stream(fd=io.BytesIO(
                             bfh("01080100000000000000")),
                                               tlv_stream_name="n1"))
        self.assertEqual(
            {"tlv2": {
                "scid": ShortChannelID.from_components(0, 0, 550)
            }},
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("02080000000000000226")),
                                  tlv_stream_name="n1"))
        self.assertEqual(
            {
                "tlv3": {
                    "node_id":
                    bfh("023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb"
                        ),
                    "amount_msat_1":
                    1,
                    "amount_msat_2":
                    2
                }
            },
            lnser.read_tlv_stream(fd=io.BytesIO(
                bfh("0331023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb00000000000000010000000000000002"
                    )),
                                  tlv_stream_name="n1"))
        self.assertEqual({"tlv4": {
            "cltv_delta": 550
        }},
                         lnser.read_tlv_stream(fd=io.BytesIO(
                             bfh("fd00fe020226")),
                                               tlv_stream_name="n1"))