def test_dash_tx_pro_up_serv_tx(self):
    """ProUpServTx: deserialize, check payload fields, round-trip payload."""
    tx = transaction.Transaction(PRO_UP_SERV_TX)
    deser = tx.to_json()
    assert deser['version'] == 3
    assert deser['tx_type'] == 2
    assert deser['extra_payload'] == PRO_UP_SERV_TX_D
    extra = tx.extra_payload
    assert str(extra)
    assert extra.version == PRO_UP_SERV_TX_D['version']
    # fixed-width binary fields: check both length and value
    for name, size in (('proTxHash', 32), ('inputsHash', 32),
                       ('payloadSig', 96)):
        field = getattr(extra, name)
        assert len(field) == size
        assert field == bfh(PRO_UP_SERV_TX_D[name])
    assert extra.ipAddress == PRO_UP_SERV_TX_D['ipAddress']
    assert extra.port == PRO_UP_SERV_TX_D['port']
    assert extra.scriptOperatorPayout == \
        bfh(PRO_UP_SERV_TX_D['scriptOperatorPayout'])
    assert tx.serialize() == PRO_UP_SERV_TX
    payload_hex = PRO_UP_SERV_TX[386:]
    assert extra.to_hex_str() == payload_hex
    # parse the payload hex alone and compare field-by-field
    extra2 = ProTxBase.from_hex_str(SPEC_PRO_UP_SERV_TX, payload_hex)
    for name in ('version', 'proTxHash', 'ipAddress', 'port',
                 'scriptOperatorPayout', 'inputsHash', 'payloadSig'):
        assert getattr(extra2, name) == getattr(extra, name)
def test_dash_tx_sub_tx_register(self):
    """SubTxRegister: deserialize, check payload fields, round-trip payload."""
    tx = transaction.Transaction(SUB_TX_REGISTER)
    deser = tx.to_json()
    assert deser['version'] == 3
    assert deser['tx_type'] == 8
    assert deser['extra_payload'] == SUB_TX_REGISTER_D
    extra = tx.extra_payload
    assert str(extra)
    assert extra.version == SUB_TX_REGISTER_D['version']
    assert extra.userName == SUB_TX_REGISTER_D['userName']
    # fixed-width binary fields: check both length and value
    for name, size in (('pubKey', 48), ('payloadSig', 96)):
        field = getattr(extra, name)
        assert len(field) == size
        assert field == bfh(SUB_TX_REGISTER_D[name])
    assert tx.serialize() == SUB_TX_REGISTER
    payload_hex = SUB_TX_REGISTER[386:]
    assert extra.to_hex_str() == payload_hex
    extra2 = ProTxBase.from_hex_str(SPEC_SUB_TX_REGISTER, payload_hex)
    for name in ('version', 'userName', 'pubKey', 'payloadSig'):
        assert getattr(extra2, name) == getattr(extra, name)
def test_dash_tx_sub_tx_reset_key(self):
    """SubTxResetKey: deserialize, check payload fields, round-trip payload."""
    tx = transaction.Transaction(SUB_TX_RESET_KEY)
    deser = tx.to_json()
    assert deser['version'] == 3
    assert deser['tx_type'] == 10
    assert deser['extra_payload'] == SUB_TX_RESET_KEY_D
    extra = tx.extra_payload
    assert str(extra)
    assert extra.version == SUB_TX_RESET_KEY_D['version']
    # fixed-width binary fields: check both length and value
    for name, size in (('regTxHash', 32), ('hashPrevSubTx', 32),
                       ('newPubKey', 48), ('payloadSig', 96)):
        field = getattr(extra, name)
        assert len(field) == size
        assert field == bfh(SUB_TX_RESET_KEY_D[name])
    assert extra.creditFee == SUB_TX_RESET_KEY_D['creditFee']
    assert tx.serialize() == SUB_TX_RESET_KEY
    payload_hex = SUB_TX_RESET_KEY[386:]
    assert extra.to_hex_str() == payload_hex
    extra2 = ProTxBase.from_hex_str(SPEC_SUB_TX_RESET_KEY, payload_hex)
    for name in ('version', 'regTxHash', 'hashPrevSubTx', 'creditFee',
                 'newPubKey', 'payloadSig'):
        assert getattr(extra2, name) == getattr(extra, name)
def test_dash_tx_sub_tx_close_account(self):
    """SubTxCloseAccount: deserialize, check payload fields, round-trip payload."""
    tx = transaction.Transaction(SUB_TX_CLOSE_ACCOUNT)
    deser = tx.to_json()
    assert deser['version'] == 3
    assert deser['tx_type'] == 11
    assert deser['extra_payload'] == SUB_TX_CLOSE_ACCOUNT_D
    extra = tx.extra_payload
    assert str(extra)
    assert extra.version == SUB_TX_CLOSE_ACCOUNT_D['version']
    # fixed-width binary fields: check both length and value
    for name, size in (('regTxHash', 32), ('hashPrevSubTx', 32),
                       ('payloadSig', 96)):
        field = getattr(extra, name)
        assert len(field) == size
        assert field == bfh(SUB_TX_CLOSE_ACCOUNT_D[name])
    assert extra.creditFee == SUB_TX_CLOSE_ACCOUNT_D['creditFee']
    assert tx.serialize() == SUB_TX_CLOSE_ACCOUNT
    payload_hex = SUB_TX_CLOSE_ACCOUNT[386:]
    assert extra.to_hex_str() == payload_hex
    extra2 = ProTxBase.from_hex_str(SPEC_SUB_TX_CLOSE_ACCOUNT, payload_hex)
    for name in ('version', 'regTxHash', 'hashPrevSubTx', 'creditFee',
                 'payloadSig'):
        assert getattr(extra2, name) == getattr(extra, name)
def test_version_bytes(self):
    """Serialized extended keys must carry the expected base58 prefixes."""
    b58_prefixes = {
        'standard': ('tprv', 'tpub'),
    }
    # (header map, index into the (xprv, xpub) prefix pair)
    for headers, idx in ((constants.net.XPRV_HEADERS, 0),
                         (constants.net.XPUB_HEADERS, 1)):
        for xtype, header_int in headers.items():
            prefix = b58_prefixes[xtype][idx]
            header = bfh("%08x" % header_int)
            # both all-zero and all-ones key material must keep the prefix
            for filler in (0, 255):
                xkey_b58 = EncodeBase58Check(header + bytes([filler] * 74))
                self.assertTrue(xkey_b58.startswith(prefix))
def test_dash_tx_pro_up_rev_tx(self):
    """ProUpRevTx: deserialize, check payload fields, round-trip payload."""
    tx = transaction.Transaction(PRO_UP_REV_TX)
    deser = tx.to_json()
    assert deser['version'] == 3
    assert deser['tx_type'] == 4
    assert deser['extra_payload'] == PRO_UP_REV_TX_D
    extra = tx.extra_payload
    assert str(extra)
    assert extra.version == PRO_UP_REV_TX_D['version']
    # fixed-width binary fields: check both length and value
    for name, size in (('proTxHash', 32), ('inputsHash', 32),
                       ('payloadSig', 96)):
        field = getattr(extra, name)
        assert len(field) == size
        assert field == bfh(PRO_UP_REV_TX_D[name])
    assert extra.reason == PRO_UP_REV_TX_D['reason']
    assert tx.serialize() == PRO_UP_REV_TX
    payload_hex = PRO_UP_REV_TX[384:]
    assert extra.to_hex_str() == payload_hex
    extra2 = ProTxBase.from_hex_str(SPEC_PRO_UP_REV_TX, payload_hex)
    for name in ('version', 'proTxHash', 'reason', 'inputsHash',
                 'payloadSig'):
        assert getattr(extra2, name) == getattr(extra, name)
def test_sign_transaction(self):
    """ECPrivkey.sign_transaction yields the expected deterministic DER sigs."""
    # (private key hex, message digest hex, expected DER signature hex)
    cases = [
        ('7e1255fddb52db1729fc3ceb21a46f95b8d9fe94cc83425e936a6c5223bb679d',
         '5a548b12369a53faaa7e51b5081829474ebdd9c924b3a8230b69aa0be254cd94',
         '3044022066e7d6a954006cce78a223f5edece8aaedcf3607142e9677acef1cfcb91cfdde022065cb0b5401bf16959ce7b785ea7fd408be5e4cb7d8f1b1a32c78eac6f73678d9'),
        ('c7ce8c1462c311eec24dff9e2532ac6241e50ae57e7d1833af21942136972f23',
         '642a2e66332f507c92bda910158dfe46fc10afbf72218764899d3af99a043fac',
         '30440220618513f4cfc87dde798ce5febae7634c23e7b9254a1eabf486be820f6a7c2c4702204fef459393a2b931f949e63ced06888f35e286e446dc46feb24b5b5f81c6ed52'),
    ]
    for privkey_hex, digest_hex, expected_sig in cases:
        eckey = ecc.ECPrivkey(bfh(privkey_hex))
        sig = eckey.sign_transaction(bfh(digest_hex))
        self.assertEqual(expected_sig, sig.hex())
def test_verify_fail_f_tx_even(self):
    """Raise if inner node of merkle branch is valid tx.
    ('even' fake leaf position)"""
    # inner node: last 32 bytes of T; fake leaf: first 32 bytes of T
    fake_mbranch = [hash_encode(bfh(VALID_64_BYTE_TX[64:]))] + MERKLE_BRANCH
    f_tx_hash = hash_encode(bfh(VALID_64_BYTE_TX[:64]))
    with self.assertRaises(InnerNodeOfSpvProofIsValidTx):
        SPV.hash_merkle_root(fake_mbranch, f_tx_hash, 6)
def parse_script(self, x):
    """Assemble a hex script from a whitespace-separated token string.

    Tokens starting with 'OP_' are looked up as opcodes; every other
    token must be hex data and is pushed verbatim.
    """
    parts = []
    for token in x.split():
        if token.startswith('OP_'):
            parts.append(construct_script([opcodes[token]]))
        else:
            bfh(token)  # raises if the token is not valid hex data
            parts.append(construct_script([token]))
    return ''.join(parts)
def parse_output(self, x) -> bytes:
    """Convert an address or raw script string into scriptpubkey bytes.

    Tries address parsing first, then script parsing; raises if neither
    interpretation succeeds.
    """
    try:
        return bfh(bitcoin.address_to_script(self.parse_address(x)))
    except Exception:
        pass
    try:
        return bfh(self.parse_script(x))
    except Exception:
        pass
    raise Exception("Invalid address or script.")
def get_noise_map(
        cls, versioned_seed: VersionedSeed) -> Dict[Tuple[int, int], int]:
    """Returns a map from (x,y) coordinate to pixel value 0/1, to be used as rawnoise."""
    w, h = cls.SIZE
    version = versioned_seed.version
    hex_seed = versioned_seed.seed
    checksum = versioned_seed.checksum
    if version == '0':
        # legacy: stdlib PRNG seeded directly from the hex seed
        random.seed(int(hex_seed, 16))
        return {(x, y): random.randint(0, 1)
                for x in range(w) for y in range(h)}
    if version == '1':
        drbg = DRBG(bfh(hex_seed + version + checksum))
        num_noise_bytes = 1929  # ~ w*h
        noise_array = bin(
            int.from_bytes(drbg.generate(num_noise_bytes), 'big'))[2:]
        # there's an approx 1/1024 chance that the generated number is 'too small'
        # and we would get IndexError below. easiest backwards compat fix:
        noise_array += '0' * (w * h - len(noise_array))
        bits = iter(noise_array)
        return {(x, y): int(next(bits))
                for x in range(w) for y in range(h)}
    raise Exception(f"unexpected revealer version: {version}")
def test_base58(self):
    """Round-trip a fixed byte blob through base58 encode/decode."""
    payload = bfh('0cd394bef396200774544c58a5be0189f3ceb6a41c8da023b099ce547dd4d8071ed6ed647259fba8c26382edbf5165dfd2404e7a8885d88437db16947a116e451a5d1325e3fd075f9d370120d2ab537af69f32e74fc0ba53aaaa637752964b3ac95cfea7')
    encoded = base_encode(payload, base=58)
    self.assertEqual("VuvZ2K5UEcXCVcogny7NH4Evd9UfeYipsTdWuU4jLDhyaESijKtrGWZTFzVZJPjaoC9jFBs3SFtarhDhQhAxkXosUD8PmUb5UXW1tafcoPiCp8jHy7Fe2CUPXAbYuMvAyrkocbe6", encoded)
    self.assertEqual(payload, base_decode(encoded, base=58))
def test_base58check(self):
    """Round-trip a fixed byte blob through base58check encode/decode."""
    payload = bfh('0cd394bef396200774544c58a5be0189f3ceb6a41c8da023b099ce547dd4d8071ed6ed647259fba8c26382edbf5165dfd2404e7a8885d88437db16947a116e451a5d1325e3fd075f9d370120d2ab537af69f32e74fc0ba53aaaa637752964b3ac95cfea7')
    encoded = EncodeBase58Check(payload)
    self.assertEqual("4GCCJsjHqFbHxWbFBvRg35cSeNLHKeNqkXqFHW87zRmz6iP1dJU9Tk2KHZkoKj45jzVsSV4ZbQ8GpPwko6V3Z7cRfux3zJhUw7TZB6Kpa8Vdya8cMuUtL5Ry3CLtMetaY42u52X7Ey6MAH", encoded)
    self.assertEqual(payload, DecodeBase58Check(encoded))
def test_base43(self):
    """Round-trip a raw transaction through base43 encode/decode."""
    raw_tx = bfh("020000000001021cd0e96f9ca202e017ca3465e3c13373c0df3a4cdd91c1fd02ea42a1a65d2a410000000000fdffffff757da7cf8322e5063785e2d8ada74702d2648fa2add2d533ba83c52eb110df690200000000fdffffff02d07e010000000000160014b544c86eaf95e3bb3b6d2cabb12ab40fc59cad9ca086010000000000232102ce0d066fbfcf150a5a1bbc4f312cd2eb080e8d8a47e5f2ce1a63b23215e54fb5ac02483045022100a9856bf10a950810abceeabc9a86e6ba533e130686e3d7863971b9377e7c658a0220288a69ef2b958a7c2ecfa376841d4a13817ed24fa9a0e0a6b9cb48e6439794c701210324e291735f83ff8de47301b12034950b80fa4724926a34d67e413d8ff8817c53024830450221008f885978f7af746679200ed55fe2e86c1303620824721f95cc41eb7965a3dfcf02207872082ac4a3c433d41a203e6d685a459e70e551904904711626ac899238c20a0121023d4c9deae1aacf3f822dd97a28deaec7d4e4ff97be746d124a63d20e582f5b290a971600")
    encoded = base_encode(raw_tx, base=43)
    self.assertEqual("3E2DH7.J3PKVZJ3RCOXQVS3Y./6-WE.75DDU0K58-0N1FRL565N8ZH-DG1Z.1IGWTE5HK8F7PWH5P8+V3XGZZ6GQBPHNDE+RD8CAQVV1/6PQEMJIZTGPMIJ93B8P$QX+Y2R:TGT9QW8S89U4N2.+FUT8VG+34USI/N/JJ3CE*KLSW:REE8T5Y*9:U6515JIUR$6TODLYHSDE3B5DAF:5TF7V*VAL3G40WBOM0DO2+CFKTTM$G-SO:8U0EW:M8V:4*R9ZDX$B1IRBP9PLMDK8H801PNTFB4$HL1+/U3F61P$4N:UAO88:N5D+J:HI4YR8IM:3A7K1YZ9VMRC/47$6GGW5JEL1N690TDQ4XW+TWHD:V.1.630QK*JN/.EITVU80YS3.8LWKO:2STLWZAVHUXFHQ..NZ0:.J/FTZM.KYDXIE1VBY7/:PHZMQ$.JZQ2.XT32440X/HM+UY/7QP4I+HTD9.DUSY-8R6HDR-B8/PF2NP7I2-MRW9VPW3U9.S0LQ.*221F8KVMD5ANJXZJ8WV4UFZ4R.$-NXVE+-FAL:WFERGU+WHJTHAP", encoded)
    self.assertEqual(raw_tx, base_decode(encoded, base=43))
def test_dash_tx_pro_up_reg_tx(self):
    """ProUpRegTx: deserialize, check payload fields, round-trip payload."""
    tx = transaction.Transaction(PRO_UP_REG_TX)
    deser = tx.to_json()
    assert deser['version'] == 3
    assert deser['tx_type'] == 3
    assert deser['extra_payload'] == PRO_UP_REG_TX_D
    extra = tx.extra_payload
    assert str(extra)
    assert extra.version == PRO_UP_REG_TX_D['version']
    assert extra.proTxHash == bfh(PRO_UP_REG_TX_D['proTxHash'])
    assert extra.mode == PRO_UP_REG_TX_D['mode']
    # fixed-width binary fields: check both length and value
    for name, size in (('PubKeyOperator', 48), ('KeyIdVoting', 20),
                       ('inputsHash', 32)):
        field = getattr(extra, name)
        assert len(field) == size
        assert field == bfh(PRO_UP_REG_TX_D[name])
    assert extra.scriptPayout == bfh(PRO_UP_REG_TX_D['scriptPayout'])
    assert extra.payloadSig == bfh(PRO_UP_REG_TX_D['payloadSig'])
    assert tx.serialize() == PRO_UP_REG_TX
    payload_hex = PRO_UP_REG_TX[384:]
    assert extra.to_hex_str() == payload_hex
    extra2 = ProTxBase.from_hex_str(SPEC_PRO_UP_REG_TX, payload_hex)
    for name in ('version', 'proTxHash', 'mode', 'PubKeyOperator',
                 'KeyIdVoting', 'scriptPayout', 'inputsHash',
                 'payloadSig'):
        assert getattr(extra2, name) == getattr(extra, name)
def test_dash_tx_cb_tx_v2(self):
    """CbTx v2: deserialize, check payload fields, round-trip payload.

    v2 adds merkleRootQuorums on top of the v1 fields, so the
    round-trip comparison must include it as well.
    """
    tx = transaction.Transaction(CB_TX_V2)
    deser = tx.to_json()
    assert deser['version'] == 3
    assert deser['tx_type'] == 5
    extra_dict = deser['extra_payload']
    assert extra_dict == CB_TX_V2_D
    extra = tx.extra_payload
    assert (str(extra))
    assert extra.version == CB_TX_V2_D['version']
    assert extra.height == CB_TX_V2_D['height']
    assert len(extra.merkleRootMNList) == 32
    assert extra.merkleRootMNList == bfh(CB_TX_V2_D['merkleRootMNList'])
    assert len(extra.merkleRootQuorums) == 32
    assert extra.merkleRootQuorums == bfh(CB_TX_V2_D['merkleRootQuorums'])
    ser = tx.serialize()
    assert ser == CB_TX_V2
    assert extra.to_hex_str() == CB_TX_V2[532:]
    extra2 = ProTxBase.from_hex_str(SPEC_CB_TX, CB_TX_V2[532:])
    assert extra2.version == extra.version
    assert extra2.height == extra.height
    assert extra2.merkleRootMNList == extra.merkleRootMNList
    # fix: the round-trip check previously omitted merkleRootQuorums,
    # unlike the sibling tests which compare every payload field
    assert extra2.merkleRootQuorums == extra.merkleRootQuorums
def _do_test_bip32(self, seed: str, sequence):
    """Derive along *sequence* from *seed*; return the final (xpub, xprv).

    For every non-hardened step, the public-derivation path must agree
    with the private-derivation path.
    """
    node = BIP32Node.from_rootseed(bfh(seed), xtype='standard')
    xprv, xpub = node.to_xprv(), node.to_xpub()
    self.assertEqual("m/", sequence[0:2])
    for n in sequence[2:].split('/'):
        is_hardened = n[-1] == "'"
        if not is_hardened:
            xpub2 = BIP32Node.from_xkey(xpub).subkey_at_public_derivation(n).to_xpub()
        node = BIP32Node.from_xkey(xprv).subkey_at_private_derivation(n)
        xprv, xpub = node.to_xprv(), node.to_xpub()
        if not is_hardened:
            self.assertEqual(xpub, xpub2)
    return xpub, xprv
def test_tx_outpoint(self):
    """TxOutPoint: null/non-null flags, str form, serialization round-trip."""
    # (hash bytes, index, is_null, hash_is_null, str form, serialized hex)
    cases = [
        # normal outpoint
        (bfh(TEST_HASH)[::-1], 1, False, False, TEST_HASH + ':1',
         bh2u(bfh(TEST_HASH)[::-1]) + '01000000'),
        # null outpoint
        (b'\x00' * 32, -1, True, True, '0' * 64 + ':-1',
         '0' * 64 + 'f' * 8),
        # null hash but valid index
        (b'\x00' * 32, 0, False, True, '0' * 64 + ':0',
         '0' * 64 + '00000000'),
    ]
    for out_hash, idx, is_null, hash_is_null, str_form, ser_hex in cases:
        o = TxOutPoint(out_hash, idx)
        assert o.is_null == is_null
        assert o.hash_is_null == hash_is_null
        assert str(o) == str_form
        ser = o.serialize()
        assert bh2u(ser) == ser_hex
        s = BCDataStream()
        s.write(ser)
        o2 = TxOutPoint.read_vds(s)
        assert str(o2) == str(o)
def test_dash_tx_pro_reg_tx(self):
    """ProRegTx: deserialize, check payload fields, round-trip payload."""
    tx = transaction.Transaction(PRO_REG_TX)
    deser = tx.to_json()
    assert deser['version'] == 3
    assert deser['tx_type'] == 1
    assert deser['extra_payload'] == PRO_REG_TX_D
    extra = tx.extra_payload
    assert str(extra)
    assert extra.version == PRO_REG_TX_D['version']
    assert extra.type == PRO_REG_TX_D['type']
    assert extra.mode == PRO_REG_TX_D['mode']
    outpoint_d = PRO_REG_TX_D['collateralOutpoint']
    assert len(extra.collateralOutpoint.hash) == 32
    # outpoint hash is stored little-endian
    assert extra.collateralOutpoint.hash == bfh(outpoint_d['hash'])[::-1]
    assert extra.collateralOutpoint.index == outpoint_d['index']
    assert extra.ipAddress == PRO_REG_TX_D['ipAddress']
    assert extra.port == PRO_REG_TX_D['port']
    # fixed-width binary fields: check both length and value
    for name, size in (('KeyIdOwner', 20), ('PubKeyOperator', 48),
                       ('KeyIdVoting', 20), ('inputsHash', 32)):
        field = getattr(extra, name)
        assert len(field) == size
        assert field == bfh(PRO_REG_TX_D[name])
    assert extra.operatorReward == PRO_REG_TX_D['operatorReward']
    assert extra.scriptPayout == bfh(PRO_REG_TX_D['scriptPayout'])
    assert extra.payloadSig == bfh(PRO_REG_TX_D['payloadSig'])
    assert tx.serialize() == PRO_REG_TX
    payload_hex = PRO_REG_TX[980:]
    assert extra.to_hex_str() == payload_hex
    extra2 = ProTxBase.from_hex_str(SPEC_PRO_REG_TX, payload_hex)
    for name in ('version', 'type', 'mode', 'ipAddress', 'port',
                 'KeyIdOwner', 'PubKeyOperator', 'KeyIdVoting',
                 'operatorReward', 'scriptPayout', 'inputsHash',
                 'payloadSig'):
        assert getattr(extra2, name) == getattr(extra, name)
    assert extra2.collateralOutpoint.hash == extra.collateralOutpoint.hash
    assert extra2.collateralOutpoint.index == \
        extra.collateralOutpoint.index
def sign_transaction(self, keystore, tx: PartialTransaction, prev_tx):
    """Sign *tx* on the hardware device and write the signatures back.

    prev_tx maps txid hex -> electrum Transaction for the inputs' UTXOs.
    """
    prev_txes = {
        bfh(txid): self.electrum_tx_to_txtype(prev)
        for txid, prev in prev_tx.items()
    }
    client = self.get_client(keystore)
    inputs = self.tx_inputs(tx, for_sig=True, keystore=keystore)
    outputs = self.tx_outputs(tx, keystore=keystore)
    details = SignTx(lock_time=tx.locktime, version=tx.version)
    signatures, _ = client.sign_tx(self.get_coin_name(), inputs, outputs,
                                   details=details, prev_txes=prev_txes)
    # device returns raw sigs; append SIGHASH_ALL byte ('01') to each
    tx.update_signatures([bh2u(sig) + '01' for sig in signatures])
def test_version_msg(self):
    """Parse a fixed version message, verify every field, reserialize."""
    msg = DashVersionMsg.from_hex(VERSION_MSG)
    expected = {
        'version': 70215,
        'services': 5,
        'timestamp': 1567673683,
        'recv_services': 0,
        'recv_ip': IPv6Address('::'),
        'recv_port': 0,
        'trans_services': 5,
        'trans_ip': IPv6Address('::'),
        'trans_port': 0,
        'nonce': 12615609395080869973,
        'user_agent': b'/Dash Core:0.14.0.3/',
        'start_height': 169118,
        'relay': 1,
        'mnauth_challenge': bfh('9404092b784754ae2757c614d3e76ae1'
                                '80dc66305055ee6e6c62b409c1d57b30'),
        'fMasternode': 0,
    }
    for name, value in expected.items():
        assert getattr(msg, name) == value
    # serialization must be the exact inverse of parsing
    assert bh2u(msg.serialize()) == VERSION_MSG
def test_dash_tx_sub_tx_topup(self):
    """SubTxTopup: deserialize, check payload fields, round-trip payload."""
    tx = transaction.Transaction(SUB_TX_TOPUP)
    deser = tx.to_json()
    assert deser['version'] == 3
    assert deser['tx_type'] == 9
    assert deser['extra_payload'] == SUB_TX_TOPUP_D
    extra = tx.extra_payload
    assert str(extra)
    assert extra.version == SUB_TX_TOPUP_D['version']
    assert len(extra.regTxHash) == 32
    assert extra.regTxHash == bfh(SUB_TX_TOPUP_D['regTxHash'])
    assert tx.serialize() == SUB_TX_TOPUP
    payload_hex = SUB_TX_TOPUP[386:]
    assert extra.to_hex_str() == payload_hex
    extra2 = ProTxBase.from_hex_str(SPEC_SUB_TX_TOPUP, payload_hex)
    for name in ('version', 'regTxHash'):
        assert getattr(extra2, name) == getattr(extra, name)
def update(self):
    """Refresh all dialog fields (title, dates, amounts, fees, outputs)
    from the wallet's current view of self.tx."""
    raw_tx = str(self.tx)
    # tx type lives in the high 16 bits of the 4-byte header
    tx_type = tx_header_to_tx_type(bfh(raw_tx[:8]))
    if tx_type == 0:
        # not a DIP2 special tx: check if it is a PrivateSend tx
        txid = self.tx.txid()
        tx_type, completed = self.wallet.db.get_ps_tx(txid)
    self.title = '%s %s' % (SPEC_TX_NAMES[tx_type], _('Transaction'))
    format_amount = self.app.format_amount_and_units
    tx_details = self.wallet.get_tx_info(self.tx)
    tx_mined_status = tx_details.tx_mined_status
    exp_n = tx_details.mempool_depth_bytes
    amount, fee = tx_details.amount, tx_details.fee
    self.status_str = tx_details.status
    self.description = tx_details.label
    self.can_broadcast = tx_details.can_broadcast
    self.tx_hash = tx_details.txid or ''
    islock = tx_details.islock
    timestamp = tx_mined_status.timestamp
    # fall back to the InstantSend lock time when not yet mined
    if not timestamp and islock:
        timestamp = islock
    if timestamp:
        self.date_label = _('Date')
        dttm = datetime.fromtimestamp(timestamp)
        # drop microsecond tail; keep millisecond precision
        self.date_str = dttm.isoformat(' ')[:-3]
    elif exp_n is not None:
        # unconfirmed: show position in the mempool instead of a date
        self.date_label = _('Mempool depth')
        self.date_str = _('{} from tip').format('%.2f MB'
                                                % (exp_n / 1000000))
    else:
        self.date_label = ''
        self.date_str = ''
    self.can_sign = self.wallet.can_sign(self.tx)
    if amount is None:
        self.amount_str = _("Transaction unrelated to your wallet")
    elif amount > 0:
        # incoming funds
        self.is_mine = False
        self.amount_str = format_amount(amount)
    else:
        # outgoing funds: display as a positive number
        self.is_mine = True
        self.amount_str = format_amount(-amount)
    # hide the fee if signing it could burn coins (suspicious fee)
    risk_of_burning_coins = (
        isinstance(self.tx, PartialTransaction)
        and self.can_sign
        and fee is not None
        and bool(
            self.wallet.get_warning_for_risk_of_burning_coins_as_fees(
                self.tx)))
    if fee is not None and not risk_of_burning_coins:
        self.fee_str = format_amount(fee)
        fee_per_kb = fee / self.tx.estimated_size() * 1000
        self.feerate_str = self.app.format_fee_rate(fee_per_kb)
    else:
        self.fee_str = _('unknown')
        self.feerate_str = _('unknown')
    self.ids.output_list.update(self.tx.outputs())
    # colorize output rows by address ownership
    for dict_entry in self.ids.output_list.data:
        dict_entry['color'], dict_entry[
            'background_color'] = address_colors(self.wallet,
                                                 dict_entry['address'])
    self.can_remove_tx = tx_details.can_remove
    self.update_action_button()
def sign_transaction(self, tx, password):
    """Sign *tx* input-by-input on a Ledger device (legacy protocol).

    Collects per-input metadata (including Dash DIP2 extra payload of the
    previous tx), detects the change output, then drives the device
    through trusted-input fetching and untrusted-hash signing.
    Signatures are written back into *tx*.
    """
    if tx.is_complete():
        return
    inputs = []
    inputsPaths = []
    chipInputs = []
    redeemScripts = []
    changePath = ""
    output = None
    p2shTransaction = False
    pin = ""
    client_ledger = self.get_client(
    )  # prompt for the PIN before displaying the dialog if necessary
    client_electrum = self.get_client_electrum()
    assert client_electrum
    # Fetch inputs of the transaction to sign
    for txin in tx.inputs():
        if txin.is_coinbase_input():
            self.give_error(
                "Coinbase not supported")  # should never happen
        if txin.script_type in ['p2sh']:
            p2shTransaction = True
        my_pubkey, full_path = self.find_my_pubkey_in_txinout(txin)
        if not full_path:
            self.give_error("No matching pubkey for sign_transaction"
                            )  # should never happen
        # strip the leading "m/" — device wants the bare path
        full_path = convert_bip32_intpath_to_strpath(full_path)[2:]
        redeemScript = Transaction.get_preimage_script(txin)
        txin_prev_tx = txin.utxo
        if txin_prev_tx is None:
            raise UserFacingException(
                _('Missing previous tx for legacy input.'))
        txin_prev_tx_raw = txin_prev_tx.serialize(
        ) if txin_prev_tx else None
        txin_prev_tx.deserialize()
        tx_type = txin_prev_tx.tx_type
        extra_payload = txin_prev_tx.extra_payload
        extra_data = b''
        # DIP2 special tx: device must also hash the extra payload,
        # length-prefixed as a var_int
        if tx_type and extra_payload:
            extra_payload = extra_payload.serialize()
            extra_data = bfh(var_int(len(extra_payload))) + extra_payload
        inputs.append([
            txin_prev_tx_raw, txin.prevout.out_idx, redeemScript,
            txin.prevout.txid.hex(), my_pubkey, txin.nsequence,
            txin.value_sats(), extra_data
        ])
        inputsPaths.append(full_path)
    # Sanity check
    if p2shTransaction:
        for txin in tx.inputs():
            if txin.script_type != 'p2sh':
                self.give_error(
                    "P2SH / regular input mixed in same transaction not supported"
                )  # should never happen
    # serialize the outputs section by hand (count + value/script pairs)
    txOutput = var_int(len(tx.outputs()))
    for o in tx.outputs():
        txOutput += int_to_hex(o.value, 8)
        script = o.scriptpubkey.hex()
        txOutput += var_int(len(script) // 2)
        txOutput += script
    txOutput = bfh(txOutput)
    if not client_electrum.supports_multi_output():
        if len(tx.outputs()) > 2:
            self.give_error(
                "Transaction with more than 2 outputs not supported")
    for txout in tx.outputs():
        if client_electrum.is_hw1(
        ) and txout.address and not is_b58_address(txout.address):
            self.give_error(
                _("This {} device can only send to base58 addresses.").
                format(self.device))
        if not txout.address:
            if client_electrum.is_hw1():
                self.give_error(
                    _("Only address outputs are supported by {}").format(
                        self.device))
            # note: max_size based on https://github.com/LedgerHQ/ledger-app-btc/commit/3a78dee9c0484821df58975803e40d58fbfc2c38#diff-c61ccd96a6d8b54d48f54a3bc4dfa7e2R26
            validate_op_return_output(txout, max_size=190)
    # Output "change" detection
    # - only one output and one change is authorized (for hw.1 and nano)
    # - at most one output can bypass confirmation (~change) (for all)
    if not p2shTransaction:
        has_change = False
        any_output_on_change_branch = is_any_tx_output_on_change_branch(tx)
        for txout in tx.outputs():
            if txout.is_mine and len(tx.outputs()) > 1 \
                    and not has_change:
                # prioritise hiding outputs on the 'change' branch from user
                # because no more than one change address allowed
                if txout.is_change == any_output_on_change_branch:
                    my_pubkey, changePath = self.find_my_pubkey_in_txinout(
                        txout)
                    assert changePath
                    changePath = convert_bip32_intpath_to_strpath(
                        changePath)[2:]
                    has_change = True
                else:
                    output = txout.address
            else:
                output = txout.address
    if not self.get_client_electrum().canAlternateCoinVersions:
        # old firmware only shows mainnet-style addresses; re-encode
        v, h = b58_address_to_hash160(output)
        if v == constants.net.ADDRTYPE_P2PKH:
            output = hash160_to_b58_address(h, 0)
    self.handler.show_message(
        _("Confirm Transaction on your Ledger device..."))
    try:
        # Get trusted inputs from the original transactions
        for utxo in inputs:
            sequence = int_to_hex(utxo[5], 4)
            if (not p2shTransaction
                ) or client_electrum.supports_multi_output():
                txtmp = bitcoinTransaction(bfh(utxo[0]))
                txtmp.extra_data = utxo[7]
                trustedInput = client_ledger.getTrustedInput(
                    txtmp, utxo[1])
                trustedInput['sequence'] = sequence
                chipInputs.append(trustedInput)
                if p2shTransaction:
                    redeemScripts.append(bfh(utxo[2]))
                else:
                    redeemScripts.append(txtmp.outputs[utxo[1]].script)
            else:
                # legacy path: feed the raw outpoint instead of a
                # device-attested trusted input
                tmp = bfh(utxo[3])[::-1]
                tmp += bfh(int_to_hex(utxo[1], 4))
                chipInputs.append({'value': tmp, 'sequence': sequence})
                redeemScripts.append(bfh(utxo[2]))
        # Sign all inputs
        firstTransaction = True
        inputIndex = 0
        rawTx = tx.serialize_to_network()
        client_ledger.enableAlternate2fa(False)
        while inputIndex < len(inputs):
            client_ledger.startUntrustedTransaction(
                firstTransaction, inputIndex, chipInputs,
                redeemScripts[inputIndex], version=tx.version)
            # we don't set meaningful outputAddress, amount and fees
            # as we only care about the alternateEncoding==True branch
            outputData = client_ledger.finalizeInput(
                b'', 0, 0, changePath, bfh(rawTx))
            outputData['outputData'] = txOutput
            if outputData['confirmationNeeded']:
                # same input will be retried on the next loop iteration
                outputData['address'] = output
                self.handler.finished()
                # do the authenticate dialog and get pin:
                pin = self.handler.get_auth(outputData,
                                            client=client_electrum)
                if not pin:
                    raise UserWarning()
                self.handler.show_message(
                    _("Confirmed. Signing Transaction..."))
            else:
                # Sign input with the provided PIN
                inputSignature = client_ledger.untrustedHashSign(
                    inputsPaths[inputIndex], pin, lockTime=tx.locktime)
                inputSignature[0] = 0x30  # force for 1.4.9+
                my_pubkey = inputs[inputIndex][4]
                tx.add_signature_to_txin(txin_idx=inputIndex,
                                         signing_pubkey=my_pubkey.hex(),
                                         sig=inputSignature.hex())
                inputIndex = inputIndex + 1
                firstTransaction = False
    except UserWarning:
        self.handler.show_error(_('Cancelled by user'))
        return
    except BTChipException as e:
        if e.sw in (0x6985, 0x6d00):  # cancelled by user
            return
        elif e.sw == 0x6982:
            raise  # pin lock. decorator will catch it
        else:
            self.logger.exception('')
            self.give_error(e, True)
    except BaseException as e:
        self.logger.exception('')
        self.give_error(e, True)
    finally:
        self.handler.finished()
class TestBlockchain(ElectrumTestCase):
    """Exercise Blockchain fork handling: forking, best-chain swapping,
    parent bookkeeping, and header lookup across competing chains.

    The HEADERS table holds pre-deserialized regtest headers keyed by a
    single letter; the ASCII diagram below HEADERS draws the fork tree.
    Note that several letters share a height (e.g. 'O' and 'G' are both
    height 6) -- they are competing headers on different branches.
    """

    # (header dict, height) pairs, deserialized once at class-definition time.
    HEADERS = {
        'A': deserialize_header(bfh("010000000000000000000000000000000000000000000000000000000000000000000000c762a6567f3cc092f0684bb62b7e00a84890b990f07cc71a6bb58d64b98e02e0 b9968054 ffff7f20 ffba1000"), 0),
        'B': deserialize_header(bfh("000000202e3df23eec5cd6a86edd509539028e2c3a3dc05315eb28f2baa43218ca080000186c8dfd970a4545f79916bc1d75c9d00432f57c89209bf3bb115b7612848f509c25f45bffff7f2000000000"), 1),
        'C': deserialize_header(bfh("000000200a8be74779a59fec4f56abd6ce33bf2a8a1e896b0290a2aba90cf8fa6e6a88f7bf2cbf153013a1c54abaf70e95198fcef2f3059cc6b4d0f7e876808e7d24d11cc825f45bffff7f2000000000"), 2),
        'D': deserialize_header(bfh("000000204a030521422dda1f980cfc2b38149edd3d8eab547e6efa3ab855048feb68dbdae71019d7feecd9b8596eca9a67032c5f4641b23b5d731dc393e37de7f9c2f299e725f45bffff7f2000000000"), 3),
        'E': deserialize_header(bfh("00000020e39959c005b364248b24a17a72fcfe89d8478c71645b85edd444031ef5e5f896a3586da94c71753f27c075f57f44faf913c31177a0957bbda42e7699e3a2141aed25f45bffff7f2001000000"), 4),
        'F': deserialize_header(bfh("00000020d02b1711b7bc72feb7b3e599e9f9bb67f163c95203a64f6dcd4f6176c15d31437aee1d692d1615c3bdf52c291032144ce9e3b258a473c17c745047f3431ff8e2ee25f45bffff7f2000000000"), 5),
        'O': deserialize_header(bfh("00000020ed0bfee047765d7f4233106a13b4ff6d6c67f7ef9aec0e7466759f00ea74b2613a141ce635cbb1cd2b3a4fcdd0a3380517845ba41736c82a79cab535d31128066526f45bffff7f2001000000"), 6),
        'P': deserialize_header(bfh("000000201f9b9f1e295fd4eda90b03b62a676f93642d28c258d8222a2e9d5f0c75cae0a99690c2fe7c1a4450c74dc908fe94dd96c3b0637d51475e9e06a78e944a0c7fe28126f45bffff7f2000000000"), 7),
        'Q': deserialize_header(bfh("000000200076268f577977b9e7386f68a9c3c332aa613d27243abd8167a1bd891adf404f148be228a4c3f2061bafe7efdfc4a8d5a94759464b9b5c619994d45dfcaf49e1a126f45bffff7f2000000000"), 8),
        'R': deserialize_header(bfh("000000208cfac7d4caa975c6b7fe770a8ea35a77a02f6e9b1900bae67a389619095c757515681cb2d00ff889193f6a68a93f5096aeb2d84ca0af6185a462555822552221a626f45bffff7f2000000000"), 9),
        'S': deserialize_header(bfh("00000020936defed88e60da5cef2106338ef9ec221d65e9226f1fc29ec76e4b7c34a649c9dc087fc977b06c24a69c682d1afd1020e6dc1f087571ccec66310a786e1548fab26f45bffff7f2000000000"), 10),
        'T': deserialize_header(bfh("00000020372528176ba7c014b6f388ba338c7a87a5c50bc4d8a1a1d5900cbf5725e6822903b243756c25053253aeda309604363460a3911015929e68705bd89dff6fe064b026f45bffff7f2002000000"), 11),
        'U': deserialize_header(bfh("00000020c5a999182175cb571c7a15a08b8577e21b67c156a2c0ceebcce0d897e664fc3ad67cb902a7d807cee7676cb543feec3e053aa824d5dfb528d5b94f9760313d9db726f45bffff7f2001000000"), 12),
        'G': deserialize_header(bfh("00000020ed0bfee047765d7f4233106a13b4ff6d6c67f7ef9aec0e7466759f00ea74b2613a141ce635cbb1cd2b3a4fcdd0a3380517845ba41736c82a79cab535d31128066928f45bffff7f2001000000"), 6),
        'H': deserialize_header(bfh("00000020f8ca2216e002361e7cc1dd3e1197443e0b8068adaeec43d14be0e4f2159659e39690c2fe7c1a4450c74dc908fe94dd96c3b0637d51475e9e06a78e944a0c7fe26a28f45bffff7f2002000000"), 7),
        'I': deserialize_header(bfh("00000020996b8b880bfe34b81dda59ae28ee28625a4dff565f671540a4703ebabd0ab991148be228a4c3f2061bafe7efdfc4a8d5a94759464b9b5c619994d45dfcaf49e16a28f45bffff7f2000000000"), 8),
        'J': deserialize_header(bfh("000000201d5a4dfeeda94c6e4c3e40ce5c30df07e8103dba70cbce9d6b0890405c76b06715681cb2d00ff889193f6a68a93f5096aeb2d84ca0af6185a462555822552221c928f45bffff7f2000000000"), 9),
        'K': deserialize_header(bfh("00000020f93c46944a529187faae721951e66e187a0e910104e91ec8d1d4a914cadd79a89dc087fc977b06c24a69c682d1afd1020e6dc1f087571ccec66310a786e1548fca28f45bffff7f2000000000"), 10),
        'L': deserialize_header(bfh("00000020d76bdf59ed1ce4a4a31aa7649f8a39da2b956515f3bdb78b2bcdaaed60444bad03b243756c25053253aeda309604363460a3911015929e68705bd89dff6fe064ca28f45bffff7f2000000000"), 11),
        'M': deserialize_header(bfh("000000201d5a4dfeeda94c6e4c3e40ce5c30df07e8103dba70cbce9d6b0890405c76b06715681cb2d00ff889193f6a68a93f5096aeb2d84ca0af6185a4625558225522214229f45bffff7f2000000000"), 9),
        'N': deserialize_header(bfh("00000020ff8ef64ad77c7c02103127be41dc39dda5f4dd17cbbaa7475fa8b7a3dd110ee19dc087fc977b06c24a69c682d1afd1020e6dc1f087571ccec66310a786e1548f4329f45bffff7f2003000000"), 10),
        'X': deserialize_header(bfh("000000202857b96792f630a80f7c834afd5985b833794037930c1fe655c23b6eb769c85203b243756c25053253aeda309604363460a3911015929e68705bd89dff6fe0649b29f45bffff7f2002000000"), 11),
        'Y': deserialize_header(bfh("000000206cc9a0dec93cffaab358ef9bd06fa0137d53e37a4b251f57da831ef31fccf9f2d67cb902a7d807cee7676cb543feec3e053aa824d5dfb528d5b94f9760313d9d9b29f45bffff7f2000000000"), 12),
        'Z': deserialize_header(bfh("00000020756a6bfe58694141de4abf3317bccfa105b5ec30b997dda15a9ab02a9d86eba00f2596c29203f8a0f71ae94193092dc8f113be3dbee4579f1e649fa3d6dcc38c622ef45bffff7f2003000000"), 13),
    }
    # tree of headers:
    #                                - M <- N <- X <- Y <- Z
    #                              /
    #                          - G <- H <- I <- J <- K <- L
    #                        /
    # A <- B <- C <- D <- E <- F <- O <- P <- Q <- R <- S <- T <- U

    @classmethod
    def setUpClass(cls):
        # the headers above are regtest headers; switch the net for the
        # whole class and restore mainnet in tearDownClass
        super().setUpClass()
        constants.set_regtest()

    @classmethod
    def tearDownClass(cls):
        super().tearDownClass()
        constants.set_mainnet()

    def setUp(self):
        # fresh data dir + config per test; reset the global chain registry
        # so tests do not leak chains into each other
        super().setUp()
        self.data_dir = self.electrum_path
        make_dir(os.path.join(self.data_dir, 'forks'))
        self.config = SimpleConfig({'electrum_path': self.data_dir})
        blockchain.blockchains = {}

    def _append_header(self, chain: Blockchain, header: dict):
        """Assert *header* connects to *chain*'s tip, then persist it."""
        self.assertTrue(chain.can_connect(header))
        chain.save_header(header)

    def test_get_height_of_last_common_block_with_chain(self):
        """Pairwise last-common-block heights between the three chains."""
        blockchain.blockchains[constants.net.GENESIS] = chain_u = Blockchain(
            config=self.config, forkpoint=0, parent=None,
            forkpoint_hash=constants.net.GENESIS, prev_hash=None)
        open(chain_u.path(), 'w+').close()  # create an empty headers file
        self._append_header(chain_u, self.HEADERS['A'])
        self._append_header(chain_u, self.HEADERS['B'])
        self._append_header(chain_u, self.HEADERS['C'])
        self._append_header(chain_u, self.HEADERS['D'])
        self._append_header(chain_u, self.HEADERS['E'])
        self._append_header(chain_u, self.HEADERS['F'])
        self._append_header(chain_u, self.HEADERS['O'])
        self._append_header(chain_u, self.HEADERS['P'])
        self._append_header(chain_u, self.HEADERS['Q'])

        # fork at G (height 6); chains diverge after F (height 5)
        chain_l = chain_u.fork(self.HEADERS['G'])
        self._append_header(chain_l, self.HEADERS['H'])
        self._append_header(chain_l, self.HEADERS['I'])
        self._append_header(chain_l, self.HEADERS['J'])
        self._append_header(chain_l, self.HEADERS['K'])
        self._append_header(chain_l, self.HEADERS['L'])

        self.assertEqual({chain_u: 8, chain_l: 5}, chain_u.get_parent_heights())
        self.assertEqual({chain_l: 11}, chain_l.get_parent_heights())

        # second fork at M (height 9); chain_z diverges from chain_l after I (height 8)
        chain_z = chain_l.fork(self.HEADERS['M'])
        self._append_header(chain_z, self.HEADERS['N'])
        self._append_header(chain_z, self.HEADERS['X'])
        self._append_header(chain_z, self.HEADERS['Y'])
        self._append_header(chain_z, self.HEADERS['Z'])

        self.assertEqual({chain_u: 8, chain_z: 5}, chain_u.get_parent_heights())
        self.assertEqual({chain_l: 11, chain_z: 8}, chain_l.get_parent_heights())
        self.assertEqual({chain_z: 13}, chain_z.get_parent_heights())
        self.assertEqual(5, chain_u.get_height_of_last_common_block_with_chain(chain_l))
        self.assertEqual(5, chain_l.get_height_of_last_common_block_with_chain(chain_u))
        self.assertEqual(5, chain_u.get_height_of_last_common_block_with_chain(chain_z))
        self.assertEqual(5, chain_z.get_height_of_last_common_block_with_chain(chain_u))
        self.assertEqual(8, chain_l.get_height_of_last_common_block_with_chain(chain_z))
        self.assertEqual(8, chain_z.get_height_of_last_common_block_with_chain(chain_l))

        self._append_header(chain_u, self.HEADERS['R'])
        self._append_header(chain_u, self.HEADERS['S'])
        self._append_header(chain_u, self.HEADERS['T'])
        self._append_header(chain_u, self.HEADERS['U'])

        self.assertEqual({chain_u: 12, chain_z: 5}, chain_u.get_parent_heights())
        self.assertEqual({chain_l: 11, chain_z: 8}, chain_l.get_parent_heights())
        self.assertEqual({chain_z: 13}, chain_z.get_parent_heights())
        # extending chain_u must not change any common-block height
        self.assertEqual(5, chain_u.get_height_of_last_common_block_with_chain(chain_l))
        self.assertEqual(5, chain_l.get_height_of_last_common_block_with_chain(chain_u))
        self.assertEqual(5, chain_u.get_height_of_last_common_block_with_chain(chain_z))
        self.assertEqual(5, chain_z.get_height_of_last_common_block_with_chain(chain_u))
        self.assertEqual(8, chain_l.get_height_of_last_common_block_with_chain(chain_z))
        self.assertEqual(8, chain_z.get_height_of_last_common_block_with_chain(chain_l))

    def test_parents_after_forking(self):
        """Chain.parent always points at the current best chain (or None)."""
        blockchain.blockchains[constants.net.GENESIS] = chain_u = Blockchain(
            config=self.config, forkpoint=0, parent=None,
            forkpoint_hash=constants.net.GENESIS, prev_hash=None)
        open(chain_u.path(), 'w+').close()
        self._append_header(chain_u, self.HEADERS['A'])
        self._append_header(chain_u, self.HEADERS['B'])
        self._append_header(chain_u, self.HEADERS['C'])
        self._append_header(chain_u, self.HEADERS['D'])
        self._append_header(chain_u, self.HEADERS['E'])
        self._append_header(chain_u, self.HEADERS['F'])
        self._append_header(chain_u, self.HEADERS['O'])
        self._append_header(chain_u, self.HEADERS['P'])
        self._append_header(chain_u, self.HEADERS['Q'])
        self.assertEqual(None, chain_u.parent)

        chain_l = chain_u.fork(self.HEADERS['G'])
        self._append_header(chain_l, self.HEADERS['H'])
        self._append_header(chain_l, self.HEADERS['I'])
        self._append_header(chain_l, self.HEADERS['J'])
        self._append_header(chain_l, self.HEADERS['K'])
        self._append_header(chain_l, self.HEADERS['L'])
        # chain_l reached height 11 > chain_u's 8, so it became best
        self.assertEqual(None, chain_l.parent)
        self.assertEqual(chain_l, chain_u.parent)

        chain_z = chain_l.fork(self.HEADERS['M'])
        self._append_header(chain_z, self.HEADERS['N'])
        self._append_header(chain_z, self.HEADERS['X'])
        self._append_header(chain_z, self.HEADERS['Y'])
        self._append_header(chain_z, self.HEADERS['Z'])
        # chain_z (height 13) is now best; both other chains hang off it
        self.assertEqual(chain_z, chain_u.parent)
        self.assertEqual(chain_z, chain_l.parent)
        self.assertEqual(None, chain_z.parent)

        self._append_header(chain_u, self.HEADERS['R'])
        self._append_header(chain_u, self.HEADERS['S'])
        self._append_header(chain_u, self.HEADERS['T'])
        self._append_header(chain_u, self.HEADERS['U'])
        # chain_u catching up to height 12 does not overtake chain_z (13)
        self.assertEqual(chain_z, chain_u.parent)
        self.assertEqual(chain_z, chain_l.parent)
        self.assertEqual(None, chain_z.parent)

    def test_forking_and_swapping(self):
        """Fork files on disk are swapped when a fork overtakes the best chain."""
        blockchain.blockchains[constants.net.GENESIS] = chain_u = Blockchain(
            config=self.config, forkpoint=0, parent=None,
            forkpoint_hash=constants.net.GENESIS, prev_hash=None)
        open(chain_u.path(), 'w+').close()
        self._append_header(chain_u, self.HEADERS['A'])
        self._append_header(chain_u, self.HEADERS['B'])
        self._append_header(chain_u, self.HEADERS['C'])
        self._append_header(chain_u, self.HEADERS['D'])
        self._append_header(chain_u, self.HEADERS['E'])
        self._append_header(chain_u, self.HEADERS['F'])
        self._append_header(chain_u, self.HEADERS['O'])
        self._append_header(chain_u, self.HEADERS['P'])
        self._append_header(chain_u, self.HEADERS['Q'])
        self._append_header(chain_u, self.HEADERS['R'])

        chain_l = chain_u.fork(self.HEADERS['G'])
        self._append_header(chain_l, self.HEADERS['H'])
        self._append_header(chain_l, self.HEADERS['I'])
        self._append_header(chain_l, self.HEADERS['J'])

        # do checks: chain_u still best, chain_l lives in a fork file
        self.assertEqual(2, len(blockchain.blockchains))
        self.assertEqual(1, len(os.listdir(os.path.join(self.data_dir, "forks"))))
        self.assertEqual(0, chain_u.forkpoint)
        self.assertEqual(None, chain_u.parent)
        self.assertEqual(constants.net.GENESIS, chain_u._forkpoint_hash)
        self.assertEqual(None, chain_u._prev_hash)
        self.assertEqual(os.path.join(self.data_dir, "blockchain_headers"), chain_u.path())
        self.assertEqual(10 * 80, os.stat(chain_u.path()).st_size)  # headers A..R
        self.assertEqual(6, chain_l.forkpoint)
        self.assertEqual(chain_u, chain_l.parent)
        self.assertEqual(hash_header(self.HEADERS['G']), chain_l._forkpoint_hash)
        self.assertEqual(hash_header(self.HEADERS['F']), chain_l._prev_hash)
        self.assertEqual(
            os.path.join(
                self.data_dir, "forks",
                "fork2_6_61b274ea009f7566740eec9aeff7676c6dffb4136a1033427f5d7647e0fe0bed_e3599615f2e4e04bd143ecaead68800b3e4497113eddc17c1e3602e01622caf8"),
            chain_l.path())
        self.assertEqual(4 * 80, os.stat(chain_l.path()).st_size)  # headers G..J

        self._append_header(chain_l, self.HEADERS['K'])

        # chains were swapped, do checks: chain_l now owns the main file,
        # chain_u was moved into a fork file rooted at 'O'
        self.assertEqual(2, len(blockchain.blockchains))
        self.assertEqual(1, len(os.listdir(os.path.join(self.data_dir, "forks"))))
        self.assertEqual(6, chain_u.forkpoint)
        self.assertEqual(chain_l, chain_u.parent)
        self.assertEqual(hash_header(self.HEADERS['O']), chain_u._forkpoint_hash)
        self.assertEqual(hash_header(self.HEADERS['F']), chain_u._prev_hash)
        self.assertEqual(
            os.path.join(
                self.data_dir, "forks",
                "fork2_6_61b274ea009f7566740eec9aeff7676c6dffb4136a1033427f5d7647e0fe0bed_a9e0ca750c5f9d2e2a22d858c2282d64936f672ab6030ba9edd45f291e9f9b1f"),
            chain_u.path())
        self.assertEqual(4 * 80, os.stat(chain_u.path()).st_size)  # headers O..R
        self.assertEqual(0, chain_l.forkpoint)
        self.assertEqual(None, chain_l.parent)
        self.assertEqual(constants.net.GENESIS, chain_l._forkpoint_hash)
        self.assertEqual(None, chain_l._prev_hash)
        self.assertEqual(os.path.join(self.data_dir, "blockchain_headers"), chain_l.path())
        self.assertEqual(11 * 80, os.stat(chain_l.path()).st_size)  # headers A..F, G..K

        # every stored header must still connect within its own chain
        for b in (chain_u, chain_l):
            self.assertTrue(all([b.can_connect(b.read_header(i), False) for i in range(b.height())]))

        self._append_header(chain_u, self.HEADERS['S'])
        self._append_header(chain_u, self.HEADERS['T'])
        self._append_header(chain_u, self.HEADERS['U'])
        self._append_header(chain_l, self.HEADERS['L'])

        chain_z = chain_l.fork(self.HEADERS['M'])
        self._append_header(chain_z, self.HEADERS['N'])
        self._append_header(chain_z, self.HEADERS['X'])
        self._append_header(chain_z, self.HEADERS['Y'])
        self._append_header(chain_z, self.HEADERS['Z'])

        # chain_z became best chain, do checks
        self.assertEqual(3, len(blockchain.blockchains))
        self.assertEqual(2, len(os.listdir(os.path.join(self.data_dir, "forks"))))
        self.assertEqual(0, chain_z.forkpoint)
        self.assertEqual(None, chain_z.parent)
        self.assertEqual(constants.net.GENESIS, chain_z._forkpoint_hash)
        self.assertEqual(None, chain_z._prev_hash)
        self.assertEqual(os.path.join(self.data_dir, "blockchain_headers"), chain_z.path())
        self.assertEqual(14 * 80, os.stat(chain_z.path()).st_size)  # heights 0..13
        self.assertEqual(9, chain_l.forkpoint)
        self.assertEqual(chain_z, chain_l.parent)
        self.assertEqual(hash_header(self.HEADERS['J']), chain_l._forkpoint_hash)
        self.assertEqual(hash_header(self.HEADERS['I']), chain_l._prev_hash)
        self.assertEqual(
            os.path.join(
                self.data_dir, "forks",
                "fork2_9_67b0765c4090086b9dcecb70ba3d10e807df305cce403e4c6e4ca9edfe4d5a1d_a879ddca14a9d4d1c81ee90401910e7a186ee6511972aefa8791524a94463cf9"),
            chain_l.path())
        self.assertEqual(3 * 80, os.stat(chain_l.path()).st_size)  # headers J..L
        self.assertEqual(6, chain_u.forkpoint)
        self.assertEqual(chain_z, chain_u.parent)
        self.assertEqual(hash_header(self.HEADERS['O']), chain_u._forkpoint_hash)
        self.assertEqual(hash_header(self.HEADERS['F']), chain_u._prev_hash)
        self.assertEqual(
            os.path.join(
                self.data_dir, "forks",
                "fork2_6_61b274ea009f7566740eec9aeff7676c6dffb4136a1033427f5d7647e0fe0bed_a9e0ca750c5f9d2e2a22d858c2282d64936f672ab6030ba9edd45f291e9f9b1f"),
            chain_u.path())
        self.assertEqual(7 * 80, os.stat(chain_u.path()).st_size)  # headers O..U

        for b in (chain_u, chain_l, chain_z):
            self.assertTrue(all([b.can_connect(b.read_header(i), False) for i in range(b.height())]))

        # best chain resolves hashes across the fork points it absorbed
        self.assertEqual(constants.net.GENESIS, chain_z.get_hash(0))
        self.assertEqual(hash_header(self.HEADERS['F']), chain_z.get_hash(5))
        self.assertEqual(hash_header(self.HEADERS['G']), chain_z.get_hash(6))
        self.assertEqual(hash_header(self.HEADERS['I']), chain_z.get_hash(8))
        self.assertEqual(hash_header(self.HEADERS['M']), chain_z.get_hash(9))
        self.assertEqual(hash_header(self.HEADERS['Z']), chain_z.get_hash(13))

    def test_doing_multiple_swaps_after_single_new_header(self):
        """A single new header can trigger a cascade of chain swaps."""
        blockchain.blockchains[constants.net.GENESIS] = chain_u = Blockchain(
            config=self.config, forkpoint=0, parent=None,
            forkpoint_hash=constants.net.GENESIS, prev_hash=None)
        open(chain_u.path(), 'w+').close()
        self._append_header(chain_u, self.HEADERS['A'])
        self._append_header(chain_u, self.HEADERS['B'])
        self._append_header(chain_u, self.HEADERS['C'])
        self._append_header(chain_u, self.HEADERS['D'])
        self._append_header(chain_u, self.HEADERS['E'])
        self._append_header(chain_u, self.HEADERS['F'])
        self._append_header(chain_u, self.HEADERS['O'])
        self._append_header(chain_u, self.HEADERS['P'])
        self._append_header(chain_u, self.HEADERS['Q'])
        self._append_header(chain_u, self.HEADERS['R'])
        self._append_header(chain_u, self.HEADERS['S'])
        self.assertEqual(1, len(blockchain.blockchains))
        self.assertEqual(0, len(os.listdir(os.path.join(self.data_dir, "forks"))))

        chain_l = chain_u.fork(self.HEADERS['G'])
        self._append_header(chain_l, self.HEADERS['H'])
        self._append_header(chain_l, self.HEADERS['I'])
        self._append_header(chain_l, self.HEADERS['J'])
        self._append_header(chain_l, self.HEADERS['K'])
        # now chain_u is best chain, but it's tied with chain_l
        self.assertEqual(2, len(blockchain.blockchains))
        self.assertEqual(1, len(os.listdir(os.path.join(self.data_dir, "forks"))))

        chain_z = chain_l.fork(self.HEADERS['M'])
        self._append_header(chain_z, self.HEADERS['N'])
        self._append_header(chain_z, self.HEADERS['X'])
        self.assertEqual(3, len(blockchain.blockchains))
        self.assertEqual(2, len(os.listdir(os.path.join(self.data_dir, "forks"))))

        # chain_z became best chain, do checks
        self.assertEqual(0, chain_z.forkpoint)
        self.assertEqual(None, chain_z.parent)
        self.assertEqual(constants.net.GENESIS, chain_z._forkpoint_hash)
        self.assertEqual(None, chain_z._prev_hash)
        self.assertEqual(os.path.join(self.data_dir, "blockchain_headers"), chain_z.path())
        self.assertEqual(12 * 80, os.stat(chain_z.path()).st_size)  # heights 0..11
        self.assertEqual(9, chain_l.forkpoint)
        self.assertEqual(chain_z, chain_l.parent)
        self.assertEqual(hash_header(self.HEADERS['J']), chain_l._forkpoint_hash)
        self.assertEqual(hash_header(self.HEADERS['I']), chain_l._prev_hash)
        self.assertEqual(
            os.path.join(
                self.data_dir, "forks",
                "fork2_9_67b0765c4090086b9dcecb70ba3d10e807df305cce403e4c6e4ca9edfe4d5a1d_a879ddca14a9d4d1c81ee90401910e7a186ee6511972aefa8791524a94463cf9"),
            chain_l.path())
        self.assertEqual(2 * 80, os.stat(chain_l.path()).st_size)  # headers J, K
        self.assertEqual(6, chain_u.forkpoint)
        self.assertEqual(chain_z, chain_u.parent)
        self.assertEqual(hash_header(self.HEADERS['O']), chain_u._forkpoint_hash)
        self.assertEqual(hash_header(self.HEADERS['F']), chain_u._prev_hash)
        self.assertEqual(
            os.path.join(
                self.data_dir, "forks",
                "fork2_6_61b274ea009f7566740eec9aeff7676c6dffb4136a1033427f5d7647e0fe0bed_a9e0ca750c5f9d2e2a22d858c2282d64936f672ab6030ba9edd45f291e9f9b1f"),
            chain_u.path())
        self.assertEqual(5 * 80, os.stat(chain_u.path()).st_size)  # headers O..S

        self.assertEqual(constants.net.GENESIS, chain_z.get_hash(0))
        self.assertEqual(hash_header(self.HEADERS['F']), chain_z.get_hash(5))
        self.assertEqual(hash_header(self.HEADERS['G']), chain_z.get_hash(6))
        self.assertEqual(hash_header(self.HEADERS['I']), chain_z.get_hash(8))
        self.assertEqual(hash_header(self.HEADERS['M']), chain_z.get_hash(9))
        self.assertEqual(hash_header(self.HEADERS['X']), chain_z.get_hash(11))

        for b in (chain_u, chain_l, chain_z):
            self.assertTrue(all([b.can_connect(b.read_header(i), False) for i in range(b.height())]))

    def get_chains_that_contain_header_helper(self, header: dict):
        """Look up the chains containing *header* by its height and hash."""
        height = header['block_height']
        header_hash = hash_header(header)
        return blockchain.get_chains_that_contain_header(height, header_hash)

    def test_get_chains_that_contain_header(self):
        """Which chains report containing a given header, before/after reorg."""
        blockchain.blockchains[constants.net.GENESIS] = chain_u = Blockchain(
            config=self.config, forkpoint=0, parent=None,
            forkpoint_hash=constants.net.GENESIS, prev_hash=None)
        open(chain_u.path(), 'w+').close()
        self._append_header(chain_u, self.HEADERS['A'])
        self._append_header(chain_u, self.HEADERS['B'])
        self._append_header(chain_u, self.HEADERS['C'])
        self._append_header(chain_u, self.HEADERS['D'])
        self._append_header(chain_u, self.HEADERS['E'])
        self._append_header(chain_u, self.HEADERS['F'])
        self._append_header(chain_u, self.HEADERS['O'])
        self._append_header(chain_u, self.HEADERS['P'])
        self._append_header(chain_u, self.HEADERS['Q'])
        chain_l = chain_u.fork(self.HEADERS['G'])
        self._append_header(chain_l, self.HEADERS['H'])
        self._append_header(chain_l, self.HEADERS['I'])
        self._append_header(chain_l, self.HEADERS['J'])
        self._append_header(chain_l, self.HEADERS['K'])
        self._append_header(chain_l, self.HEADERS['L'])
        chain_z = chain_l.fork(self.HEADERS['M'])

        # NOTE(review): the expected lists look best-chain-first -- confirm
        # the ordering contract of blockchain.get_chains_that_contain_header
        self.assertEqual([chain_l, chain_z, chain_u],
                         self.get_chains_that_contain_header_helper(self.HEADERS['A']))
        self.assertEqual([chain_l, chain_z, chain_u],
                         self.get_chains_that_contain_header_helper(self.HEADERS['C']))
        self.assertEqual([chain_l, chain_z, chain_u],
                         self.get_chains_that_contain_header_helper(self.HEADERS['F']))
        self.assertEqual([chain_l, chain_z],
                         self.get_chains_that_contain_header_helper(self.HEADERS['G']))
        self.assertEqual([chain_l, chain_z],
                         self.get_chains_that_contain_header_helper(self.HEADERS['I']))
        self.assertEqual([chain_z],
                         self.get_chains_that_contain_header_helper(self.HEADERS['M']))
        self.assertEqual([chain_l],
                         self.get_chains_that_contain_header_helper(self.HEADERS['K']))

        self._append_header(chain_z, self.HEADERS['N'])
        self._append_header(chain_z, self.HEADERS['X'])
        self._append_header(chain_z, self.HEADERS['Y'])
        self._append_header(chain_z, self.HEADERS['Z'])

        # chain_z is now longest; it leads the result lists
        self.assertEqual([chain_z, chain_l, chain_u],
                         self.get_chains_that_contain_header_helper(self.HEADERS['A']))
        self.assertEqual([chain_z, chain_l, chain_u],
                         self.get_chains_that_contain_header_helper(self.HEADERS['C']))
        self.assertEqual([chain_z, chain_l, chain_u],
                         self.get_chains_that_contain_header_helper(self.HEADERS['F']))
        self.assertEqual([chain_u],
                         self.get_chains_that_contain_header_helper(self.HEADERS['O']))
        self.assertEqual([chain_z, chain_l],
                         self.get_chains_that_contain_header_helper(self.HEADERS['I']))
def setUp(self):
    """Deserialize the class's sample header once per test run."""
    super().setUp()
    # self.valid_header is a hex-encoded raw header defined on the class;
    # 1296288 is the height it is deserialized at.
    raw_header = bfh(self.valid_header)
    self.header = deserialize_header(raw_header, 1296288)
def sign_transaction(self, tx, password):
    """Sign *tx* on a Digital Bitbox device.

    Builds per-input sighash digests, sends them to the device in batches
    of ``self.maxInputs``, optionally posts an "echo" to a paired mobile
    verification app, then attaches the returned signatures to *tx*.
    Signatures are added in place via ``tx.add_signature_to_txin``.

    NOTE(review): *password* is not used anywhere in this method --
    presumably kept for keystore interface compatibility; confirm.
    """
    if tx.is_complete():
        return

    try:
        p2pkhTransaction = True
        inputhasharray = []   # raw sighash bytes, index-aligned with tx.inputs()
        hasharray = []        # {'hash': hex, 'keypath': bip32 str} per input, for the device
        pubkeyarray = []      # change-output pubkeys the device should verify

        # Build hasharray from inputs
        for i, txin in enumerate(tx.inputs()):
            if txin.is_coinbase_input():
                self.give_error("Coinbase not supported")  # should never happen

            if txin.script_type != 'p2pkh':
                # any non-p2pkh input disables mobile verification below
                p2pkhTransaction = False

            my_pubkey, inputPath = self.find_my_pubkey_in_txinout(txin)
            if not inputPath:
                self.give_error("No matching pubkey for sign_transaction")  # should never happen
            inputPath = convert_bip32_intpath_to_strpath(inputPath)
            inputHash = sha256d(bfh(tx.serialize_preimage(i)))
            hasharray_i = {'hash': to_hexstr(inputHash), 'keypath': inputPath}
            hasharray.append(hasharray_i)
            inputhasharray.append(inputHash)

        # Build pubkeyarray from outputs
        for txout in tx.outputs():
            assert txout.address
            if txout.is_change:
                changePubkey, changePath = self.find_my_pubkey_in_txinout(txout)
                assert changePath
                changePath = convert_bip32_intpath_to_strpath(changePath)
                changePubkey = changePubkey.hex()
                pubkeyarray_i = {'pubkey': changePubkey, 'keypath': changePath}
                pubkeyarray.append(pubkeyarray_i)

        # Special serialization of the unsigned transaction for
        # the mobile verification app.
        # At the moment, verification only works for p2pkh transactions.
        if p2pkhTransaction:
            tx_copy = copy.deepcopy(tx)

            # monkey-patch method of tx_copy instance to change serialization
            def input_script(self, txin: PartialTxInput, *, estimate_size=False):
                if txin.script_type == 'p2pkh':
                    return Transaction.get_preimage_script(txin)
                raise Exception("unsupported type %s" % txin.script_type)
            tx_copy.input_script = input_script.__get__(tx_copy, PartialTransaction)
            tx_dbb_serialized = tx_copy.serialize_to_network()
        else:
            # We only need this for the signing echo / verification.
            tx_dbb_serialized = None

        # Build sign command; inputs are sent in batches of self.maxInputs
        dbb_signatures = []
        steps = math.ceil(1.0 * len(hasharray) / self.maxInputs)
        for step in range(int(steps)):
            hashes = hasharray[step * self.maxInputs:(step + 1) * self.maxInputs]

            msg = {
                "sign": {
                    "data": hashes,
                    "checkpub": pubkeyarray,
                },
            }
            if tx_dbb_serialized is not None:
                # 'meta' digest lets the mobile app match the echo to this tx
                msg["sign"]["meta"] = to_hexstr(sha256d(tx_dbb_serialized))
            msg = json.dumps(msg).encode('ascii')
            dbb_client = self.plugin.get_client(self)

            if not dbb_client.is_paired():
                raise Exception("Could not sign transaction.")

            reply = dbb_client.hid_send_encrypt(msg)
            if 'error' in reply:
                raise Exception(reply['error']['message'])

            if 'echo' not in reply:
                raise Exception("Could not sign transaction.")

            if self.plugin.is_mobile_paired() and tx_dbb_serialized is not None:
                # forward the echo (plus the serialized tx) to the mobile app
                reply['tx'] = tx_dbb_serialized
                self.plugin.comserver_post_notification(reply)

            if steps > 1:
                self.handler.show_message(
                    _("Signing large transaction. Please be patient ...") + "\n\n" +
                    _("To continue, touch the Digital Bitbox's blinking light for 3 seconds.") + " " +
                    _("(Touch {} of {})").format((step + 1), steps) + "\n\n" +
                    _("To cancel, briefly touch the blinking light or wait for the timeout.") + "\n\n")
            else:
                self.handler.show_message(
                    _("Signing transaction...") + "\n\n" +
                    _("To continue, touch the Digital Bitbox's blinking light for 3 seconds.") + "\n\n" +
                    _("To cancel, briefly touch the blinking light or wait for the timeout."))

            # Send twice, first returns an echo for smart verification
            reply = dbb_client.hid_send_encrypt(msg)
            self.handler.finished()

            if 'error' in reply:
                if reply["error"].get('code') in (600, 601):
                    # aborted via LED short touch or timeout
                    raise UserCancelled()
                raise Exception(reply['error']['message'])

            if 'sign' not in reply:
                raise Exception("Could not sign transaction.")

            dbb_signatures.extend(reply['sign'])

        # Fill signatures
        if len(dbb_signatures) != len(tx.inputs()):
            raise Exception("Incorrect number of transactions signed.")  # Should never occur
        for i, txin in enumerate(tx.inputs()):
            for pubkey_bytes in txin.pubkeys:
                if txin.is_complete():
                    break
                signed = dbb_signatures[i]
                if 'recid' in signed:
                    # firmware > v2.1.1: recover the pubkey from the recoverable sig
                    recid = int(signed['recid'], 16)
                    s = binascii.unhexlify(signed['sig'])
                    h = inputhasharray[i]
                    pk = ecc.ECPubkey.from_sig_string(s, recid, h)
                    pk = pk.get_public_key_hex(compressed=True)
                elif 'pubkey' in signed:
                    # firmware <= v2.1.1: pubkey is reported directly
                    pk = signed['pubkey']
                # NOTE(review): if a reply has neither 'recid' nor 'pubkey',
                # `pk` is stale or unbound here -- confirm firmware always
                # sets one of them.
                if pk != pubkey_bytes.hex():
                    continue
                sig_r = int(signed['sig'][:64], 16)
                sig_s = int(signed['sig'][64:], 16)
                sig = ecc.der_sig_from_r_and_s(sig_r, sig_s)
                sig = to_hexstr(sig) + '01'  # append SIGHASH_ALL byte
                tx.add_signature_to_txin(txin_idx=i,
                                         signing_pubkey=pubkey_bytes.hex(),
                                         sig=sig)
    except UserCancelled:
        raise
    except BaseException as e:
        self.give_error(e, True)
    else:
        _logger.info(f"Transaction is_complete {tx.is_complete()}")
def test_add_number_to_script(self):
    """Minimal number encodings per BIP-62.

    https://github.com/bitcoin/bips/blob/master/bip-0062.mediawiki#numbers
    Covers the OP_0..OP_16 shortcuts, OP_1NEGATE, and minimally-encoded
    little-endian pushes of one to four bytes (sign bit in the top byte).
    """
    cases = [
        # opcode shortcuts
        (0, bytes([opcodes.OP_0])),
        (7, bytes([opcodes.OP_7])),
        (16, bytes([opcodes.OP_16])),
        (-1, bytes([opcodes.OP_1NEGATE])),
        # 1-byte pushes
        (-127, bfh('01ff')),
        (-2, bfh('0182')),
        (17, bfh('0111')),
        (127, bfh('017f')),
        # 2-byte pushes
        (-32767, bfh('02ffff')),
        (-128, bfh('028080')),
        (128, bfh('028000')),
        (32767, bfh('02ff7f')),
        # 3-byte pushes
        (-8388607, bfh('03ffffff')),
        (-32768, bfh('03008080')),
        (32768, bfh('03008000')),
        (8388607, bfh('03ffff7f')),
        # 4-byte pushes
        (-2147483647, bfh('04ffffffff')),
        (-8388608, bfh('0400008080')),
        (8388608, bfh('0400008000')),
        (2147483647, bfh('04ffffff7f')),
    ]
    for number, expected_script in cases:
        self.assertEqual(add_number_to_script(number), expected_script)
def test_push_script(self):
    """Minimal push operators per BIP-62.

    https://github.com/bitcoin/bips/blob/master/bip-0062.mediawiki#push-operators
    Covers the opcode shortcuts, direct pushes up to 75 bytes, and the
    OP_PUSHDATA1 / OP_PUSHDATA2 length-prefixed forms.
    """
    cases = [
        # single-byte opcode shortcuts
        ('', bh2u(bytes([opcodes.OP_0]))),
        ('07', bh2u(bytes([opcodes.OP_7]))),
        ('10', bh2u(bytes([opcodes.OP_16]))),
        ('81', bh2u(bytes([opcodes.OP_1NEGATE]))),
        # direct pushes (length byte + data), up to 75 bytes
        ('11', '0111'),
        (75 * '42', '4b' + 75 * '42'),
        # OP_PUSHDATA1: 1-byte length prefix
        (76 * '42', bh2u(bytes([opcodes.OP_PUSHDATA1]) + bfh('4c' + 76 * '42'))),
        (100 * '42', bh2u(bytes([opcodes.OP_PUSHDATA1]) + bfh('64' + 100 * '42'))),
        (255 * '42', bh2u(bytes([opcodes.OP_PUSHDATA1]) + bfh('ff' + 255 * '42'))),
        # OP_PUSHDATA2: 2-byte little-endian length prefix
        (256 * '42', bh2u(bytes([opcodes.OP_PUSHDATA2]) + bfh('0001' + 256 * '42'))),
        (520 * '42', bh2u(bytes([opcodes.OP_PUSHDATA2]) + bfh('0802' + 520 * '42'))),
    ]
    for payload_hex, expected_script in cases:
        self.assertEqual(push_script(payload_hex), expected_script)