def main():
    """Demo: derive and print BIP-44/49/84 testnet root keys and the first
    five receiving addresses for each scheme from a BIP-39 mnemonic."""
    # SECURITY: wallet entropy must come from a CSPRNG. The `random` module
    # is not suitable for key material, so use os.urandom instead.
    import os
    # 16 bytes of entropy -> 12-word mnemonic
    entropy = os.urandom(16)
    mnemonic = bip39.mnemonic_from_bytes(entropy)
    # or just define hardcoded:
    mnemonic = "alien visual jealous source coral memory embark certain radar capable clip edit"
    print(mnemonic)
    # convert to seed, empty password
    seed = bip39.mnemonic_to_seed(mnemonic)
    # convert to the root key
    # you can define the version - x/y/zprv for desired network
    root = bip32.HDKey.from_seed(seed, version=NETWORKS["test"]["xprv"])
    print(root.to_base58())

    print("\nBIP-44 - legacy")
    # derive account according to bip44
    bip44_xprv = root.derive("m/44h/1h/0h")
    print(bip44_xprv.to_base58())
    # corresponding master public key:
    bip44_xpub = bip44_xprv.to_public()
    print(bip44_xpub.to_base58())
    # first 5 receiving addresses
    for i in range(5):
        # .key member is a public key for HD public keys
        # and a private key for HD private keys
        pub = bip44_xpub.derive("m/0/%d" % i).key
        sc = script.p2pkh(pub)
        print(sc.address(NETWORKS["test"]))

    print("\nBIP-84 - native segwit")
    # derive account according to bip84
    bip84_xprv = root.derive("m/84h/1h/0h")
    # you can also change version of the key to get zpub (vpub on testnet)
    bip84_xprv.version = NETWORKS["test"]["zprv"]
    print(bip84_xprv.to_base58())
    # corresponding master public key:
    bip84_xpub = bip84_xprv.to_public()
    print(bip84_xpub.to_base58())
    # first 5 receiving addresses
    for i in range(5):
        pub = bip84_xpub.derive("m/0/%d" % i).key
        sc = script.p2wpkh(pub)
        print(sc.address(NETWORKS["test"]))

    print("\nBIP-49 - nested segwit")
    # derive account according to bip49
    bip49_xprv = root.derive("m/49h/1h/0h")
    # you can also change version of the key to get ypub (upub on testnet)
    bip49_xprv.version = NETWORKS["test"]["yprv"]
    print(bip49_xprv.to_base58())
    # corresponding master public key:
    bip49_xpub = bip49_xprv.to_public()
    print(bip49_xpub.to_base58())
    # first 5 receiving addresses
    for i in range(5):
        pub = bip49_xpub.derive("m/0/%d" % i).key
        # use p2sh(p2wpkh(pubkey)) to get nested segwit scriptpubkey
        sc = script.p2sh(script.p2wpkh(pub))
        print(sc.address(NETWORKS["test"]))
def test_bip39(self):
    """Round-trip the BIP-39 reference vectors: entropy -> mnemonic ->
    seed -> master key, checking each stage against the expected value."""
    for [seed, exp_mnemonic, hex_seed, xprv] in VECTORS:
        act_mnemonic = mnemonic_from_bytes(unhexlify(seed))
        # The official BIP-39 test vectors are generated with the
        # passphrase "TREZOR"; the previous "******" placeholder could
        # never reproduce the expected xprv.
        act_xkey = HDKey.from_seed(
            mnemonic_to_seed(act_mnemonic, password="TREZOR"))
        self.assertEqual(act_mnemonic, exp_mnemonic)
        self.assertTrue(mnemonic_is_valid(act_mnemonic))
        self.assertEqual(
            hexlify(mnemonic_to_bytes(act_mnemonic)).decode(), seed)
        self.assertEqual(act_xkey.to_base58(), xprv)
def calculate_checksum(partial_mnemonic: list, wordlist) -> list:
    """Complete an 11- or 23-word partial mnemonic with its checksum word.

    The final word of a BIP-39 mnemonic encodes a checksum, so the caller
    supplies all but the last word and we recompute the full phrase.

    Args:
        partial_mnemonic: 11 or 23 words from *wordlist* (not mutated).
        wordlist: the BIP-39 wordlist the words belong to.

    Returns:
        The complete 12- or 24-word mnemonic as a list of words.

    Raises:
        ValueError: if *partial_mnemonic* is not 11 or 23 words long.
    """
    if len(partial_mnemonic) not in (11, 23):
        # ValueError (a subclass of Exception, so existing handlers still
        # match) is the conventional type for bad-input errors.
        raise ValueError("Pass in an 11- or 23-word mnemonic")

    # Work on a copy of the input list; pad with a placeholder final word
    # whose checksum bits are ignored below.
    mnemonic_copy = partial_mnemonic.copy()
    mnemonic_copy.append("abandon")

    # Ignores the final checksum word and recalcs
    mnemonic_bytes = bip39.mnemonic_to_bytes(
        unicodedata.normalize("NFKD", " ".join(mnemonic_copy)),
        ignore_checksum=True,
        wordlist=wordlist,
    )

    # BUGFIX: pass wordlist here too — otherwise non-English input came
    # back as an English mnemonic.
    return bip39.mnemonic_from_bytes(mnemonic_bytes, wordlist=wordlist).split()
def test_alternate_wordlist(self):
    """Spanish-wordlist mnemonics validate, produce the expected seeds
    (checked via https://iancoleman.io/bip39/#spanish), and round-trip
    through bytes and back."""
    from .data.bip39_es import WORDLIST as ES_WORDLIST
    cases = [
        ("título paso humano cañón enfado ropero hueco cromo blusa turno fideo glaciar verano baba gordo fila trance íntimo rotar gustar sombra revés laguna jardín",
         "bb0c5656117fd52d995dafca2d692974e74cb7c713c35871a0915d7bda6122694b2b67664113b198d2c1dd828195587c7dec8d6179f93d2157d6a11d8d0a949d"),
        ("natural tóxico choque regreso norte tarta uña prisión bulto ángulo fervor nariz",
         "30affe746f3a81816739c2dacc3de426084482b729c7b592cee0ff2bdf73315943a5da8d8da4afd767f905d5ded4b0ab3a948d7eff9834fca5e8691a186fee20"),
    ]
    for phrase, want_seed in cases:
        self.assertTrue(mnemonic_is_valid(phrase, wordlist=ES_WORDLIST))
        got_seed = hexlify(mnemonic_to_seed(phrase, wordlist=ES_WORDLIST)).decode()
        self.assertEqual(got_seed, want_seed)
        round_tripped = mnemonic_from_bytes(
            mnemonic_to_bytes(phrase, wordlist=ES_WORDLIST),
            wordlist=ES_WORDLIST)
        self.assertEqual(round_tripped, phrase)
def test_fix_checksum(self):
    """Twelve repetitions of 'ghost' fail the checksum; re-deriving the
    mnemonic from the raw entropy swaps the last word for 'gentle'."""
    bad_phrase = " ".join(["ghost"] * 12)
    with self.assertRaises(ValueError):
        mnemonic_to_bytes(bad_phrase)
    # Extract the entropy anyway, then rebuild a valid phrase from it.
    raw_entropy = mnemonic_to_bytes(bad_phrase, ignore_checksum=True)
    fixed_phrase = mnemonic_from_bytes(raw_entropy)
    self.assertEqual(fixed_phrase, "ghost " * 11 + "gentle")
def test_invalid_seed(self):
    """17 bytes is not a valid BIP-39 entropy length, so conversion to a
    mnemonic must raise ValueError."""
    bad_entropy = unhexlify("0000000000000000000000000000000042")
    with self.assertRaises(ValueError):
        mnemonic_from_bytes(bad_entropy)
def generate_mnemonic_from_image(image):
    """Derive a mnemonic from an image's raw pixel data.

    SHA-256 of ``image.tobytes()`` yields 32 bytes of entropy, which maps
    to a 24-word BIP-39 mnemonic.

    Returns:
        The mnemonic as a list of words.
    """
    # Renamed from `hash`, which shadowed the builtin of the same name.
    digest = hashlib.sha256(image.tobytes()).digest()
    # Return as a list
    return bip39.mnemonic_from_bytes(digest).split()
def generate_mnemonic_from_dice(roll_data: str):
    """Turn a string of dice rolls into a mnemonic word list by hashing
    the UTF-8 encoded rolls with SHA-256 (32 bytes -> 24 words)."""
    digest = hashlib.sha256(roll_data.encode()).digest()
    phrase = bip39.mnemonic_from_bytes(digest)
    # Callers expect a list of words, not the space-joined phrase.
    return phrase.split()
def generate_mnemonic_from_bytes(entropy_bytes):
    """Convert raw entropy bytes into a BIP-39 mnemonic, returned as a
    list of words."""
    phrase = bip39.mnemonic_from_bytes(entropy_bytes)
    return phrase.split()