Example #1
def test_consume():
    """
    Test that every word in the wordlist appears at least once in a set of at
    most 3000 uniformly distributed random mnemonics. Only as many mnemonics
    as necessary to confirm this are actually generated.
    """
    # draw random 20-byte (15-word) mnemonics until every word has been seen
    seen = set()
    wl = frozenset(WORDLISTS["english"])
    i = 0
    while seen != wl:
        ent = os.urandom(20)
        m = Mnemonic(ent)
        seen |= set(m)
        i += 1
        assert i <= 3000, "Wordlist consumption timeout"
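As a sanity check on the 3000-mnemonic budget (not part of the original test): by the coupon-collector estimate, seeing all 2048 English words from uniform draws takes about 2048 * H(2048) ≈ 17,000 word draws on average, i.e. roughly 1,100 fifteen-word mnemonics, so the cap leaves ample headroom. The checksum word is not perfectly uniform, but that barely affects the estimate.

# Back-of-the-envelope coupon-collector estimate (illustrative, not from the source).
n = 2048                                    # words in the English wordlist
harmonic = sum(1 / k for k in range(1, n + 1))
expected_draws = n * harmonic               # ~17,000 uniform word draws
expected_mnemonics = expected_draws / 15    # ~1,100 fifteen-word mnemonics
print(round(expected_mnemonics), "<< 3000")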
Example #2
def test_checksum_fail():
    for l in {3 * x for x in range(4, 9)}:  # the valid word counts: 12, 15, 18, 21, 24
        x = bad_words[:l]
        with pytest.raises(ValueError, match="Bad mnemonic checksum"):
            Mnemonic(x)
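For reference (not from the source): the set {3 * x for x in range(4, 9)} is {12, 15, 18, 21, 24}, which matches the BIP39 word counts, since each supported entropy size ENT carries an ENT/32-bit checksum and each word encodes 11 bits.

# BIP39: word count = (ENT + ENT/32) / 11 for ENT in {128, 160, 192, 224, 256} bits.
valid_lengths = {(ent + ent // 32) // 11 for ent in (128, 160, 192, 224, 256)}
assert valid_lengths == {3 * x for x in range(4, 9)}  # {12, 15, 18, 21, 24}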
Example #3
def test_badlen_fail():
    l = set(range(1, 30)) - {3 * x for x in range(4, 9)}
    for ll in l:
        x = bad_words[:ll]
        with pytest.raises(ValueError, match="Incorrect Mnemonic"):
            Mnemonic(x)
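Examples #2 and #3 both rely on a module-level bad_words sequence that this page does not show; the slices suggest it is a sequence of at least 29 individual wordlist words. A hedged reconstruction, purely as a sketch: any such sequence satisfies the bad-length test, and a random draw also fails every valid-length checksum with high probability (a length-L prefix passes by accident with probability 2**-(L/3)), so rejection sampling settles almost immediately. The _is_valid and _make_bad_words helpers below are hypothetical and reuse only the constructor behaviour shown above.

import random

def _is_valid(words):
    # Hypothetical helper: relies only on Mnemonic raising ValueError for bad input.
    try:
        Mnemonic(words)
        return True
    except ValueError:
        return False

def _make_bad_words(wordlist, length=29, seed=0):
    # Hypothetical reconstruction of `bad_words`; not the library's actual fixture.
    rng = random.Random(seed)  # deterministic so test runs are repeatable
    while True:
        candidate = tuple(rng.choices(list(wordlist), k=length))
        if not any(_is_valid(candidate[:l]) for l in (12, 15, 18, 21, 24)):
            return candidate

bad_words = _make_bad_words(WORDLISTS["english"])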
Example #4
def test_int_constructor():
    for n in range(16, 30, 4):
        m = Mnemonic(n)
        assert all(x in m.wordlist for x in m)
        assert len(m) == math.ceil(n * 8 / 11)
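A note on the expected length (again, not from the source): ceil(n * 8 / 11) works here because, for every supported entropy size, the ENT/32-bit checksum is exactly the padding needed to reach the next multiple of 11 bits.

import math

# For ENT in {128, 160, 192, 224, 256} bits, (ENT + ENT/32) / 11 == ceil(ENT / 11).
for n in range(16, 33, 4):  # 16..32 bytes; the test above stops at 28
    ent = 8 * n
    assert (ent + ent // 32) // 11 == math.ceil(ent / 11)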
Example #5
def test_default_constructor():
    m = Mnemonic()
    assert all(x in m.wordlist for x in m)
    assert len(m) == 15
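The expected length of 15 suggests the no-argument constructor defaults to 20 bytes (160 bits) of entropy, the same size drawn in Example #1, since 15 words encode 160 entropy bits plus a 5-bit checksum. This is an inference from the assertion above, not something the page states.

assert 15 * 11 == 160 + 160 // 32  # 165 bits = 160 entropy + 5 checksum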
Example #6
def test_string_constructor(testvec):
    wl, passphrase, vectors = testvec
    for tv in vectors:
        m = Mnemonic(tv["mnemonic"], wl)
        assert m == tuple(normalize("NFKD", tv["mnemonic"]).split())
        assert m.to_seed(passphrase).hex() == tv["seed"]
Example #7
def test_entropy_constructor(testvec):
    wl, passphrase, vectors = testvec
    for tv in vectors:
        m = Mnemonic(bytes.fromhex(tv["entropy"]), wl)
        assert m == tuple(normalize("NFKD", tv["mnemonic"]).split())
        assert m.to_seed(passphrase).hex() == tv["seed"]
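Examples #6 and #7 both take a testvec fixture that the page does not include. A hedged sketch of its assumed shape follows: the well-known BIP39 reference vectors use the passphrase "TREZOR", but the file name vectors.json, the dict keys, and whether wl is a language name or the wordlist itself are assumptions inferred from how the tests index tv.

import json
import pytest

@pytest.fixture(params=["english"])
def testvec(request):
    # Hypothetical fixture: yields (wordlist identifier, passphrase, vector dicts).
    # Assumes a local vectors.json shaped like
    # {"english": [{"entropy": ..., "mnemonic": ..., "seed": ...}, ...]}.
    with open("vectors.json") as f:
        data = json.load(f)
    return request.param, "TREZOR", data[request.param]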