def __new__(cls, x):
    """Create a PlainText from an integer or a byte string.

    Raises ValueError if the value is too large to fit under the modulus p,
    and TypeError for any other argument type.
    """
    # TODO: the > comparison may leak timing information about plaintexts
    modulus = bytes2int(p, endian='little')
    if isinstance(x, Integral):
        too_large = x > modulus
    elif isinstance(x, bytes):
        too_large = bytes2int(x, endian='little') > modulus
    else:
        raise TypeError(
            "Can only instantiate PlainText instances from integers or bytes"
        )
    if too_large:
        raise ValueError("message too large/long to fit into a plaintext")
    return super(PlainText, cls).__new__(cls, x)
def encode(s, compact=False):
    """From a byte string, produce a list of words that durably encodes the string.

    s: the byte string to be encoded
    compact: instead of using the length encoding scheme, pad by prepending a 1 bit

    The words in the encoding dictionary were chosen to be common and unambiguous.
    The encoding also includes a checksum. The encoding is constructed so that
    common errors are extremely unlikely to produce a valid encoding.
    """
    if not isinstance(s, bytes):
        raise TypeError("mnemonic.encode can only encode byte strings")

    # append a Keccak-derived checksum whose length scales with the message
    k = Keccak()
    k.absorb(s)
    checksum_length = max(1, (len(s) - 1).bit_length())
    checksum = k.squeeze(checksum_length)
    length = chr(checksum_length) if compact else encode_varint(
        len(s), endian='little')
    s += checksum
    s += length

    # Emit the padded string as base-len(words) "digits"; successive digits
    # are accumulated into word_index so each output word depends on all
    # preceding digits.
    # N.B. the word count is derived exactly by repeated division rather than
    # via int(floor(log(i, len(words)) + 1)), which can be off by one for
    # large inputs due to floating-point rounding (and fails outright on 0).
    base = len(words)
    word_index = 0
    i = bytes2int(s)
    retval = []
    while i > 0:
        word_index = (word_index + i % base) % base
        retval.append(words[word_index])
        i //= base
    return tuple(retval)
def randomart(s, height=9, width=17, length=64, border=True, tag=''):
    """Produce a easy to compare visual representation of a string.

    Implements the "drunken bishop" random walk described in
    http://www.dirk-loss.de/sshvis/drunken_bishop.pdf, substituting Keccak
    for MD5.

    s: the string to create a representation of
    height: (optional) the height of the representation to generate, default 9
    width: (optional) the width of the representation to generate, default 17
    length: (optional) the length of the random walk, essentially how many
        points are plotted in the representation, default 64
    border: (optional) whether to put a border around the representation,
        default True
    tag: (optional) a short string to be incorporated into the border, does
        nothing if border is False, defaults to the empty string
    """
    hasher = Keccak()
    hasher.absorb(s)
    # we reverse the endianness so that increasing length produces a radically
    # different randomart
    walk = bytes2int(reversed(hasher.squeeze(int(ceil(length / 4.0)))))

    field = [[0] * width for _ in xrange(height)]
    start = (height // 2, width // 2)
    row, col = start
    moves = ((-1, -1), (-1, 1), (1, -1), (1, 1))
    for step in xrange(length):
        # consume two bits of the walk per step to pick a diagonal direction,
        # clamping the position to the board
        d_row, d_col = moves[(walk >> (step * 2)) % 4]
        row = min(max(row + d_row, 0), height - 1)
        col = min(max(col + d_col, 0), width - 1)
        field[row][col] += 1
    field[start[0]][start[1]] = 15  # 'S': start marker
    field[row][col] = 16            # 'E': end marker

    chars = ' .o+=*BOX@%&#/^SE'
    body_rows = [''.join(chars[cell] for cell in line) for line in field]
    if not border:
        return '\n'.join(body_rows)

    if len(tag) > width - 2:
        tag = tag[:width-2]
    if tag:
        tag_pad_len = (width - len(tag) - 2) / 2.0
        top = '+' + ('-' * int(floor(tag_pad_len))) \
            + '[' + tag + ']' \
            + ('-' * int(ceil(tag_pad_len))) + '+'
    else:
        top = '+' + ('-' * width) + '+'
    bottom = '+' + ('-' * width) + '+'
    framed = '\n'.join('|' + line + '|' for line in body_rows)
    return top + '\n' + framed + '\n' + bottom
def encode(s, compact=False):
    """From a byte string, produce a list of words that durably encodes the string.

    s: the byte string to be encoded
    compact: instead of using the length encoding scheme, pad by prepending a 1 bit

    The words in the encoding dictionary were chosen to be common and unambiguous.
    The encoding also includes a checksum. The encoding is constructed so that
    common errors are extremely unlikely to produce a valid encoding.
    """
    if not isinstance(s, bytes):
        raise TypeError("mnemonic.encode can only encode byte strings")

    # append a Keccak-derived checksum whose length scales with the message
    k = Keccak()
    k.absorb(s)
    checksum_length = max(1, (len(s)-1).bit_length())
    checksum = k.squeeze(checksum_length)
    length = chr(checksum_length) if compact else encode_varint(len(s), endian='little')
    s += checksum
    s += length

    # Emit the padded string as base-len(words) "digits"; successive digits
    # are accumulated into word_index so each output word depends on all
    # preceding digits.
    # N.B. the word count is derived exactly by repeated division rather than
    # via int(floor(log(i, len(words)) + 1)), which can be off by one for
    # large inputs due to floating-point rounding (and fails outright on 0).
    base = len(words)
    word_index = 0
    i = bytes2int(s)
    retval = []
    while i > 0:
        word_index = (word_index + i % base) % base
        retval.append(words[word_index])
        i //= base
    return tuple(retval)
def encode(s, compact=False, checksum=True):
    """From a byte string, produce a list of words that durably encodes the string.

    s: the byte string to be encoded
    compact: instead of using the length encoding scheme, pad by appending a
        single byte
    checksum: append a checksum to the byte string before encoding

    The words in the encoding dictionary were chosen to be common and unambiguous.
    The encoding is constructed so that common errors are extremely unlikely to
    produce a valid encoding.
    """
    if not isinstance(s, bytes):
        raise TypeError("mnemonic.encode can only encode byte strings")
    s = pad_and_checksum(s, compact, checksum)

    # Emit the padded string as base-len(words) "digits"; successive digits
    # are accumulated into word_index so each output word depends on all
    # preceding digits.
    # N.B. the word count is derived exactly by repeated division rather than
    # via int(floor(log(i, len(words)) + 1)), which can be off by one for
    # large inputs due to floating-point rounding (and fails outright on 0).
    base = len(words)
    word_index = 0
    i = bytes2int(s, endian='little')
    retval = []
    while i > 0:
        word_index = (word_index + i % base) % base
        retval.append(words[word_index])
        i //= base
    return tuple(retval)
def sign(self, message):
    """Sign message with this tree key, returning a LamportSignature that
    chains one-time keys from a pseudorandomly chosen leaf up to this key."""
    # calculate the tree path, a bit-string (represented as an integer)
    # declaring whether to go right or left at each node
    #
    # N.B. for our security guarantees to hold, the path must be
    # unpredictable by an attacker and deterministically produced from the
    # message. A path can be reused once before our security guarantees
    # are invalidated
    k = keccak.Keccak(**_keccak_args)
    # absorb a padded hash of this key, then the message hash, so the path
    # depends on both the key and the message
    k.update(k.pad10star1(hash(self, tree_height//8), k.r))
    k.update(hash(message, tree_height//8))
    tree_path = k.squeeze(tree_height//8)
    tree_path = bytes2int(tree_path)
    # the path must fit within tree_height bits
    assert tree_path >> tree_height == 0
    keys = [None] * tree_height
    signatures = [None] * tree_height
    # leaf key (derived from the full path) signs the actual message
    keys[0] = LamportTreePrivKey.from_seed_index(self, tree_height, tree_path)
    signatures[0] = keys[0].sign(message)
    # each interior key signs the serialized public key of the key one level
    # below it; the path is shifted right one bit per level
    for i in xrange(1, len(keys)-1):
        keys[i] = LamportTreePrivKey.from_seed_index(self, tree_height - i,
                                                     tree_path >> i)
        signatures[i] = keys[i].sign(
            LamportTreePubKey.from_privkey(keys[i-1]).serialize())
    # finally, this key itself certifies the top-most derived key
    keys[-1] = self
    signatures[-1] = self._sign(
        LamportTreePubKey.from_privkey(keys[-2]).serialize())
    return LamportSignature(signatures)
def __new__(cls, x):
    """Create an Element (32 little-endian bytes, reduced mod p) from an
    integer or a 32-byte string.

    Raises ValueError for byte strings of the wrong length and TypeError for
    any other argument type.
    """
    modulus = bytes2int(p, endian='little')
    if isinstance(x, Integral):
        value = x % modulus
    elif isinstance(x, bytes):
        if len(x) != 32:
            raise ValueError(
                "When instantiating Element from bytes, argument must be of length 32"
            )
        value = bytes2int(x, endian='little') % modulus
    else:
        raise TypeError(
            "Can only instantiate Element instances from integers or bytes"
        )
    # canonical representation: exactly 32 little-endian bytes
    normalized = int2bytes(value, length=32, endian='little')
    return super(Element, cls).__new__(cls, normalized)
def sign(self, message):
    """Produce a signature: the public key followed by, for each bit of the
    message digest, the private subkey selected by that bit."""
    digest = hash(message, out_len=num_subkey_pairs//8)
    # two subkeys per digest bit, each of which is a pair of entries in self
    assert len(self) == len(digest)*16
    digest_int = bytes2int(digest)
    pubkey_type = globals()[self.pubkey_type]
    parts = [pubkey_type.from_privkey(self)]
    for bit in xrange(len(digest)*8):
        # subkey pair `bit` contributes entry 2*bit or 2*bit+1 depending on
        # whether the corresponding digest bit is 0 or 1
        parts.append(self[(bit << 1) | ((digest_int >> bit) % 2)])
    signature_type = globals()[self.signature_type]
    return signature_type(tuple(parts))
def getrandbits(self, n):
    """Generate a long integer with n random bits"""
    # top up the bit cache from the sponge until it holds at least n bits;
    # new bits are spliced in above the existing cached bits
    shortfall = n - self._cache_len
    fetch = max(int(ceil(shortfall / 8.0)), 0)
    self._cache |= bytes2int(self.k.squeeze(fetch)) << self._cache_len
    self._cache_len += fetch * 8
    # hand out the low n bits and drop them from the cache
    result = self._cache & ((1 << n) - 1)
    self._cache >>= n
    self._cache_len -= n
    return result
def randomart(s, height=9, width=17, length=64, border=True, tag=''):
    """Produce a easy to compare visual representation of a string.

    Implements the "drunken bishop" random walk described in
    http://www.dirk-loss.de/sshvis/drunken_bishop.pdf, substituting Keccak
    for MD5.

    s: the string to create a representation of
    height: (optional) the height of the representation to generate, default 9
    width: (optional) the width of the representation to generate, default 17
    length: (optional) the length of the random walk, essentially how many
        points are plotted in the representation, default 64
    border: (optional) whether to put a border around the representation,
        default True
    tag: (optional) a short string to be incorporated into the border, does
        nothing if border is False, defaults to the empty string
    """
    hasher = Keccak()
    hasher.absorb(s)
    # we reverse the endianness so that increasing length produces a radically
    # different randomart
    walk = bytes2int(reversed(hasher.squeeze(int(ceil(length / 4.0)))),
                     endian='little')

    field = [[0] * width for _ in xrange(height)]
    start = (height // 2, width // 2)
    row, col = start
    moves = ((-1, -1), (-1, 1), (1, -1), (1, 1))
    for step in xrange(length):
        # two bits of the walk per step select a diagonal direction; the
        # position is clamped to the board
        d_row, d_col = moves[(walk >> (step * 2)) % 4]
        row = min(max(row + d_row, 0), height - 1)
        col = min(max(col + d_col, 0), width - 1)
        field[row][col] += 1
    field[start[0]][start[1]] = 15  # 'S': start marker
    field[row][col] = 16            # 'E': end marker

    chars = ' .o+=*BOX@%&#/^SE'
    body_rows = [''.join(chars[cell] for cell in line) for line in field]
    if not border:
        return '\n'.join(body_rows)

    if len(tag) > width - 2:
        tag = tag[:width - 2]
    if tag:
        tag_pad_len = (width - len(tag) - 2) / 2.0
        top = '+' + ('-'*int(floor(tag_pad_len))) \
            + '[' + tag + ']' \
            + ('-'*int(ceil(tag_pad_len))) + '+'
    else:
        top = '+' + ('-' * width) + '+'
    bottom = '+' + ('-' * width) + '+'
    framed = '\n'.join('|' + line + '|' for line in body_rows)
    return top + '\n' + framed + '\n' + bottom
def verify(self, message):
    """Verify this signature against message.

    Raises ValueError if the signature does not match; returns None on
    success.
    """
    pubkey, privkeys = self[0], self[1:]
    h_message = hash(message, out_len=num_subkey_pairs//8)
    assert len(self)-1 == len(h_message)*8
    i_message = bytes2int(h_message)
    # for each digest bit, the public subkey that the revealed private
    # subkey must hash to
    selected_pubsubkeys = [
        pubkey[(i << 1) | ((i_message >> i) % 2)]
        for i in xrange(len(h_message)*8)
    ]
    # TODO: perhaps move some of this functionality into a subkey method?
    # Check every pair and accumulate the verdict instead of raising at the
    # first mismatch, so verification time does not reveal the position of
    # the first bad subkey (addresses the timing-attack TODO; note the
    # individual != comparisons are still not constant-time).
    bad = False
    for x, y in izip(imap(lambda subkey: hash(subkey, out_len=key_size),
                          imap(methodcaller('serialize'), privkeys)),
                     selected_pubsubkeys):
        bad |= (x != y)
    if bad:
        raise ValueError('Bad signature')
def string_to_element(s):
    """Injectively encode a short byte string (or an integer) as an Element.

    Layout of the 31-byte intermediate: payload, null padding, then the
    varint-encoded payload length, all read little-endian.
    """
    if not isinstance(s, bytes):
        s = int2bytes(s, endian='little')
    if len(s) > 30:
        raise ValueError("Argument too large to fit into an element")
    encoded_length = encode_varint(len(s), endian='little')
    padding = '\x00' * (31 - len(encoded_length) - len(s))
    value = bytes2int(s + padding + encoded_length, endian='little')
    # NOTE(review): p is used directly as an integer here, while other code
    # in this project calls bytes2int(p, ...) — presumably this module binds
    # p as an int; confirm.
    if value >= p:
        raise ValueError("Argument too large to fit into an element")
    return Element(value)
def __init__(self, c, key, cache=True, bucket_size=5):
    """Constructor:
    c: the ciphertext, represented as an integer type or byte string
    key: the DamgaardJurik instance (supplying the modulus n) under which
        this ciphertext was generated
    cache: (optional) if True, we cache the powers of the ciphertext
        this speeds up the square-and-multiply exponentiation used if lots
        of homomorphic manipulation takes place, the default is True
    bucket_size: (optional) only has an effect if cache=True, number of
        bits per bucket in the cache of powers, default 5
    """
    if isinstance(c, bytes):
        c = bytes2int(c)
    elif isinstance(c, (Integral, mpz_type)):
        pass
    else:
        raise TypeError('Expected argument c to be an integer')

    if not isinstance(key, DamgaardJurik):
        raise TypeError(
            'Expected argument key to be a DamgaardJurik instance')
    self.key = key
    # s is the exponent such that the ciphertext lives mod n**(s+1).
    # NOTE(review): computed via floating-point log, which could be off by
    # one when c is very near a power of n — confirm acceptable.
    s = int(ceil(log(int(c), int(self.key.n)) - 1))
    ns1 = self.key.n**(s + 1)
    if has_gmpy:
        # use gmpy's mpz arithmetic when available
        c = mpz(c)
        ns1 = mpz(ns1)
    self.c = c
    self.s = s
    self.ns1 = ns1
    if bucket_size > 8:
        import warnings
        warnings.warn(
            "Setting bucket_size > 8 allows timing attacks based on Python's handling of small integers"
        )
    self.bucket_size = bucket_size
    if cache:
        # lazily-filled table of ciphertext powers: one row per bucket_size
        # bits of a potential exponent, 2**bucket_size entries per row
        self.cache = [[
            None for _ in xrange((2**self.bucket_size))
        ] for __ in xrange(
            int(ceil(self.ns1.bit_length() / float(self.bucket_size))))]
    else:
        self.cache = None
def encode_unordered(s, compact=False, checksum=True):
    """From a byte string, produce an unordered set of words that durably
    encodes the string.

    s: the byte string to be encoded
    compact: instead of using the length encoding scheme, pad by appending a
        single byte
    checksum: append a checksum to the byte string before encoding

    The words in the encoding dictionary were chosen to be common and
    unambiguous. The encoding is constructed so that common errors are
    extremely unlikely to produce a valid encoding.
    """
    n = bytes2int(pad_and_checksum(s, compact, checksum), endian='little')

    # Binary-search for the number of words needed: find the length whose
    # rank-offset window [get_rank_length_offset(length),
    # get_rank_length_offset(length + 1)) contains n.
    upper = len(words) + 1
    lower = 0
    minn = n + 1  # sentinels chosen to force at least one iteration
    maxn = n
    while minn > n or n >= maxn:
        length = (upper + lower) // 2
        minn = get_rank_length_offset(length)
        maxn = get_rank_length_offset(length + 1)
        if n >= maxn:
            lower = length
        else:  # n < minn
            upper = length
    n -= get_rank_length_offset(length)

    # "Unrank" n into a length-sized combination of distinct word indices:
    # for each position (largest first), binary-search for the index c with
    # nCk(c, i) <= n < nCk(c + 1, i), emit that word, and subtract its
    # contribution from n.
    retval = [None] * length
    for i in xrange(length, 0, -1):
        upper = len(words)
        lower = 0
        minn = n + 1
        maxn = n
        while minn > n or n >= maxn:
            c = (upper + lower) // 2
            minn = nCk(c, i)
            maxn = nCk(c + 1, i)
            if n >= maxn:
                lower = c
            else:  # n < minn
                upper = c
        retval[i - 1] = words[c]
        n -= minn
    return frozenset(retval)
def encode_unordered(s, compact=False, checksum=True):
    """From a byte string, produce an unordered set of words that durably
    encodes the string.

    s: the byte string to be encoded
    compact: instead of using the length encoding scheme, pad by appending a
        single byte
    checksum: append a checksum to the byte string before encoding

    The words in the encoding dictionary were chosen to be common and
    unambiguous. The encoding is constructed so that common errors are
    extremely unlikely to produce a valid encoding.
    """
    n = bytes2int(pad_and_checksum(s, compact, checksum), endian='little')

    # Binary-search for the number of words needed: find the length whose
    # rank-offset window [get_rank_length_offset(length),
    # get_rank_length_offset(length+1)) contains n.
    upper = len(words)+1
    lower = 0
    minn = n+1  # sentinels chosen to force at least one iteration
    maxn = n
    while minn > n or n >= maxn:
        length = (upper + lower) // 2
        minn = get_rank_length_offset(length)
        maxn = get_rank_length_offset(length+1)
        if n >= maxn:
            lower = length
        else:  # n < minn
            upper = length
    n -= get_rank_length_offset(length)

    # "Unrank" n into a length-sized combination of distinct word indices:
    # for each position (largest first), binary-search for the index c with
    # nCk(c, i) <= n < nCk(c+1, i), emit that word, and subtract its
    # contribution from n.
    retval = [None] * length
    for i in xrange(length, 0, -1):
        upper = len(words)
        lower = 0
        minn = n+1
        maxn = n
        while minn > n or n >= maxn:
            c = (upper + lower) // 2
            minn = nCk(c, i)
            maxn = nCk(c+1, i)
            if n >= maxn:
                lower = c
            else:  # n < minn
                upper = c
        retval[i-1] = words[c]
        n -= minn
    return frozenset(retval)
def __init__(self, c, key, cache=True, bucket_size=5):
    """Constructor:
    c: the ciphertext, represented as an integer type or byte string
    key: the DamgaardJurik instance (supplying the modulus n) under which
        this ciphertext was generated
    cache: (optional) if True, we cache the powers of the ciphertext
        this speeds up the square-and-multiply exponentiation used if lots
        of homomorphic manipulation takes place, the default is True
    bucket_size: (optional) only has an effect if cache=True, number of
        bits per bucket in the cache of powers, default 5
    """
    if isinstance(c, bytes):
        c = bytes2int(c)
    elif isinstance(c, (Integral, mpz_type)):
        pass
    else:
        raise TypeError('Expected argument c to be an integer')

    if not isinstance(key, DamgaardJurik):
        raise TypeError('Expected argument key to be a DamgaardJurik instance')
    self.key = key
    # s is the exponent such that the ciphertext lives mod n**(s+1).
    # NOTE(review): computed via floating-point log, which could be off by
    # one when c is very near a power of n — confirm acceptable.
    s = int(ceil(log(int(c), int(self.key.n)) - 1))
    ns1 = self.key.n ** (s + 1)
    if has_gmpy:
        # use gmpy's mpz arithmetic when available
        c = mpz(c)
        ns1 = mpz(ns1)
    self.c = c
    self.s = s
    self.ns1 = ns1
    if bucket_size > 8:
        import warnings
        warnings.warn("Setting bucket_size > 8 allows timing attacks based on Python's handling of small integers")
    self.bucket_size = bucket_size
    if cache:
        # lazily-filled table of ciphertext powers: one row per bucket_size
        # bits of a potential exponent, 2**bucket_size entries per row
        self.cache = [ [ None for _ in xrange((2**self.bucket_size)) ]
                       for __ in xrange(int(ceil(self.ns1.bit_length()/float(self.bucket_size)))) ]
    else:
        self.cache = None
def __int__(self):
    """Interpret the underlying bytes as a little-endian integer."""
    value = bytes2int(self, endian='little')
    return int(value)
def fromStringToLane(string):
    """Interpret a string of bytes as an integer lane value."""
    lane = bytes2int(string)
    return lane
] if __name__ == '__main__': import random try: iterations = int(sys.argv[1]) except: iterations = 1000 print >> sys.stderr, "\nTesting encode/decode ordered, not compact, with checksum" for _ in xrange(iterations): n = random.getrandbits(1024) s = int2bytes(n, endian='little') r = encode(s, compact=False, checksum=True) s = decode(r, compact=False, checksum=True) m = bytes2int(s, endian='little') assert n == m, (n, r, m) print >> sys.stderr, '.', sys.stderr.flush() print >> sys.stderr, "\nTesting encode/decode ordered, compact, with checksum" for _ in xrange(iterations): n = random.getrandbits(1024) s = int2bytes(n, endian='little') r = encode(s, compact=True, checksum=True) s = decode(r, compact=True, checksum=True) m = bytes2int(s, endian='little') assert n == m, (n, r, m) print >> sys.stderr, '.', sys.stderr.flush()
def __new__(cls, n):
    """Create a DamgaardJurik plaintext from an integer, or from a byte
    string.

    A byte string is converted to an integer with an extra marker bit set at
    position 8*len(n), so the original length is recoverable.
    """
    if not isinstance(n, bytes):
        return super(DamgaardJurikPlaintext, cls).__new__(cls, n)
    marked = bytes2int(n) | (1 << len(n)*8)
    return cls(marked)
def __new__(cls, n):
    """Create a DamgaardJurik plaintext from an integer, or from a byte
    string.

    A byte string is converted to an integer with an extra marker bit set at
    position 8*len(n), so the original length is recoverable.
    """
    if not isinstance(n, bytes):
        return super(DamgaardJurikPlaintext, cls).__new__(cls, n)
    marked = bytes2int(n) | (1 << len(n) * 8)
    return cls(marked)
def __long__(self):
    """Interpret the underlying bytes as a little-endian long integer."""
    value = bytes2int(self, endian='little')
    return long(value)
__all__ = ['encode', 'decode', 'randomart', 'encode_unordered',
           'decode_unordered']

if __name__ == '__main__':
    import random
    try:
        iterations = int(sys.argv[1])
    # only catch a missing or non-numeric argument; a bare except would also
    # swallow KeyboardInterrupt/SystemExit
    except (IndexError, ValueError):
        iterations = 1000

    # round-trip random 1024-bit values through encode/decode and check the
    # result is unchanged
    print >>sys.stderr, "\nTesting encode/decode ordered, not compact, with checksum"
    for _ in xrange(iterations):
        n = random.getrandbits(1024)
        s = int2bytes(n, endian='little')
        r = encode(s, compact=False, checksum=True)
        s = decode(r, compact=False, checksum=True)
        m = bytes2int(s, endian='little')
        assert n == m, (n, r, m)
        print >>sys.stderr, '.',
        sys.stderr.flush()

    print >>sys.stderr, "\nTesting encode/decode ordered, compact, with checksum"
    for _ in xrange(iterations):
        n = random.getrandbits(1024)
        s = int2bytes(n, endian='little')
        r = encode(s, compact=True, checksum=True)
        s = decode(r, compact=True, checksum=True)
        m = bytes2int(s, endian='little')
        assert n == m, (n, r, m)
        print >>sys.stderr, '.',
        sys.stderr.flush()