def run():
    """Break a repeating-key XOR cipher (cryptopals set 1, challenge 6).

    Reads base64-encoded ciphertext from 'set1/e6.txt', estimates the key
    size via normalized Hamming distance between leading ciphertext blocks,
    solves each transposed byte-stream as a single-byte XOR cipher, then
    prints the recovered key size, the key, and the decrypted plaintext.

    Relies on project helpers: hamming(), find_best_xor_match(), key_encode().
    """
    # Context manager closes the file handle (the original leaked it).
    with open('set1/e6.txt', 'r') as f:
        buff = base64.b64decode(''.join(line.rstrip() for line in f))

    # Number of sample blocks we want to use to calculate hamming dist
    avg_blocks = 4

    keys = []
    for key_size in range(2, 41):
        # Grab first `avg_blocks` consecutive key_size-byte chunks of the buffer.
        blocks = [buff[i * key_size:(i + 1) * key_size] for i in range(avg_blocks)]
        # Sum Hamming distances over distinct pairs only: range(i + 1, ...)
        # skips the self-comparisons the original performed (they added 0).
        hamm = sum(hamming(blocks[i], blocks[j])
                   for i in range(avg_blocks)
                   for j in range(i + 1, avg_blocks))
        # Normalize by key size so candidates of different lengths compare fairly.
        keys.append({'size': key_size, 'hamming': hamm / key_size})

    # Best candidate = smallest normalized Hamming distance.
    key_size = min(keys, key=lambda s: s['hamming'])['size']
    print("Key size: %d" % key_size)

    # Transpose the buffer into key_size number of streams, each encrypted
    # with a single key byte.
    # eg: for ks: 4 and buff: '123456789' -> ['159', '26', '37', '48']
    len_buff = len(buff)
    blocks = [bytearray([buff[(key_size * j) + i]
                         for j in range(math.ceil((len_buff - i) / key_size))])
              for i in range(key_size)]

    # Solve each stream as a single-byte XOR cipher; concatenating the
    # winning masks reconstructs the full repeating key.
    key = bytearray()
    for block in blocks:
        res = find_best_xor_match(block, 1)
        key += res[0].mask
    print("Key: %s" % key)
    print(key_encode(buff, key))
def run():
    """Verify repeating-key XOR encryption (cryptopals set 1, challenge 5).

    Encrypts a fixed two-line plaintext under the repeating key "ICE" using
    the project helper key_encode(), hex-encodes the result, and prints True
    if it matches the known-good ciphertext from the challenge.
    """
    plaintext = bytearray("Burning 'em, if you ain't quick and nimble\nI go crazy when I hear a cymbal", "ascii")
    cipher_key = bytearray("ICE", "ascii")
    # Reference ciphertext published with the challenge, as lowercase hex.
    expected = b"0b3637272a2b2e63622c2e69692a23693a2a3c6324202d623d63343c2a26226324272765272a282b2f20430a652e2c652a3124333a653e2b2027630c692b20283165286326302e27282f"
    matches = binascii.hexlify(key_encode(plaintext, cipher_key)) == expected
    print(matches)