def hammingpattern(pattern, text, hamming):
    """Return the start indices of every approximate occurrence of *pattern*
    in *text*, allowing up to *hamming* mismatches per window.

    The result is a single string of space-separated indices; each index is
    followed by one space, so a non-empty result carries a trailing space
    (kept byte-for-byte compatible with the original output format).

    Relies on the sibling ``hammingdistance(a, b)`` helper for the per-window
    mismatch count.
    """
    hamming = int(hamming)
    # Hoist the window length out of the loop instead of recomputing
    # len(pattern) on every iteration.
    k = len(pattern)
    # Build the pieces once and join — the original `s += ...` loop is
    # quadratic in the number of matches.
    return "".join(
        str(i) + " "
        for i in range(len(text) - k + 1)
        if hammingdistance(text[i:i + k], pattern) <= hamming
    )
def PatternCountHamming(text, pattern, d):
    """Count how many windows of *text* match *pattern* with at most *d*
    mismatches (Hamming distance), sliding one position at a time.

    Uses the sibling ``hammingdistance(a, b)`` helper for each window.
    """
    d = int(d)
    window = len(pattern)
    # One pass over every alignment of the window; sum the near-matches.
    return sum(
        1
        for start in range(len(text) - window + 1)
        if hammingdistance(text[start:start + window], pattern) <= d
    )
# --- Repeating-key XOR key-size scoring (cryptopals-style) ---------------
# Reads a base64-encoded ciphertext from `file` (defined elsewhere in this
# module), then scores candidate key sizes 2..40 by the average normalized
# Hamming distance between consecutive ciphertext chunks.
# NOTE(review): file handle is never closed — consider `with open(...)`.
data = open(file,'r')
# NOTE(review): strip('/n') strips the literal characters '/' and 'n';
# this was presumably meant to be strip('\n') — confirm against the input.
data = data.read().strip('/n')
data = base64.b64decode(data)
unbased = data
decoded_list=[]
import binascii
#def break_repeatingXOR(base64_strings):
#unbased = base64.b64decode(base64_strings)
key_distances=[]
from hammingdistance import hammingdistance
for keysize in range(2,41):
    # Take four consecutive keysize-wide chunks, null-pad short ones, and
    # decode to str for the hammingdistance helper.
    # NOTE(review): the slices end at keysize-1 (etc.), so each chunk drops
    # its last byte before the null padding — looks like an off-by-one;
    # [0:keysize], [keysize:2*keysize], ... would give full chunks. Confirm.
    chunk1 = str(unbased[0:keysize-1].ljust(keysize,b'\0'),'utf-8')
    chunk2 = str(unbased[keysize:2*keysize-1].ljust(keysize,b'\0'),'utf-8')
    chunk3 = str(unbased[keysize*2:keysize*3-1].ljust(keysize,b'\0'),'utf-8')
    chunk4 = str(unbased[keysize*3:keysize*4-1].ljust(keysize,b'\0'),'utf-8')
    # Pairwise distances around the four chunks, averaged and normalized by
    # the key size so different sizes are comparable.
    distance1=hammingdistance(chunk1,chunk2)
    distance2=hammingdistance(chunk2,chunk3)
    distance3=hammingdistance(chunk3,chunk4)
    distance4=hammingdistance(chunk4,chunk1)
    average_distance = (distance1+distance2+distance3+distance4)/4
    key_distances.append(average_distance/keysize)
# Debug peek at one score (index 27 corresponds to keysize 29).
print(key_distances[27])
# The 8 lowest-scoring indices; +2 converts list index back to keysize,
# since key_distances[0] corresponds to keysize 2.
keysize_totry = heapq.nsmallest(8,range(len(key_distances)), key_distances.__getitem__)
keysize_totry=[x.__add__(2) for x in keysize_totry]
print(keysize_totry)
# Per-candidate key length: re-stream the ciphertext to cut it into blocks.
# NOTE(review): this loop body continues beyond the visible source.
for keylength in keysize_totry:
    blocks=[]
    data_stream = io.BytesIO(data)
    #data_stream.read(2)
    # break ciphertext into blocks