def decode_test8():
    encoded = bitarray('11011100')
    actual = hamming.decode(encoded)
    expected = bitarray('0100')
    return (0, "") if actual == expected else (
        1, "decode_test8 FAILED! Expected: {0}, Actual: {1}\n".format(
            expected, actual))
def decode(file_name):
    border.rotate(file_name)
    image = Image.open("temp.png")
    q = border.find("temp.png")

    # Crop to the region just inside the detected border corners.
    ind = sp.argmin(sp.sum(q, 1), 0)
    up_left = q[ind, 0] + 2
    up_top = q[ind, 1] + 2
    d_right = q[ind + 1, 0] - 3
    d_bottom = q[ind - 1, 1] - 3
    box = (up_left, up_top, d_right, d_bottom)
    region = image.crop(box)

    # Estimate the cell period s from the autocorrelation of the column sums.
    h_sum = sp.sum(region, 0)
    m = argrelmax(sp.correlate(h_sum, h_sum, 'same'))
    s = sp.average(sp.diff(m))

    # Round the grid dimensions up to multiples of 3 and 4 cells.
    m = int(round(d_right - up_left) / s)
    if m % 3 != 0:
        m += 3 - m % 3
    n = int(round(d_bottom - up_top) / s)
    if n % 4 != 0:
        n += 4 - n % 4

    s = int(round(s)) + 1
    region = region.resize((s * m, s * n), PIL.Image.ANTIALIAS)
    region.save("0.png")
    pix = region.load()
    matrix = mix.off(rec.matrix(pix, s, m, n))
    str2 = hamming.decode(array_to_str(matrix))
    return hamming.bin_to_str(str2)
def decode_test5():
    encoded = bitarray('1' + ('0' * 4097))  # overall parity bit in error
    actual = hamming.decode(encoded)
    expected = bitarray('0' * 4084)
    return (0, "") if actual == expected else (
        1, "decode_test5 FAILED! Expected: {0}, Actual: {1}\n".format(
            expected, actual))
def decode_test4():
    encoded = bitarray('1111')  # no bits in error
    actual = hamming.decode(encoded)
    expected = bitarray('1')
    return (0, "") if actual == expected else (
        1, "decode_test4 FAILED! Expected: {0}, Actual: {1}\n".format(
            expected, actual))
def zdekoduj(input):
    # Restores the list to a library-compatible form, performs Hamming-code
    # error correction, and returns the decoded list as the channel output.
    cos = np.reshape(np.array(input), (-1, 8))  # back to the library-compatible list-of-lists form, used after returning from the channel
    #print(cos)
    syndrom = syndrome(cos)
    corected = correct(cos, syndrom)  # attempt to repair errors in the code
    myoutput = decode(corected)  # decode the Hamming code
    #print(myoutput)
    return myoutput  # return the decoded list
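# For reference, a minimal sketch of what helpers like syndrome() and correct()
# typically do for an (8,4) extended Hamming code. The parity-check matrix H
# and the assumed bit layout below (7 bits of the inner (7,4) code per row,
# followed by one overall parity bit) are illustrative assumptions, not
# necessarily the layout used by the library above.
import numpy as np

H = np.array([[1, 0, 1, 0, 1, 0, 1],
              [0, 1, 1, 0, 0, 1, 1],
              [0, 0, 0, 1, 1, 1, 1]])

def syndrome_sketch(codewords):
    # Per-row syndrome of the first 7 bits; a non-zero value is the 1-based
    # position of a single flipped bit.
    s = codewords[:, :7].dot(H.T) % 2
    return s.dot(np.array([1, 2, 4]))

def correct_sketch(codewords, syndromes):
    fixed = codewords.copy()
    for row, pos in enumerate(syndromes):
        if pos:                       # pos == 0 means no detectable error
            fixed[row, pos - 1] ^= 1  # flip the bit the syndrome points at
    return fixed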
def callback(rxstr):
    count = ord(rxstr[0])
    b1 = "{:08b}".format(ord(rxstr[1]))
    b2 = "{:08b}".format(ord(rxstr[2]))
    bits = map(int, b1 + b2)
    parity = sum(bits) % 2
    data, error = hamming.decode(bits[:-1])
    j.setBits(data)
    if l is None:
        print count, j, error, parity
    else:
        if parity == 0 and error == 0:
            color = 0xffffff
        elif parity > 0 and error > 0:
            color = 0xffff00
        else:
            color = 0xff0000
        l.go(count, color)
    blurred_img = randomize(img_with_message)
else:
    blurred_img = img_with_message
print("Blurring Image")
im = Image.fromarray(blurred_img)
im.save("image_blurred.bmp")
blurred_image = mpimg.imread("image_blurred.bmp")
plt.title("Blurred Image")
plt.imshow(blurred_image)
plt.show()

# extract the message (with errors) from the image, find out what the errors
# are thanks to hamming, and generate an error syndrome (basically an array
# that says "here's where the errors are")
extracted_msg = extract(blurred_img)
decoded = decode(extracted_msg)
decoded_str = bin_to_str(decoded)
print("")
print("Decoded string:")
print(decoded_str[:msg_len])
print("")
syndrome = syndrome(extracted_msg)
#print("")
#print("Syndrome:")
#print(syndrome.T[:100])
#print("")

# using the syndrome, correct the errors in the message, then decode the
def main(noiseRatio):
    packets = open('../data/packets.txt', 'r')

    # CRC VARIABLES
    crcTransmissions = 0
    crcRetransmissions = 0
    crcUndetectedErrors = 0

    # HAMMING VARIABLES
    hammingTransmissions = 0
    hammingRetransmissions = 0
    hammingCorrections = 0
    hammingUndetectedErrors = 0

    # PACKET ANALYSIS
    for packet in packets:
        packet = packet[:len(packet)-1]  # Strip the trailing newline

        # CRC
        success = False
        crcEncodedPacket = crc.encode(packet)
        while not success:  # Continue until the packet is accurately received
            crcNoisePacket = noise.gaussian(crcEncodedPacket, noiseRatio)
            crcTransmissions += 1
            success = True
            if crc.decode(crcNoisePacket) == False:  # If error(s) exist
                crcRetransmissions += 1
                success = False
            elif crcEncodedPacket != crcNoisePacket:  # Error occurred and CRC didn't catch it
                crcUndetectedErrors += 1

        # HAMMING
        success = False
        hammingEncodedPacket = hamming.encode(packet)
        while not success:  # Continue until the packet is accurately received
            hammingNoisePacket = noise.gaussian(hammingEncodedPacket, noiseRatio)
            hammingTransmissions += 1
            success = True
            decodedHammingPacket = hamming.decode(hammingNoisePacket)
            if not decodedHammingPacket:  # Hamming decode failed - too many bit flips
                hammingRetransmissions += 1
                success = False
            elif hammingNoisePacket != hammingEncodedPacket:  # Bit(s) were flipped and the decode still succeeded
                hammingCorrections += 1

    # SUMMARY
    print "\n"
    print "NOISE RATIO: %s\n" % noiseRatio
    print "CRC ANALYSIS:"
    print "\tTransmissions: " + str(crcTransmissions)
    retransmissionRate = round(float(crcRetransmissions) / float(crcTransmissions) * 100, 2)
    print "\tRetransmissions: " + str(crcRetransmissions) + " ~ " + str(retransmissionRate) + "%"
    print "\tUndetected Errors: " + str(crcUndetectedErrors)
    print "\n"
    print "HAMMING ANALYSIS"
    print "\tTransmissions: " + str(hammingTransmissions)
    retransmissionRate = round(float(hammingRetransmissions) / float(hammingTransmissions) * 100, 2)
    print "\tRetransmissions: " + str(hammingRetransmissions) + " ~ " + str(retransmissionRate) + "%"
    print "\tCorrected Packets: " + str(hammingCorrections)
    print "\tUndetected Errors: " + str(hammingUndetectedErrors)
    print "\n"

    # Write data to file
    with open("../data/crcOut.txt", 'a') as fout:
        fout.write("(%s,%s)" % (noiseRatio, round(float(crcRetransmissions) / float(crcTransmissions) * 100, 2)))
    with open("../data/hammingOut.txt", 'a') as fout:
        fout.write("(%s,%s)" % (noiseRatio, round(float(hammingRetransmissions) / float(hammingTransmissions) * 100, 2)))
data_bit = bitarray.bitarray()
data_bit.frombytes(data)
print(data)

# Error detection algorithm TODO
# Error correction algorithm
if hamming_bool:
    # Error check
    data_b_str = data_bit.to01()
    data_b_str = errorcheck(data_b_str)
    # Decoding
    data_b_str = decode(data_b_str)
    if data_b_str == 'M':
        print('Multiple errors detected')
    else:
        data_bit.clear()
        data_bit.extend(data_b_str)
        to_transform = bitarray.bitarray()
        to_transform.extend(data_b_str)
        # To normal string
        #data_str = to_transform.tobytes().decode('utf-8')
for i in range(10):
    lst = generate_bits(1000000)
    coded_lst = code_triple(lst)
    output = gilbert(coded_lst, *parameter_list)
    decoded_lst = decode_triple(output)
    #ber_list.append(ber_triple(lst, decoded_lst))
    sum1 += ber_triple(lst, decoded_lst)
    #plt.plot(ber_list, [3], label='Triple-repetition coding', marker='o')

    # ========================================================================
    ber_list = []
    hamming_encoded = hamming.encode(lst)
    output_hamming = gilbert(hamming_encoded, *parameter_list)
    hamming_decoded = hamming.decode(output_hamming)
    #ber_list.append(ber_triple(lst, hamming_decoded))
    sum2 += ber_triple(lst, hamming_decoded)
    #plt.plot(ber_list, [2], label='Hamming(8, 4) coding', marker='o')

    # ========================================================================
    packet_size = 1007
    t = 10
    redundancy = 1120 / 1007
    ber_list = []
    chunks = [
        lst[x:x + packet_size] for x in range(0, len(lst), packet_size)
    ]
    bch_decoded_all2 = []
from hamming import encode, decode
from bitarray import bitarray

data = bitarray('1111')
data_with_parity = encode(data)
print(data_with_parity)

# Flip two bits to simulate transmission errors
data_with_parity[3] = not data_with_parity[3]
data_with_parity[4] = not data_with_parity[4]

print(decode(data_with_parity))
if __name__ == "__main__":
    row = ['original_msg', 'result_msg', 'repaired', 'correct']
    rows = []
    for i in range(0, 999):
        m = 'dsadAAAAAAAAAAAAAaaaaaaaaaaaaaaaaaaaaaaaaaaaBBBBBBBBBBBBBBBBBBBb'
        # Add error
        result_msg, original_msg, real_msg = hamming_message(m)
        if to_string(real_msg) == to_string(result_msg):
            correct = True
        else:
            correct = False
        # Make row
        rows.append([
            to_string(original_msg),
            to_string(result_msg),
            False if decode(result_msg) != original_msg else True,
            correct,
        ])
    with open('hamming.csv', 'w') as writeFile:
        writer = csv.writer(writeFile)
        writer.writerows(rows)
numTrans = 0
numRT = 0
badReads = 0
corrections = 0

# Hamming - Gaussian
with open('../data/packets.txt', 'r') as fin:
    for packet in fin:
        packet = packet.strip()
        hammingEncodedPacket = hamming.encode(packet)
        success = False
        while not success:
            hammingNoisePacket = noise.gaussian(hammingEncodedPacket, noiseRatio)
            numTrans += 1
            hammingDecodedPacket = hamming.decode(hammingNoisePacket)
            if hammingEncodedPacket == hammingNoisePacket:  # No interference
                success = True
            elif hammingDecodedPacket == packet:  # Correction was good
                success = True
                corrections += 1
            elif hammingDecodedPacket == False:  # Could not correct
                numRT += 1
            else:  # Bad correction
                badReads += 1
                success = True

rowData["Hamming RT G"] = round(float(numRT) / numTrans, 4)
gaussianRow["Hamming T"] = numTrans
gaussianRow["Hamming RT"] = numRT
def hamming_recieve(data):
    print("MESSAGE: ", to_string(decode(data)))
# To change the probability of error, change this value
prob_err = float(1/12)

# Pad the input with 0's at the end to ensure we have groups of 4.
a = list(input("Enter a bit string: "))
a = [int(j) for j in a]
a = split(a, 4)
print(f"Original Message: {[j for sub in a for j in sub]}")

# Encode the messages
encoded_msg = []
for j in range(len(a)):
    encoded_msg += hamming.encode(a[j])

# Simulate error
err_msg = sim_error(encoded_msg, p_err=prob_err)
print(f"Encoded Message: {encoded_msg}\n")
print(f"Encoded Message with error: {err_msg}")
print(f"The Error: {[j ^ k for j, k in zip(encoded_msg, err_msg)]}\n")

# Decode
err_msg_chuncked = split(err_msg, 7)
decoded_msg = []
for j in range(len(err_msg_chuncked)):
    decoded_msg += hamming.decode(err_msg_chuncked[j])

print(f"Decoded Message: {decoded_msg}")
print(f"Decoded Correctly: {decoded_msg == [j for sub in a for j in sub]}")
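# The script above relies on a split() helper that is not shown. A minimal
# sketch of what it presumably does (chunk a bit list into groups of n,
# zero-padding the final group, as the padding comment above suggests).
# This is an assumed reconstruction, not the original implementation.
def split(seq, n):
    padded = seq + [0] * (-len(seq) % n)  # pad with 0's so the length is a multiple of n
    return [padded[i:i + n] for i in range(0, len(padded), n)]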
def detect_markers(img, marker_ids=None):
    width, height, _ = img.shape
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    edges = cv2.Canny(gray, 10, 100)
    _, contours, _ = cv2.findContours(edges.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)

    # We only keep the big enough contours
    #min_area = width * height * .01
    min_area = width * height * .0001
    contours = [c for c in contours if cv2.contourArea(c) > min_area]

    warped_size = 9 * 10
    canonical_marker_coords = array(
        ((0, 0),
         (warped_size - 1, 0),
         (warped_size - 1, warped_size - 1),
         (0, warped_size - 1)),
        dtype='float32')

    markers = []
    for c in contours:
        approx_curve = cv2.approxPolyDP(c, len(c) * 0.01, True)
        if not (len(approx_curve) == 4 and cv2.isContourConvex(approx_curve)):
            continue

        # Warp the candidate quadrilateral to a canonical square
        sorted_curve = array(cv2.convexHull(approx_curve, clockwise=False), dtype='float32')
        persp_transf = cv2.getPerspectiveTransform(sorted_curve, canonical_marker_coords)
        warped_img = cv2.warpPerspective(img, persp_transf, (warped_size, warped_size))
        warped_gray = cv2.cvtColor(warped_img, cv2.COLOR_BGR2GRAY)
        _, warped_bin = cv2.threshold(warped_gray, 50, 255, cv2.THRESH_BINARY)

        print(marker_size)
        print(warped_size)
        warped_marker = int(warped_size / marker_size)
        print(warped_marker)

        # Downsample the warped binary image to one value per marker cell
        marker = warped_bin.reshape(
            [marker_size, warped_marker, marker_size, warped_marker])
        marker = marker.mean(axis=3).mean(axis=1)
        marker[marker < 127] = 0
        marker[marker >= 127] = 1

        # Eliminate the entirely black or entirely white markers
        # for robustness purposes
        sub_marker = marker[1:-1, 1:-1]
        sub_size = marker_size - 2
        if (all(sub_marker == zeros((sub_size, sub_size))) or
                all(sub_marker == ones((sub_size, sub_size)))):
            continue

        # Try decoding in all four orientations
        for _ in range(4):
            try:
                code = decode(sub_marker).flatten()[::-1]
                id = (2 ** find(code == 1)).sum()
                markers.append(
                    HammingMarker(id=id, contours=approx_curve, img_size=(width, height)))
            except ValueError:
                # The hamming code is incorrect
                pass
            sub_marker = rot90(sub_marker)

    # Remove duplicates
    markers = {m.id: m for m in markers}.values()
    return markers