def get_error_sum(file, number_of_chunks, chunk_size, seq_seed=None, while_count=1000):
    """
    Encodes `file` into up to `while_count` RU10 packets and collects the
    DNA-rule error probability of every generated packet.

    :param file: File to encode.
    :param number_of_chunks: Number of chunks the file is split into.
    :param chunk_size: Size of each chunk.
    :param seq_seed: If not None, packets are created with sequential seeds
        starting at this value; generation stops early once the seed space
        (2 ** (8 * struct.calcsize(SEED_LEN_FORMAT))) is exhausted.
    :param while_count: Maximum number of packets to create.
    :return: List of per-packet error probabilities.
    """
    max_seed = np.power(2, 8 * struct.calcsize(SEED_LEN_FORMAT))
    dist = RaptorDistribution(number_of_chunks)
    dna_rules = FastDNARules()

    # PEP 8 (E731): use a def instead of binding a lambda to a name.
    def error_correction(x):
        return reed_solomon_encode(x, NO_REPAIR_SYMBOLS)

    encoder = RU10Encoder(file, number_of_chunks, dist, chunk_size=chunk_size,
                          insert_header=INSERT_HEADER, rules=dna_rules,
                          error_correction=error_correction, id_len_format=SEED_LEN_FORMAT,
                          number_of_chunks_len_format=NUMBER_OF_CHUNKS_LEN_FORMAT,
                          save_number_of_chunks_in_packet=save_number_of_chunks_in_packet,
                          prepend="", append="")
    encoder.prepare()
    res = []
    # Bugfix: the original `while i < while_count` loop never incremented `i`
    # (infinite loop, always the same seed); a for-loop makes that impossible.
    for i in range(while_count):
        if seq_seed is not None:
            if seq_seed + i >= max_seed:
                break
            packet = encoder.create_new_packet(seed=seq_seed + i)
        else:
            packet = encoder.create_new_packet()
        should_drop_packet(dna_rules, packet)
        res.append(packet.error_prob)
    return res
def test_suite5(as_dna, chunk_size, dna_rules, error_correction, headerchunk, decoder_instance):
    """
    Round trip with an optional header chunk: encode `file2`, persist the
    packets (optionally as DNA), decode them from `out_dir2` and compare
    'DEC_RU10_' + file2 against `cmp_file2`.
    """
    chunksize = chunk_size
    number_of_chunks = Encoder.get_number_of_chunks_for_file_with_chunk_size(file2, chunksize)
    dist = RaptorDistribution(number_of_chunks)
    pseudo_decoder = decoder_instance.pseudo_decoder(number_of_chunks=number_of_chunks)
    if as_dna:
        rules = dna_rules
    else:
        rules = None
    encoder = RU10Encoder(file2, number_of_chunks, dist, pseudo_decoder=pseudo_decoder,
                          rules=rules, error_correction=error_correction,
                          insert_header=headerchunk)
    # NOTE(review): encode_to_packets() is called twice in the original —
    # presumably to generate a second batch of packets; confirm it is intended.
    encoder.encode_to_packets()
    encoder.encode_to_packets()
    encoder.save_packets(split_to_multiple_files=True, save_as_dna=as_dna)
    # The pseudo decoder ran alongside encoding, so it must already be solved.
    assert (pseudo_decoder.is_decoded()
            and pseudo_decoder.getSolvedCount() == pseudo_decoder.number_of_chunks)
    assert os.path.exists(out_dir2)
    decoder = decoder_instance(out_dir2, use_headerchunk=headerchunk,
                               error_correction=error_correction)
    decoder.decode()
    assert decoder.is_decoded() and decoder.getSolvedCount() == encoder.number_of_chunks
    os.remove(file2)
    decoder.saveDecodedFile(print_to_output=True, null_is_terminator=True)
    assert os.path.exists('DEC_RU10_' + file2) and filecmp.cmp('DEC_RU10_' + file2, cmp_file2)
    shutil.rmtree(out_dir2)
def test_suite2(as_dna, chunk_size, dna_rules, error_correction_pair):
    """
    Round trip without header chunk: encode `file` (encoder uses the first
    error-correction of the pair), persist the packets, decode from `out_dir`
    (decoder uses the second of the pair) and verify the reconstruction
    matches `cmp_file`.
    """
    chunksize = chunk_size
    number_of_chunks = Encoder.get_number_of_chunks_for_file_with_chunk_size(file, chunksize)
    dist = RaptorDistribution(number_of_chunks)
    decoder_instance = RU10Decoder
    pseudo_decoder = decoder_instance.pseudo_decoder(number_of_chunks=number_of_chunks)
    if as_dna:
        rules = dna_rules
    else:
        rules = None
    encoder = RU10Encoder(file, number_of_chunks, dist, pseudo_decoder=pseudo_decoder,
                          rules=rules, error_correction=error_correction_pair[0])
    encoder.encode_to_packets()
    encoder.save_packets(split_to_multiple_files=True, save_as_dna=as_dna)
    # The pseudo decoder ran alongside encoding, so it must already be solved.
    assert (pseudo_decoder.is_decoded()
            and pseudo_decoder.getSolvedCount() == pseudo_decoder.number_of_chunks)
    assert os.path.exists(out_dir)
    decoder = decoder_instance(out_dir, error_correction=error_correction_pair[1])
    decoder.decode()
    assert decoder.is_decoded() and decoder.getSolvedCount() == encoder.number_of_chunks
    os.remove(file)
    decoder.saveDecodedFile(print_to_output=False)
    assert os.path.exists(file) and filecmp.cmp(file, cmp_file)
    shutil.rmtree(out_dir)
def encode(file, dist_lst, asdna=True, chunk_size=50):
    """
    Encodes `file` with a RU10 encoder using the distribution `dist_lst`,
    feeding every packet into a pseudo decoder until the file is decodable,
    then keeps generating packets (50 * number_of_chunks in total) to collect
    per-degree error probabilities.

    :param file: File to encode.
    :param dist_lst: Distribution values (f) for the RaptorDistribution.
    :param asdna: If True, the encoder applies the FastDNARules.
    :param chunk_size: Chunk size used to split the file.
    :return: Tuple of (dict mapping packet degree -> list of error
        probabilities, (packets_needed - number_of_chunks) / 100.0).
    """
    packets_needed = 0
    packets = dict()
    number_of_chunks = Encoder.get_number_of_chunks_for_file_with_chunk_size(file, chunk_size)
    dist = RaptorDistribution(number_of_chunks)
    dist.f = dist_lst
    # Idiom fix: degrees 0..40 via range instead of a 41-element literal.
    d = list(range(41))
    dist.d = d
    dna_rules = FastDNARules()
    rules = dna_rules if asdna else None
    x = RU10Encoder(file, number_of_chunks, dist, chunk_size=chunk_size, insert_header=False,
                    rules=rules, error_correction=nocode, id_len_format="H",
                    number_of_chunks_len_format="B", save_number_of_chunks_in_packet=False,
                    mode_1_bmp=False)
    x.prepare()
    y = RU10Decoder.pseudo_decoder(x.number_of_chunks, False)
    if y.distribution is None:  # self.isPseudo and
        y.distribution = RaptorDistribution(x.number_of_chunks)
        y.distribution.f = dist_lst
        y.distribution.d = d
        y.number_of_chunks = x.number_of_chunks
        _, y.s, y.h = intermediate_symbols(x.number_of_chunks, y.distribution)
        y.createAuxBlocks()
    n = 0
    # Pre-create buckets for all degrees that can occur (0..44).
    for p_tmp in range(45):
        packets[p_tmp] = list()
    while n < number_of_chunks * 50:
        pack = x.create_new_packet()
        if packets_needed == 0:
            y.input_new_packet(pack)
        should_drop_packet(dna_rules, pack)
        if pack.get_degree() not in packets:
            packets[pack.get_degree()] = list()
        packets[pack.get_degree()].append(pack.error_prob)
        n += 1
        if n >= number_of_chunks and y.is_decoded() and packets_needed == 0:
            packets_needed = n
            # we dont want to break, we want to generate #chunks * XXX packets!
            # break
    # Idiom fix: the original `sum([len(x) for x in ...])` shadowed the
    # encoder `x` and built a throwaway list.
    print("Packets created: " + str(sum(len(v) for v in packets.values())))
    return packets, (packets_needed - number_of_chunks) / 100.0
def create_packets_e_prob(start_num: int, normed_dist: ndarray, number_of_packets: int, rules=None):
    """
    Creates `number_of_packets` RU10 packets with sequential seeds starting at
    `start_num` and returns the error probability each packet accumulated
    under the given (or a freshly constructed) FastDNARules rule set.
    """
    distribution = RaptorDistribution(__NUM_CHUNKS)
    distribution.f = normed_dist
    distribution.d = [degree for degree in range(0, 41)]
    encoder = RU10Encoder(file=__FILE, number_of_chunks=__NUM_CHUNKS,
                          distribution=distribution, insert_header=False)
    encoder.prepare()
    if rules is None:
        rules = FastDNARules()
    error_probs = []
    for seed in range(start_num, start_num + number_of_packets):
        pkt = encoder.create_new_packet(seed=seed)
        should_drop_packet(rules, pkt)
        error_probs.append(pkt.error_prob)
        del pkt
    del encoder
    return error_probs
def encode(self, file, asdna=True, error_correction=nocode, insert_header=False,
           save_number_of_chunks_in_packet=False, mode_1_bmp=False, chunk_size=50):
    """
    Encodes `file` with a RU10 encoder using this instance's distribution
    (self.X / self.d), feeding every packet into a pseudo decoder until the
    file is decodable, then keeps generating packets (50 * number_of_chunks
    in total) to collect per-degree error probabilities.

    :param file: File to encode.
    :param asdna: If True, the encoder applies the FastDNARules.
    :param error_correction: Error-correction function passed to the encoder.
    :param insert_header: Whether the encoder inserts a header chunk.
    :param save_number_of_chunks_in_packet: Store #chunks inside each packet.
    :param mode_1_bmp: Forwarded to the encoder.
    :param chunk_size: Chunk size used to split the file.
    :return: Tuple of (dict mapping packet degree -> list of error
        probabilities, (packets_needed - number_of_chunks) / 100.0).
    """
    packets_needed = 0
    packets = dict()
    number_of_chunks = Encoder.get_number_of_chunks_for_file_with_chunk_size(file, chunk_size)
    dist = RaptorDistribution(number_of_chunks)
    dist.f = self.X
    dist.d = self.d
    dna_rules = FastDNARules()
    rules = dna_rules if asdna else None
    x = RU10Encoder(file, number_of_chunks, dist, chunk_size=chunk_size,
                    insert_header=insert_header, rules=rules, error_correction=error_correction,
                    id_len_format="H", number_of_chunks_len_format="B",
                    save_number_of_chunks_in_packet=save_number_of_chunks_in_packet,
                    mode_1_bmp=mode_1_bmp)
    x.prepare()
    y = RU10Decoder.pseudo_decoder(x.number_of_chunks, False)
    if y.distribution is None:  # self.isPseudo and
        y.distribution = RaptorDistribution(x.number_of_chunks)
        y.distribution.f = self.X
        y.distribution.d = self.d
        y.number_of_chunks = x.number_of_chunks
        _, y.s, y.h = intermediate_symbols(x.number_of_chunks, y.distribution)
        y.createAuxBlocks()
    n = 0
    # Pre-create buckets for all degrees that can occur (0..44).
    for p_tmp in range(45):
        packets[p_tmp] = list()
    while n < number_of_chunks * 50:
        pack = x.create_new_packet()
        if packets_needed == 0:
            y.input_new_packet(pack)
        should_drop_packet(dna_rules, pack)
        if pack.get_degree() not in packets:
            packets[pack.get_degree()] = list()
        packets[pack.get_degree()].append(pack.error_prob)
        n += 1
        if n >= number_of_chunks and y.is_decoded() and packets_needed == 0:
            packets_needed = n
            # we dont want to break, we want to generate #chunks * XXX packets!
            # break
    # Idiom fix: the original `sum([len(x) for x in ...])` shadowed the
    # encoder `x` and built a throwaway list.
    print("Packets created: " + str(sum(len(v) for v in packets.values())))
    return packets, (packets_needed - number_of_chunks) / 100.0
def encode(file, chunk_size, dist, as_dna=True, repeats=15):
    """
    Encodes the file to packets until the pseudo decoder was able to decode it
    'repeats' times with the given chunk size and the distribution list.
    :param file: File to encode.
    :param chunk_size: Chunksize to use.
    :param dist: The distribution to calculate the average error and overhead for.
    :param as_dna: If true uses the DNA Rules.
    :param repeats: Number of En-/Decoding cycles.
    :return: Tuple of (average overhead over all repeats, dict mapping packet
        degree to the capped error probabilities observed for that degree).
    """
    degree_dict = {}
    overhead_lst = []
    number_of_chunks = Encoder.get_number_of_chunks_for_file_with_chunk_size(file, chunk_size,
                                                                             insert_header=False)
    distribution = RaptorDistribution(number_of_chunks)
    distribution.f = dist
    # Idiom fix: degrees 0..40 via range instead of a hand-written comprehension.
    distribution.d = list(range(41))
    rules = FastDNARules() if as_dna else None
    encoder = RU10Encoder(file, number_of_chunks, distribution, insert_header=False, rules=rules,
                          error_correction=nocode, id_len_format="H",
                          number_of_chunks_len_format="B", save_number_of_chunks_in_packet=False,
                          mode_1_bmp=False)
    encoder.prepare()
    for _ in range(repeats):
        # Fresh master seed for every en-/decoding cycle.
        encoder.random_state = np.random.RandomState()
        # print("Master-Seed used: " + str(encoder.random_state.get_state()[1][0]))
        pseudo_decoder = create_pseudo_decoder(encoder.number_of_chunks, distribution)
        needed_packets = 0
        while pseudo_decoder.GEPP is None or not pseudo_decoder.is_decoded():
            needed_packets += 1
            packet = encoder.create_new_packet()
            pseudo_decoder.input_new_packet(packet)
            # NOTE(review): `rules` is None when as_dna=False — confirm
            # should_drop_packet tolerates that.
            should_drop_packet(rules, packet)
            # Idiom fix: setdefault replaces the manual membership check.
            degree_dict.setdefault(packet.get_degree(), list()).append(min(packet.error_prob, 1.0))
        overhead = (needed_packets - encoder.number_of_chunks) / 100.0
        overhead_lst.append(overhead)
    return sum(overhead_lst) / len(overhead_lst), degree_dict
def test_suite4(as_dna, chunk_size, dna_rules, error_correction):
    """
    Corruption test: encode and persist the packets, delete all but the first
    two packet files, truncate 4 bytes off packet 0, then verify the decoder
    detects the corruption and refuses to save the decoded file.
    """
    chunksize = chunk_size
    number_of_chunks = Encoder.get_number_of_chunks_for_file_with_chunk_size(file, chunksize)
    dist = RaptorDistribution(number_of_chunks)
    decoder_instance = RU10Decoder
    pseudo_decoder = decoder_instance.pseudo_decoder(number_of_chunks=number_of_chunks)
    if as_dna:
        rules = dna_rules
    else:
        rules = None
    encoder = RU10Encoder(file, number_of_chunks, dist, pseudo_decoder=pseudo_decoder,
                          rules=rules, error_correction=error_correction[0])
    encoder.encode_to_packets()
    encoder.save_packets(split_to_multiple_files=True, save_as_dna=as_dna)
    assert (pseudo_decoder.is_decoded()
            and pseudo_decoder.getSolvedCount() == pseudo_decoder.number_of_chunks)
    assert os.path.exists(out_dir)
    # do not delete all packets (and break the last one).
    # that way the GEPP inside the decoder will get initialized and we might not end in a race-condition for
    # decoder.decode() sometimes raising an Exception..
    for idx in range(2, number_of_chunks):
        tmp_path = "RU10_" + file + "/" + str(idx) + ".RU10_DNA"
        os.remove(tmp_path)
    with open("RU10_" + file + "/0.RU10_DNA", 'rb+') as tmp_file:
        # TODO we should flip bits in the middle rather than deleting 4 bytes at the end (we store crc32 / reedsolomon at the end)
        tmp_file.seek(-4, os.SEEK_END)
        tmp_file.truncate()
    decoder = decoder_instance(out_dir, error_correction=error_correction[1])
    decoder.decode()
    assert decoder.corrupt == 1
    assert not decoder.is_decoded()
    os.remove(file)
    with pytest.raises(AssertionError):
        decoder.saveDecodedFile(print_to_output=False, partial_decoding=False)
    assert not (os.path.exists(file) and filecmp.cmp(file, cmp_file))
    shutil.rmtree(out_dir)
def test_suite(as_dna, decoder_instance):
    """
    Full round trip with header chunk: restore the input file from
    `cmp_file`, encode and persist the packets, decode them from `out_dir`
    via decodeFolder and verify the reconstruction matches `cmp_file`.
    """
    dir_path = os.getcwd()
    try:
        os.remove(dir_path + "/" + file)
    except OSError:
        # Bugfix: was a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit; os.remove raises OSError subclasses.
        print("Not deleting, File did not exists")
    shutil.copyfile(dir_path + "/" + cmp_file, dir_path + "/" + file)
    print(as_dna)
    chunksize = 200
    number_of_chunks = Encoder.get_number_of_chunks_for_file_with_chunk_size(file, chunksize)
    dist = RaptorDistribution(number_of_chunks)
    pseudo_decoder = decoder_instance.pseudo_decoder(number_of_chunks=number_of_chunks)
    rules = FastDNARules() if as_dna else None
    encoder = RU10Encoder(file, number_of_chunks, dist, pseudo_decoder=pseudo_decoder,
                          rules=rules, id_len_format="H", number_of_chunks_len_format="H",
                          insert_header=True)
    encoder.encode_to_packets()
    encoder.save_packets(split_to_multiple_files=True, save_as_dna=as_dna)
    assert (pseudo_decoder.is_decoded()
            and pseudo_decoder.getSolvedCount() == pseudo_decoder.number_of_chunks)
    assert os.path.exists(out_dir)
    decoder = decoder_instance(out_dir)
    decoder.decodeFolder(id_len_format="H", number_of_chunks_len_format="H")
    if isinstance(decoder, RU10BPDecoder):
        # The BP decoder needs the packets fed in explicitly.
        for pack in encoder.encodedPackets:
            decoder.input_new_packet(pack)
    assert decoder.is_decoded() and decoder.getSolvedCount() == encoder.number_of_chunks
    os.remove(file)
    decoder.saveDecodedFile(print_to_output=False)
    assert os.path.exists(file) and filecmp.cmp(file, cmp_file)
    shutil.rmtree(out_dir)
index.append(key) data.append(val) fig, (ax) = plt.subplots(ncols=1) ax.boxplot(data) ax.set_xticklabels(index) plt.show() plt.plot(num_list) plt.plot(mean_list) plt.show() if __name__ == "__main__": file = "../.INFILES/Dorn" chunk_size = 100 norepairsymbols = 6 save_number_of_chunks_in_packet = False insert_header = False rules = FastDNARules() error_correction = lambda x: reed_solomon_encode(x, norepairsymbols) number_of_chunks = 50 if chunk_size != 0: number_of_chunks = Encoder.get_number_of_chunks_for_file_with_chunk_size(file, chunk_size) dist = RaptorDistribution(number_of_chunks) x = RU10Encoder(file, number_of_chunks, dist, chunk_size=chunk_size, insert_header=insert_header, rules=rules, error_correction=error_correction, id_len_format="H", number_of_chunks_len_format="B", save_number_of_chunks_in_packet=save_number_of_chunks_in_packet) aa = QualityPacketGen(x)
if __name__ == "__main__":
    # Script entry point: configure a RU10 encoder for "logo.jpg" and plot
    # results from a previously generated CSV.
    file = "logo.jpg"
    number_of_chunks = 400
    insert_header = True
    rules = FastDNARules()
    norepair_symbols = 4
    error_correction_str = "reedsolomon"
    error_correction = get_error_correction_encode(error_correction_str, norepair_symbols)
    # struct format characters: "I" = unsigned 32-bit int for all length fields.
    PACKET_LEN_FORMAT = "I"
    CRC_LEN_FORMAT = "I"
    NUMBER_OF_CHUNKS_LEN_FORMAT = "I"
    ID_LEN_FORMAT = "I"
    save_number_of_chunks_in_packet = False
    # Packets whose error probability exceeds this bound get dropped.
    upper_bound = 0.9
    encoder = RU10Encoder(file, number_of_chunks, RaptorDistribution(number_of_chunks=number_of_chunks),
                          insert_header=insert_header, pseudo_decoder=None, chunk_size=0, rules=rules,
                          error_correction=error_correction, packet_len_format=PACKET_LEN_FORMAT,
                          crc_len_format=CRC_LEN_FORMAT,
                          number_of_chunks_len_format=NUMBER_OF_CHUNKS_LEN_FORMAT,
                          id_len_format=ID_LEN_FORMAT,
                          save_number_of_chunks_in_packet=save_number_of_chunks_in_packet,
                          mode_1_bmp=False, prepend="", append="", drop_upper_bound=upper_bound)
    encoder.set_overhead_limit(0.40)
    # encoder.encode_to_packets()
    # File-name prefix encoding the full configuration, used by the plotting helpers.
    save_prefix = file + "_" + str(number_of_chunks) + "_" + ("H" if insert_header else "") + "_" + \
                  ("R" if rules is not None else "") + "_" + error_correction_str + "_" + str(norepair_symbols) + \
                  "_" + PACKET_LEN_FORMAT + "_" + CRC_LEN_FORMAT + "_" + NUMBER_OF_CHUNKS_LEN_FORMAT + "_" + \
                  ID_LEN_FORMAT + "_" + ("NUMC" if save_number_of_chunks_in_packet else "") + "_" + str(upper_bound)
    # plot_chunks_count_in_packets(list(encoder.encodedPackets), save_prefix)
    plot_from_csv("logo.jpg_400_H_R_reedsolomon_4_I_I_I_I__0.9_2021-01-20_12-40-52.csv")
res = p.map(self.send_packets_on_interface, [x for x in zip(packets, sockets, dests)]) """ # res = [self.send_packets_on_interface(x) for x in zip(packets, sockets, dest)] i = 0 for iface, dest in zip(interfaces, dests): socket = self.create_ip_socket(interface=iface, broadcast=broadcast) self.send_packets_on_interface((packets[i], socket, dest)) """ return res if __name__ == "__main__": file = "../.INFILES/logo.jpg" chunk_size = 100 number_of_chunks = RU10Encoder.get_number_of_chunks_for_file_with_chunk_size( file, chunk_size) INSERT_HEADER = True NUM_IN_PACKER = True dist = RaptorDistribution(number_of_chunks) dna_rules = None # FastDNARules() BROADCAST = False error_correction = reed_solomon_encode """ encoder = RU10Encoder(file, number_of_chunks, dist, chunk_size=chunk_size, insert_header=INSERT_HEADER, rules=None, error_correction=error_correction, id_len_format="I", number_of_chunks_len_format="I", save_number_of_chunks_in_packet=NUM_IN_PACKER, mode_1_bmp=False) encoder.set_overhead_limit(1.00) encoder.encode_to_packets() """ m = MultiInterfaceFountain()