Example #1
def blackboxTest(file,
                 number_of_chunks=800,
                 droprate=0.02,
                 seed=2,
                 overhead=0.05):
    print("#### Starting Blackbox Test with " + str(number_of_chunks) +
          " Chunks and a droprate of " + str(droprate) + " ####")
    start = time.time()
    dist = RobustSolitonDistribution(S=number_of_chunks, seed=seed)
    # dist = IdealSolitonDistribution(S=number_of_chunks, seed=seed)

    # pseudo = LTBPDecoder.pseudo_decoder(number_of_chunks)
    encoder = LTEncoder(file, number_of_chunks,
                        dist)  # , pseudo_decoder=pseudo)
    decoder = LTDecoder.pseudo_decoder(number_of_chunks,
                                       read_all_before_decode=True)
    encoder.set_overhead_limit(overhead)
    result, numberOfEncodedPackets, droppedCount, solvedCount = blackbox(
        encoder, decoder, droprate=droprate)
    end = time.time() - start
    print("Blackbox-Decode " + ("successful" if result else "NOT successful") +
          " after " + str(round(end, 4)) + " sec.")
    return [
        dist.get_config_string(),
        result,
        numberOfEncodedPackets,
        droppedCount,
        solvedCount,
        round(end, 4),
        number_of_chunks,
    ]
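
A minimal driver sketch for the test above, assuming blackboxTest and its imports are already in scope; the input path (reused from Example #8), the droprate sweep, and the CSV layout are illustrative only.

import csv

if __name__ == "__main__":
    rows = []
    for droprate in [0.01, 0.02, 0.05]:
        # each call returns [dist_config, result, encoded, dropped, solved, seconds, chunks]
        rows.append(blackboxTest("../.INFILES/b_lq.webm",
                                 number_of_chunks=800,
                                 droprate=droprate,
                                 seed=2,
                                 overhead=0.05))
    with open("blackbox_results.csv", "w", newline="") as out:
        writer = csv.writer(out)
        writer.writerow(["dist", "result", "encoded_packets", "dropped",
                         "solved", "seconds", "chunks"])
        writer.writerows(rows)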
Example #2
def blackboxLTTest(file,
                   number_of_chunks=800,
                   seed=2,
                   chunk_size=0,
                   overhead=0.20,
                   scale=1.0):
    print(bcolors.OK + "Starting Blackbox Test with " + str(number_of_chunks) +
          " Chunks" + bcolors.ENDC)
    start = time.time()
    dist = RobustSolitonDistribution(S=number_of_chunks, seed=seed)
    algo_type.clear()
    algo_type.append("LT_" + str(number_of_chunks) + "_" +
                     str(dist.get_config_string()))
    encoder = LTEncoder(file, number_of_chunks, dist, chunk_size=chunk_size)
    encoder.set_overhead_limit(overhead)
    decoder = LTDecoder.pseudo_decoder(number_of_chunks)
    decoder.set_read_all_before_decode(True)

    result, dec_input, invalid_drop = blackbox(encoder, decoder, scale=scale)
    end = time.time() - start
    print(bcolors.BLUE + "Blackbox-Decode " +
          (bcolors.OK + "successful" if result else bcolors.ERR +
           "NOT successful") + bcolors.END + bcolors.BLUE + " after " +
          str(round(end, 4)) + " sec." + bcolors.ENDC)
    return [
        dist.get_config_string(),
        result,
        number_of_chunks,
        dec_input,
        invalid_drop,
        round(end, 4),
    ]
Example #3
def decode(file,
           error_correction=nocode,
           null_is_terminator=False,
           mode_1_bmp=False,
           number_of_chunks=STATIC_NUM_CHUNKS,
           use_header_chunk=False,
           id_len_format=ID_LEN_FORMAT,
           number_of_chunks_len_format=NUMBER_OF_CHUNKS_LEN_FORMAT,
           packet_len_format=PACKET_LEN_FORMAT,
           crc_len_format=CRC_LEN_FORMAT,
           read_all=READ_ALL_BEFORE_DECODER,
           distribution_cfg_str=""):
    dist = ErlichZielinskiRobustSolitonDistribution(number_of_chunks,
                                                    seed=2)
    if distribution_cfg_str != "":
        # parse distribution_cfg_str and create distribution with the defined settings...
        splt = distribution_cfg_str.split("_")
        mode = splt[1]
        if mode == "ErlichZielinskiRobustSoliton":
            k = int(splt[2].split("=")[1])
            delta = float(splt[3].split("=")[1])
            c = float(splt[4].split("=")[1])
            dist = ErlichZielinskiRobustSolitonDistribution(k,
                                                            delta,
                                                            c,
                                                            seed=2)
        elif mode == "RobustSoliton":
            k = int(splt[2].split("=")[1])
            delta = float(splt[3].split("=")[1])
            dist = RobustSolitonDistribution(number_of_chunks,
                                             k,
                                             delta,
                                             seed=2)
        elif mode == "IdealSoliton":
            dist = IdealSolitonDistribution(number_of_chunks, seed=2)
    """try:
        decoder = LTBPDecoder(file, error_correction=error_correction, use_headerchunk=HEADER_CHUNK,
                              static_number_of_chunks=STATIC_NUM_CHUNKS, implicit_mode=IMPLICIT_MODE, dist=dist)
        print("[1/2] Approximation Decode")
        _internal(decoder)
        if not decoder.is_decoded():
            print("[2/2] Approximation Decode")
            _internal(decoder)
        decoder.saveDecodedFile(null_is_terminator=NULL_IS_TERMINATOR)
    except Exception as e:"""
    print("[X/2] Falling back to Gauss-Mode")
    print("Falling back to Gauss-Mode")
    decoder = LTDecoder(file,
                        error_correction=error_correction,
                        use_headerchunk=use_header_chunk,
                        static_number_of_chunks=number_of_chunks,
                        implicit_mode=IMPLICIT_MODE,
                        dist=dist)
    decoder.read_all_before_decode = read_all
    decoder.decode(number_of_chunks_len_format=number_of_chunks_len_format,
                   seed_len_format=id_len_format,
                   degree_len_format="H")
    decoder.solve()
    decoder.saveDecodedFile(null_is_terminator=null_is_terminator,
                            print_to_output=PRINT_TO_OUTPUT)
Example #4
def vergleich():
    S = 50
    for a in ["log", ""]:
        delt = 0.1
        robust = RobustSolitonDistribution(S=S, K=8, delta=delt, seed=0)
        print(robust.pre_comp_dist)
        if a == "log":
            plt.semilogy([0.0] + robust.pre_comp_dist,
                         label=r"K=8,   $\delta$ = 0.1")
        else:
            plt.plot([0.0] + robust.pre_comp_dist,
                     label=r"K=8,   $\delta$ = 0.1")

        robust = RobustSolitonDistribution(S=S, K=8, delta=1.0, seed=0)
        print(robust.pre_comp_dist)
        if a == "log":
            plt.semilogy([0.0] + robust.pre_comp_dist,
                         label=r"K=8,   $\delta$ = 1.0")
        else:
            plt.plot([0.0] + robust.pre_comp_dist,
                     label=r"K=8,   $\delta$ = 1.0")

        robust = RobustSolitonDistribution(S=S, K=15, delta=0.5, seed=0)
        print(robust.pre_comp_dist)
        if a == "log":
            plt.semilogy([0.0] + robust.pre_comp_dist,
                         label=r"K=15, $\delta$ = 0.5")
        else:
            plt.plot([0.0] + robust.pre_comp_dist,
                     label=r"K=15, $\delta$ = 0.5")

        plt.ylabel("Probability")
        plt.xlabel("Degree")
        manager = plt.get_current_fig_manager()
        # manager.resize(*manager.window.maxsize())
        plt.grid(True)
        plt.tight_layout()
        plt.legend()
        plt.show(block=False)
        plt.savefig(
            "../plotDists/Vergleich_RobustSolitonS50_K8_delta" + str(delt) +
            "_" + a + ".pdf",
            bbox_inches="tight",
        )
        plt.savefig("../plotDists/Vergleich_RobustSolitonS50_K8_delta" +
                    str(delt) + "_" + a + ".svg")
        plt.close()
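
For reference, the curves compared above follow Luby's robust soliton construction. The sketch below computes the textbook probability mass function over degrees 1..k; it only illustrates what pre_comp_dist presumably holds, and the (c, delta) parametrization shown here does not necessarily match this library's RobustSolitonDistribution(S, K, delta) arguments.

import math

def robust_soliton_pmf(k, c=0.05, delta=0.5):
    # Ideal soliton: rho(1) = 1/k, rho(d) = 1/(d*(d-1)) for d >= 2.
    rho = [0.0] * (k + 1)
    rho[1] = 1.0 / k
    for d in range(2, k + 1):
        rho[d] = 1.0 / (d * (d - 1))
    # Robust add-on tau(d) with a spike near d = k/R, where R = c * ln(k/delta) * sqrt(k).
    R = c * math.log(k / delta) * math.sqrt(k)
    spike = max(1, min(k, int(round(k / R))))
    tau = [0.0] * (k + 1)
    for d in range(1, spike):
        tau[d] = R / (d * k)
    tau[spike] = R * math.log(R / delta) / k
    # Normalize so the degree probabilities sum to 1.
    beta = sum(rho[d] + tau[d] for d in range(1, k + 1))
    return [(rho[d] + tau[d]) / beta for d in range(1, k + 1)]

print(robust_soliton_pmf(50)[:5])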
Example #5
def blackboxLTTest(file, number_of_chunks=800, seed=2, chunk_size=0):
    print("Starting Blackbox Test with " + str(number_of_chunks) + " Chunks")
    start = time.time()
    dist = RobustSolitonDistribution(S=number_of_chunks, seed=seed)
    pseudo = LTBPDecoder.pseudo_decoder(number_of_chunks)
    encoder = LTEncoder(file,
                        number_of_chunks,
                        dist,
                        pseudo_decoder=pseudo,
                        chunk_size=chunk_size)
    decoder = LTDecoder.pseudo_decoder(number_of_chunks)

    result, numberOfEncodedPackets, droppedCount = blackbox(encoder, decoder)
    end = time.time() - start
    print("Blackbox-Decode " + ("successful" if result else "NOT successful") +
          " after " + str(round(end, 4)) + " sec.")
    return [
        dist.get_config_string(),
        result,
        numberOfEncodedPackets,
        droppedCount,
        round(end, 4),
        number_of_chunks,
    ]
Example #6
def main():
    S = 50
    for a in ["log", ""]:
        for delt in np.arange(0.1, 1.1, 0.1):
            robust = RobustSolitonDistribution(S=S, K=8, delta=delt, seed=0)
            print(robust.pre_comp_dist)
            if a == "log":
                plt.semilogy([0.0] + robust.pre_comp_dist)
            else:
                plt.plot([0.0] + robust.pre_comp_dist)
            plt.ylabel("Probability")
            plt.xlabel("Degree")
            manager = plt.get_current_fig_manager()
            # manager.resize(*manager.window.maxsize())
            plt.grid(True)
            plt.tight_layout()
            plt.show(block=False)
            plt.savefig(
                "../plotDists/RobustSolitonS50_K8_delta" + str(delt) + "_" +
                a + ".pdf",
                bbox_inches="tight",
            )
            plt.savefig("../plotDists/RobustSolitonS50_K8_delta" + str(delt) +
                        "_" + a + ".svg")

            plt.close()

    S = 50
    for a in ["log", ""]:
        robust = IdealSolitonDistribution(S=S, seed=0)
        print(robust.pre_comp_dist)
        if a == "log":
            plt.semilogy([0.0] + robust.pre_comp_dist)
        else:
            plt.plot([0.0] + robust.pre_comp_dist)
        plt.ylabel("Probability")
        plt.xlabel("Degree")
        manager = plt.get_current_fig_manager()
        manager.resize(*manager.window.maxsize())
        plt.grid(True)
        plt.tight_layout()
        plt.show(block=False)
        plt.savefig("../plotDists/IdealSolitonS50_" + a + ".pdf",
                    bbox_inches="tight")
        plt.savefig("../plotDists/IdealSolitonS50_" + a + ".svg")

        plt.close()
Example #7
    def encode(file,
               error_correction=nocode,
               insert_header=INSERT_HEADER,
               save_number_of_chunks=NUMBER_OF_CHUNKS_IN_PACKET,
               save_as_fasta=True,
               save_as_zip=True,
               overhead=5.0,
               upper_bound=1.0):
        number_of_chunks = Encoder.get_number_of_chunks_for_file_with_chunk_size(
            file, chunk_size=CHUNK_SIZE, insert_header=insert_header)
        print("Number of Chunks=%s" % number_of_chunks)
        # dist = ErlichZielinskiRobustSolitonDistribution(number_of_chunks, seed=2)
        # dist = IdealSolitonDistribution(number_of_chunks, seed=2)
        dist = RobustSolitonDistribution(number_of_chunks, seed=2)
        encoder = LTEncoder(
            file,
            number_of_chunks,
            dist,
            insert_header=insert_header,
            rules=DNARules_ErlichZielinski(),
            error_correction=error_correction,
            number_of_chunks_len_format="H",
            id_len_format="H",
            used_packets_len_format="H",
            save_number_of_chunks_in_packet=save_number_of_chunks,
            implicit_mode=IMPLICIT_MODE,
            drop_upper_bound=upper_bound)
        encoder.set_overhead_limit(overhead)
        encoder.encode_to_packets()
        if save_as_fasta:
            encoder.save_packets_fasta(file_ending="_LT",
                                       seed_is_filename=True)
        elif save_as_zip:
            encoder.save_packets_zip(save_as_dna=True,
                                     file_ending="_LT",
                                     seed_is_filename=True)
        else:
            encoder.save_packets(True,
                                 save_as_dna=True,
                                 seed_is_filename=True,
                                 clear_output=True)

        encoder.save_packets(split_to_multiple_files=True, save_as_dna=True)
        print("Number of Chunks=%s" % encoder.number_of_chunks)
        return encoder
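
A hypothetical invocation of the encode routine above, assuming it is reachable as a plain function (it takes no self) and that the module-level constants it references (CHUNK_SIZE, INSERT_HEADER, NUMBER_OF_CHUNKS_IN_PACKET, IMPLICIT_MODE) are defined; the input path is reused from Example #8.

if __name__ == "__main__":
    # Encode into LT packets and write them as FASTA (the default branch above).
    enc = encode("../.INFILES/b_lq.webm", save_as_fasta=True, overhead=5.0)
    print("Encoded %d chunks into %d packets" %
          (enc.number_of_chunks, len(enc.encodedPackets)))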
Example #8
def main():
    file = "../.INFILES/b_lq.webm"

    for number_of_chunks in [500, 600, 700, 800, 900, 1000, 1100, 1200]:
        # for epsilon in [0.03]:
        for overhead in np.arange(0.05, 0.50, 0.01):
            for _ in range(3):
                dist = RobustSolitonDistribution(S=number_of_chunks)
                encoder = LTEncoder(file, number_of_chunks, dist)
                encoder.set_overhead_limit(overhead)
                encoder.encode_to_packets()
                approx_decoder = LTBPDecoder(None)
                start = time.time()
                for packet in encoder.encodedPackets:
                    approx_decoder.input_new_packet(packet)
                approx_decoder.solve()
                end = time.time() - start
                print("Approx.," + file + "," + str(number_of_chunks) + "," +
                      str(len(encoder.encodedPackets)) + "," +
                      str(approx_decoder.is_decoded()) + "," + str(end))
Example #9
def main(file):
    print("###### LT Codec number_of_chunks = 700 ######")
    print("##### Ideal Soliton Distribution #####")
    print("### LT without prioritized Packets ###")
    start = time.time()
    number_of_chunks = 700
    dist = IdealSolitonDistribution(number_of_chunks, seed=2)
    encoder = LTEncoder(file, number_of_chunks, dist)
    encoder.encode_to_packets()
    end = time.time() - start
    print("Finished encoding after " + str(round(end, 4)) + " sec. " +
          str(len(encoder.get_encoded_packets())) + " Packets encoded.\n")

    print("### LT without prioritized Packets with pseudo_decoder ###")
    start = time.time()
    number_of_chunks = 700
    dist = IdealSolitonDistribution(number_of_chunks, seed=2)
    pseudo = LTBPDecoder.pseudo_decoder(number_of_chunks)
    encoder = LTEncoder(file, number_of_chunks, dist, pseudo_decoder=pseudo)
    encoder.encode_to_packets()
    end = time.time() - start
    print("Finished encoding after " + str(round(end, 4)) + " sec. " +
          str(len(encoder.get_encoded_packets())) + " Packets encoded.\n")

    print("### LT with prioritized Packets ###")
    start = time.time()
    number_of_chunks = 700
    dist = IdealSolitonDistribution(number_of_chunks, seed=2)
    encoder = LTEncoder(file,
                        number_of_chunks,
                        dist,
                        prioritized_packets=[1, 3, 5])
    encoder.encode_to_packets()
    end = time.time() - start
    print("Finished encoding after " + str(round(end, 4)) + " sec. " +
          str(len(encoder.get_encoded_packets())) + " Packets encoded.\n")

    print("### LT with prioritized Packets AND pseudo_decoder ###")
    start = time.time()
    number_of_chunks = 700
    dist = IdealSolitonDistribution(number_of_chunks, seed=2)
    pseudo = LTBPDecoder.pseudo_decoder(number_of_chunks)
    encoder = LTEncoder(file,
                        number_of_chunks,
                        dist,
                        pseudo_decoder=pseudo,
                        prioritized_packets=[1, 3, 5])
    encoder.encode_to_packets()
    end = time.time() - start
    print("Finished encoding after " + str(round(end, 4)) + " sec. " +
          str(len(encoder.get_encoded_packets())) + " Packets encoded.\n")
    print("### LT with prioritized Packets AND Gauss-pseudo_decoder ###")
    start = time.time()
    number_of_chunks = 700
    dist = IdealSolitonDistribution(number_of_chunks, seed=2)
    pseudo = LTDecoder.pseudo_decoder(number_of_chunks)
    encoder = LTEncoder(file,
                        number_of_chunks,
                        dist,
                        pseudo_decoder=pseudo,
                        prioritized_packets=[1, 3, 5])
    encoder.encode_to_packets()
    end = time.time() - start
    print("Finished encoding after " + str(round(end, 4)) + " sec. " +
          str(len(encoder.get_encoded_packets())) + " Packets encoded.\n")

    print("##### Robust Soliton Distribution #####")
    print("### LT without prioritized Packets ###")
    start = time.time()
    number_of_chunks = 700
    dist = RobustSolitonDistribution(number_of_chunks, seed=2)
    encoder = LTEncoder(file, number_of_chunks, dist)
    encoder.encode_to_packets()
    end = time.time() - start
    print("Finished encoding after " + str(round(end, 4)) + " sec. " +
          str(len(encoder.get_encoded_packets())) + " Packets encoded.\n")

    print("### LT without prioritized Packets with Gauss-pseudo_decoder ###")
    start = time.time()
    number_of_chunks = 700
    dist = RobustSolitonDistribution(number_of_chunks, seed=2)
    pseudo = LTDecoder.pseudo_decoder(number_of_chunks)
    encoder = LTEncoder(file, number_of_chunks, dist, pseudo_decoder=pseudo)
    encoder.encode_to_packets()
    end = time.time() - start
    print("Finished encoding after " + str(round(end, 4)) + " sec. " +
          str(len(encoder.get_encoded_packets())) + " Packets encoded.\n")

    print("### LT with prioritized Packets ###")
    start = time.time()
    number_of_chunks = 700
    dist = RobustSolitonDistribution(number_of_chunks, seed=2)
    encoder = LTEncoder(file,
                        number_of_chunks,
                        dist,
                        prioritized_packets=[1, 3, 5])
    encoder.encode_to_packets()
    end = time.time() - start
    print("Finished encoding after " + str(round(end, 4)) + " sec. " +
          str(len(encoder.get_encoded_packets())) + " Packets encoded.\n")

    print("### LT with prioritized Packets AND Gauss-pseudo_decoder ###")
    start = time.time()
    number_of_chunks = 700
    dist = RobustSolitonDistribution(number_of_chunks, seed=2)
    pseudo = LTDecoder.pseudo_decoder(number_of_chunks)
    encoder = LTEncoder(file,
                        number_of_chunks,
                        dist,
                        pseudo_decoder=pseudo,
                        prioritized_packets=[1, 3, 5])
    encoder.encode_to_packets()
    end = time.time() - start
    print("Finished encoding after " + str(round(end, 4)) + " sec. " +
          str(len(encoder.get_encoded_packets())) + " Packets encoded.\n")
    print(
        "###### Online Codec - number_of_chunks = 2500, eps=0.01, quality=3 ######"
    )
    print("### Online without pseudo_decoder ###")
    start = time.time()
    number_of_chunks = 2500
    epsilon = 0.01
    quality = 3
    dist = OnlineDistribution(epsilon)
    # infer number_of_chunks from the distribution:
    number_of_chunks = dist.get_size()
    pseudo = OnlineBPDecoder.pseudo_decoder()
    encoder = OnlineEncoder(file, number_of_chunks, dist, epsilon, quality)
    encoder.encode_to_packets()
    end = time.time() - start
    print("Finished encoding after " + str(round(end, 4)) + " sec. " +
          str(len(encoder.get_encoded_packets())) + " Packets encoded.\n")

    print("### Online with pseudo_decoder ###")
    start = time.time()
    number_of_chunks = 2500
    epsilon = 0.01
    quality = 3
    dist = OnlineDistribution(epsilon)
    # infer number_of_chunks from the distribution:
    number_of_chunks = dist.get_size()
    pseudo = OnlineBPDecoder.pseudo_decoder()
    encoder = OnlineEncoder(file,
                            number_of_chunks,
                            dist,
                            epsilon,
                            quality,
                            pseudo_decoder=pseudo)
    encoder.encode_to_packets()
    end = time.time() - start
    print("Finished encoding after " + str(round(end, 4)) + " sec. " +
          str(len(encoder.get_encoded_packets())) + " Packets encoded.\n")
Example #10
def test_suite(as_dna, decoder_instance, distribution, use_header,
               implicit_mode):
    dir_path = os.getcwd()
    try:
        os.remove(dir_path + "/" + file)
    except FileNotFoundError:
        print("Not deleting, file did not exist")
    shutil.copyfile(dir_path + "/" + cmp_file, dir_path + "/" + file)
    chunksize = 200
    number_of_chunks = Encoder.get_number_of_chunks_for_file_with_chunk_size(
        file, chunksize)
    pseudo_decoder = decoder_instance.pseudo_decoder(number_of_chunks)
    if distribution == "robust":
        dist = RobustSolitonDistribution(S=number_of_chunks, delta=0.2, seed=2)
    elif distribution == "ideal":
        dist = IdealSolitonDistribution(S=number_of_chunks, seed=2)
    else:
        dist = ErlichZielinskiRobustSolitonDistribution(k=number_of_chunks,
                                                        delta=0.2,
                                                        seed=2)
    rules = FastDNARules() if as_dna else None
    encoder = LTEncoder(file,
                        number_of_chunks,
                        dist,
                        chunk_size=chunksize,
                        pseudo_decoder=pseudo_decoder,
                        rules=rules,
                        insert_header=use_header,
                        number_of_chunks_len_format="H",
                        id_len_format="H",
                        used_packets_len_format="H",
                        implicit_mode=implicit_mode)
    encoder.encode_to_packets()
    encoder.save_packets(split_to_multiple_files=True, save_as_dna=as_dna)
    assert pseudo_decoder.is_decoded() and pseudo_decoder.getSolvedCount(
    ) == encoder.number_of_chunks
    assert os.path.exists(out_dir)
    decoder = decoder_instance(out_dir,
                               use_headerchunk=use_header,
                               dist=dist,
                               implicit_mode=implicit_mode)
    decoder.decodeFolder(number_of_chunks_len_format="H",
                         seed_len_format="H",
                         degree_len_format="H")
    assert decoder.is_decoded() and decoder.getSolvedCount(
    ) == encoder.number_of_chunks
    os.remove(file)
    decoder.saveDecodedFile(print_to_output=False)
    if not use_header:
        out_file = "DEC_LT_" + file
    else:
        out_file = file
    assert os.path.exists(out_file) and filecmp.cmp(out_file, cmp_file)
    if decoder_instance == LTBPDecoder:
        # since ApproxDecoder defines an upper bound Gauss-Decoder MUST be able to decode!
        decoder = LTDecoder(out_dir,
                            use_headerchunk=use_header,
                            dist=dist,
                            implicit_mode=implicit_mode)
        decoder.decodeFolder(number_of_chunks_len_format="H",
                             seed_len_format="H",
                             degree_len_format="H")
        assert (decoder.is_decoded()
                and decoder.getSolvedCount() == encoder.number_of_chunks)
        os.remove(out_file)
        decoder.saveDecodedFile(print_to_output=False)
        assert os.path.exists(out_file) and filecmp.cmp(out_file, cmp_file)
    shutil.rmtree(out_dir)
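
A sketch of how test_suite might be driven with pytest; the parameter grid and the test name are hypothetical, while the decoder classes and distribution labels come from the function body above.

import itertools

import pytest

@pytest.mark.parametrize(
    "as_dna,decoder_instance,distribution,use_header,implicit_mode",
    list(itertools.product([False, True],
                           [LTDecoder, LTBPDecoder],
                           ["robust", "ideal", "erlichzielinski"],
                           [False, True],
                           [False, True])))
def test_lt_roundtrip(as_dna, decoder_instance, distribution, use_header,
                      implicit_mode):
    test_suite(as_dna, decoder_instance, distribution, use_header,
               implicit_mode)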
Example #11
def test_size_shrink():
    dir_path = os.getcwd()
    try:
        os.remove(dir_path + "/" + file)
    except FileNotFoundError:
        print("Not deleting, file did not exist")
    shutil.copyfile(dir_path + "/" + cmp_file, dir_path + "/" + file)
    chunksize = 200
    number_of_chunks = Encoder.get_number_of_chunks_for_file_with_chunk_size(
        file, chunksize)
    dist = RobustSolitonDistribution(S=number_of_chunks, delta=0.2, seed=2)
    encoder1 = LTEncoder(file,
                         number_of_chunks,
                         dist,
                         chunk_size=chunksize,
                         rules=None,
                         insert_header=False,
                         number_of_chunks_len_format="H",
                         id_len_format="H",
                         used_packets_len_format="H",
                         implicit_mode=False)
    number_of_chunks = encoder1.number_of_chunks
    encoder1.prepareEncoder()
    packet1 = encoder1.create_new_packet()

    encoder2 = LTEncoder(file,
                         number_of_chunks,
                         dist,
                         chunk_size=chunksize,
                         rules=None,
                         insert_header=False,
                         number_of_chunks_len_format="H",
                         id_len_format="H",
                         used_packets_len_format="H",
                         implicit_mode=True)
    encoder2.prepareEncoder()
    packet2 = encoder2.create_new_packet()
    assert len(packet1.get_dna_struct(True)) > len(
        packet2.get_dna_struct(True))

    encoder3 = LTEncoder(file,
                         number_of_chunks,
                         dist,
                         chunk_size=chunksize,
                         rules=None,
                         insert_header=False,
                         number_of_chunks_len_format="I",
                         id_len_format="I",
                         used_packets_len_format="I",
                         implicit_mode=False)
    encoder3.prepareEncoder()
    packet3 = encoder3.create_new_packet()

    encoder4 = LTEncoder(file,
                         number_of_chunks,
                         dist,
                         chunk_size=chunksize,
                         rules=None,
                         insert_header=False,
                         number_of_chunks_len_format="I",
                         id_len_format="I",
                         used_packets_len_format="I",
                         implicit_mode=True)
    encoder4.prepareEncoder()
    packet4 = encoder4.create_new_packet()
    assert len(packet3.get_dna_struct(True)) > len(
        packet4.get_dna_struct(True))
    assert len(packet3.get_dna_struct(True)) > len(
        packet1.get_dna_struct(True))
    assert len(packet4.get_dna_struct(True)) > len(
        packet2.get_dna_struct(True))