Code example #1
File: BPDecoder.py Project: umr-ds/NOREC4DNA
 def updatePackets(self, packet: Packet) -> bool:
     if (packet.get_degree() == 1 and
             next(iter(packet.get_used_packets())) < self.number_of_chunks
             and (next(iter(packet.get_used_packets())) != 0
                  or not self.use_headerchunk)):
         # Directly add Packets that are degree == 1 (except for HeaderPacket)
         self.decodedPackets.add(packet)
         return self.is_decoded()
     self.queue.append(packet)
     self.solve()
     return self.is_decoded()  # mirror the degree-1 branch so the annotated bool is always returned
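
The check above is the core of the belief-propagation decoder: a packet of degree 1 carries exactly one chunk in the clear, so it can be stored immediately, while every other packet is queued for solve(). A minimal, self-contained sketch of that rule (hypothetical names, not the NOREC4DNA API):

# Minimal sketch (hypothetical names): a degree-1 packet maps directly to one chunk,
# higher-degree packets have to wait until they can be reduced.
from typing import Dict, Set, Tuple

def take_degree_one(packet: Tuple[Set[int], bytes], decoded: Dict[int, bytes]) -> bool:
    """packet is a (used_chunk_ids, payload) pair; returns True if it was consumed directly."""
    used, payload = packet
    if len(used) == 1:                  # degree == 1 -> the payload *is* the chunk
        decoded[next(iter(used))] = payload
        return True
    return False                        # degree > 1 -> needs further reduction

decoded: Dict[int, bytes] = {}
print(take_degree_one(({2}, b"CHUNK-2"), decoded), decoded)  # True {2: b'CHUNK-2'}
print(take_degree_one(({0, 3}, b"\x17" * 7), decoded))       # False
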
Code example #2
File: LTEncoder.py Project: umr-ds/NOREC4DNA
    def create_new_packet(self, seed=None) -> Packet:
        if seed is None:
            seed: int = self.generate_new_checkblock_id(self.sequential_seed)
        if self.implicit_mode:  # in implicit mode we want to be able to derive the used chunks from the seed alone
            self.dist.set_seed(seed)
        degree: int = self.dist.getNumber()

        packet_numbers: typing.Set[int] = self.choose_packet_numbers(degree,
                                                                     seed=seed)
        chunks: typing.List[bytes] = [self.chunks[i] for i in packet_numbers]
        self.setOfEncodedPackets |= set(packet_numbers)
        return Packet(
            listXOR(chunks),
            packet_numbers,
            self.number_of_chunks,
            read_only=False,
            seed=seed,
            error_correction=self.error_correction,
            implicit_mode=self.implicit_mode,
            packet_len_format=self.packet_len_format,
            crc_len_format=self.crc_len_format,
            number_of_chunks_len_format=self.number_of_chunks_len_format,
            used_packets_len_format=self.used_packets_len_format,
            id_len_format=self.id_len_format,
            save_number_of_chunks_in_packet=self.save_number_of_chunks_in_packet)
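
create_new_packet seeds the degree distribution, draws a degree, picks that many chunk indices and stores their XOR as the packet payload (listXOR(chunks)). A self-contained sketch of that XOR folding, using a hypothetical xor_chunks helper rather than the repository's listXOR:

# Sketch of the XOR combination an LT encoder performs (hypothetical helper, not the
# NOREC4DNA listXOR implementation): fold equal-length chunks together bytewise, so
# that XOR-ing the same set again restores a missing chunk.
from functools import reduce

def xor_chunks(chunks: list) -> bytes:
    return bytes(reduce(lambda a, b: bytes(x ^ y for x, y in zip(a, b)), chunks))

c0, c1 = b"ABCD", b"1234"
payload = xor_chunks([c0, c1])           # encoded payload for used_packets == {0, 1}
assert xor_chunks([payload, c1]) == c0   # a decoder that already knows c1 recovers c0
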
Code example #3
 def __init__(self, packet: Packet, last_chunk_len_format: str = "I"):
     assert packet.get_used_packets().issubset({0}), "only first packet can be HeaderPacket"
     if isinstance(packet.data, numpy.ndarray):
         self.data: bytes = packet.data.tobytes()
     else:
         self.data: bytes = packet.data
     self.last_chunk_len_format: str = last_chunk_len_format
     self.last_chunk_length, self.file_name = self.decode_header_info()
Code example #4
 def input_new_packet(self, packet: Packet) -> bool:
     self.pseudoCount += 1
     packets: typing.List[bool] = packet.get_bool_array_used_packets()
     if self.count:
         for i in range(len(packets)):
             if i in self.counter.keys():
                 if packets[i]:
                     self.counter[i] += 1
             else:
                 self.counter[i] = 1
     if self.GEPP is None:
         self.GEPP = GEPP(np.array([packet.get_bool_array_used_packets()], dtype=bool),
                          np.array([[packet.get_data()]], dtype=bytes), )
     else:
         self.GEPP.addRow(packet.get_bool_array_used_packets(), np.frombuffer(packet.get_data(), dtype="uint8"), )
     if self.isPseudo and not self.read_all_before_decode and self.GEPP.isPotentionallySolvable():
         return self.GEPP.solve(partial=False)
     return False
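
Each accepted packet contributes one equation: its boolean used-chunk vector becomes a row of the coefficient matrix and its payload the corresponding right-hand side, and GEPP (Gaussian elimination with partial pivoting over GF(2)) solves the system once it looks solvable. A toy sketch of that elimination, where adding two rows means XOR-ing both the coefficient vectors and the payloads (illustration only, not the GEPP class):

# Toy GF(2) elimination: rows[i] marks which chunks were XOR-ed into data[i].
import numpy as np

def solve_gf2(rows: np.ndarray, data: list) -> list:
    rows, data = rows.copy(), list(data)
    n = rows.shape[1]                                    # number of chunks to recover
    for col in range(n):
        pivot = next(r for r in range(col, len(rows)) if rows[r][col])  # assumes solvability
        rows[[col, pivot]] = rows[[pivot, col]]
        data[col], data[pivot] = data[pivot], data[col]
        for r in range(len(rows)):
            if r != col and rows[r][col]:                # eliminate this column everywhere else
                rows[r] ^= rows[col]
                data[r] = bytes(a ^ b for a, b in zip(data[r], data[col]))
    return data[:n]

rows = np.array([[1, 1], [0, 1], [1, 0]], dtype=bool)    # used-chunk indicator vectors
data = [bytes([ord("A") ^ ord("B")]), b"B", b"A"]        # XOR-combined payloads
print(solve_gf2(rows, data))                             # [b'A', b'B']
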
Code example #5
File: BPDecoder.py Project: umr-ds/NOREC4DNA
 def reduceAll(self, packet: Packet) -> bool:
     # look up all packets this packet could reduce (i.e. packets whose used packets are a superset of this packet's)
     fin: bool = False
     lookup: typing.List[int] = [
         i for i in self.degreeToPacket.keys() if packet.get_degree() < i
     ]
     for i in lookup:
         if not isinstance(self.degreeToPacket[i], set):
             self.degreeToPacket[i] = set()
         for p in self.degreeToPacket[i].copy():
             p_used = p.get_used_packets()
             pack_used = packet.get_used_packets()
             if len(pack_used) < len(p_used) and pack_used.issubset(p_used):
                 self.degreeToPacket[i].remove(p)
                 degree = self.compareAndReduce(p, packet)
                 if isinstance(degree, bool) and degree is True:
                     return degree
     degree: int = packet.get_degree()
     lookup: typing.List[int] = [
         i for i in self.degreeToPacket.keys() if packet.get_degree() > i
     ]
     for i in lookup:
         if not isinstance(self.degreeToPacket[i], set):
             self.degreeToPacket[i] = set()
         for p in self.degreeToPacket[i].copy():
             p_used = p.get_used_packets()
             pack_used = packet.get_used_packets()
             if len(pack_used) > len(p_used) and p_used.issubset(pack_used):
                 try:
                     self.degreeToPacket[degree].remove(packet)
                     degree = self.compareAndReduce(packet, p)
                     if isinstance(degree, bool) and degree is True:
                         return degree
                 except:
                     continue
                 # If we already reduced a Packet with the same used_packets, there is no need to do it again
     return fin or self.is_decoded()
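
reduceAll searches for subset relations between the used-packet sets: whenever one packet's set is contained in another's, compareAndReduce XORs the smaller packet into the larger one, which removes those chunks and lowers the larger packet's degree until, ideally, new degree-1 packets appear. A sketch of that single reduction step (hypothetical helper, not the repository code):

# Sketch of one peeling/reduction step: if packet A's used chunks are a proper subset of
# packet B's, XOR-ing A into B removes A's chunks from B and lowers B's degree.
from typing import Set, Tuple

SketchPacket = Tuple[Set[int], bytes]   # (used chunk ids, XOR-combined payload)

def reduce_with(big: SketchPacket, small: SketchPacket) -> SketchPacket:
    big_used, big_data = big
    small_used, small_data = small
    assert small_used < big_used, "reduction only works on a proper subset"
    new_data = bytes(a ^ b for a, b in zip(big_data, small_data))
    return big_used - small_used, new_data

a, b = b"A", b"B"
small: SketchPacket = ({1}, b)                                    # degree 1: chunk 1 in the clear
big: SketchPacket = ({0, 1}, bytes(x ^ y for x, y in zip(a, b)))  # degree 2: chunk0 XOR chunk1
print(reduce_with(big, small))                                    # ({0}, b'A') -> new degree-1 packet
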
Code example #6
    def parse_raw_packet(self, packet: bytes, crc_len_format: str = "L", number_of_chunks_len_format: str = "I",
                         degree_len_format: str = "I", seed_len_format: str = "I") -> typing.Union[str, Packet]:
        crc_len = -struct.calcsize("<" + crc_len_format)
        if self.error_correction.__code__.co_name == crc32.__code__.co_name:
            payload: bytes = packet[:crc_len]
            crc: int = struct.unpack("<" + crc_len_format, packet[crc_len:])[0]
            calced_crc: int = calc_crc(payload)
            if crc != calced_crc:  # If the Packet is corrupt, try next one
                print("[-] CRC-Error - " + str(hex(crc)) + " != " + str(hex(calced_crc)))
                self.corrupt += 1
                return "CORRUPT"
        else:
            crc_len = None
            try:
                packet = self.error_correction(packet)
            except:
                self.corrupt += 1
                return "CORRUPT"
        if self.implicit_mode:
            degree_len_format = ""
        struct_str: str = "<" + number_of_chunks_len_format + degree_len_format + seed_len_format
        struct_len: int = struct.calcsize(struct_str)
        len_data: typing.Union[int, typing.Tuple[int, int], typing.Tuple[int, int, int]] = struct.unpack(
            struct_str, packet[0:struct_len])
        degree: typing.Optional[int] = None
        if self.static_number_of_chunks is None:
            if self.implicit_mode:
                number_of_chunks, seed = len_data
            else:
                number_of_chunks, degree, seed = len_data
            self.number_of_chunks = xor_mask(number_of_chunks, number_of_chunks_len_format)
        else:
            if self.implicit_mode:
                seed = len_data[0]  # struct.unpack always returns a tuple, even for a single field
            else:
                degree, seed = len_data
        seed: int = xor_mask(seed, seed_len_format)
        if degree is None:
            self.dist.set_seed(seed)
            degree: int = self.dist.getNumber()
        else:
            degree: int = xor_mask(degree, degree_len_format)
        used_packets = self.choose_packet_numbers(degree, seed)
        data = packet[struct_len:crc_len]
        self.correct += 1

        return Packet(data, used_packets, self.number_of_chunks, read_only=True, error_correction=self.error_correction,
                      save_number_of_chunks_in_packet=self.static_number_of_chunks is None)
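
parse_raw_packet first verifies the trailing CRC, then decodes the fixed-size header fields with a format string assembled from the individual length formats, and finally re-derives used_packets from seed and degree. A self-contained sketch of that pack/unpack round trip; it uses zlib.crc32 and skips the xor_mask step, so it illustrates the layout rather than the exact NOREC4DNA wire format:

# Sketch of a little-endian packet layout: number_of_chunks ("I"), degree ("I") and seed ("I")
# in front of the payload, plus a CRC32 ("L") over everything before it.
import struct
import zlib

number_of_chunks, degree, seed, payload = 42, 3, 1337, b"XOR-combined chunk data"
header_fmt = "<" + "I" + "I" + "I"              # number_of_chunks + degree + seed formats
body = struct.pack(header_fmt, number_of_chunks, degree, seed) + payload
raw_packet = body + struct.pack("<L", zlib.crc32(body))

# the receiving side mirrors parse_raw_packet: check the CRC, then unpack the header
crc_len = -struct.calcsize("<L")
assert struct.unpack("<L", raw_packet[crc_len:])[0] == zlib.crc32(raw_packet[:crc_len])
print(struct.unpack(header_fmt, raw_packet[:struct.calcsize(header_fmt)]))  # (42, 3, 1337)
print(raw_packet[struct.calcsize(header_fmt):crc_len])                      # the payload bytes
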
Code example #7
File: LTEncoder.py Project: umr-ds/NOREC4DNA
 def encodePriotizedPackets(self,
                            error_correction: typing.Callable = nocode):
     for num in self.prioritized_packets:
         new_pack = Packet(
             self.chunks[num], {num},
             self.number_of_chunks,
             read_only=False,
             implicit_mode=self.implicit_mode,
             error_correction=error_correction,
             packet_len_format=self.packet_len_format,
             crc_len_format=self.crc_len_format,
             number_of_chunks_len_format=self.number_of_chunks_len_format,
             used_packets_len_format=self.used_packets_len_format,
             id_len_format=self.id_len_format)
         if self.pseudo_decoder is not None:
             self.pseudo_decoder.input_new_packet(new_pack)
         self.encodedPackets.add(new_pack)
Code example #8
 def compareAndReduce(self, packet: Packet, other: Packet) -> typing.Union[bool, int]:
     if self.file is None:  # In case of PseudoDecode: DO NOT REALLY COMPUTE XOR
         packet.remove_packets(other.get_used_packets())
     else:
         packet.xor_and_remove_packet(other)
     degree = packet.get_degree()
     if (degree not in self.degreeToPacket) or (not isinstance(self.degreeToPacket[degree], set)):
         self.degreeToPacket[degree] = set()
     self.degreeToPacket[degree].add(packet)
     if self.is_decoded():
         return True
     self.queue.append(packet)
     return degree
Code example #9
 def saveDecodedFile(self, last_chunk_len_format: str = "I", null_is_terminator: bool = False,
                     print_to_output: bool = True) -> None:
     assert self.is_decoded(), "Can not save File: Unable to reconstruct."
     if self.use_headerchunk:
         self.headerChunk = HeaderChunk(Packet(self.GEPP.b[0], {0}, self.number_of_chunks, read_only=True),
                                        last_chunk_len_format=last_chunk_len_format)
     file_name = "DEC_" + os.path.basename(self.file) if self.file is not None else "LT.BIN"
     if self.headerChunk is not None:
         file_name = self.headerChunk.get_file_name().decode("utf-8")
     output_concat: bytes = b""
     file_name: str = file_name.split("\x00")[0]
     try:
         with open(file_name, "wb") as f:
             for x in self.GEPP.result_mapping:
                 if 0 != x or not self.use_headerchunk:
                     if self.number_of_chunks - 1 == x and self.use_headerchunk:
                         output: typing.Union[bytes, np.array] = \
                             self.GEPP.b[x][0][0:self.headerChunk.get_last_chunk_length()]
                         output_concat += output.tobytes()
                         f.write(output)
                     else:
                         if null_is_terminator:
                             splitter: str = self.GEPP.b[x].tostring().decode().split("\x00")
                             output = splitter[0].encode()
                             if type(output) == bytes:
                                 output_concat += output
                             else:
                                 output_concat += output.tobytes()
                             f.write(output)
                             if len(splitter) > 1:
                                 break  # since we are in null-terminator mode, we exit once we see the first 0-byte
                         else:
                             output = self.GEPP.b[x]
                             output_concat += output.tobytes()
                             f.write(output)
         print("Saved file as '" + str(file_name) + "'")
     except Exception as ex:
         raise ex
     if print_to_output:
         print("Result:")
         print(output_concat.decode("utf-8"))
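
saveDecodedFile writes the solved rows in chunk order, skips the header chunk (row 0) when one is used, and truncates the final chunk to the length recorded in the HeaderChunk, because the input file was padded to a multiple of the chunk size during encoding. A minimal sketch of that reassembly rule with hypothetical names:

# Sketch (hypothetical helper, not the decoder API): concatenate all chunks and cut the
# last one down to its real length, undoing the zero padding added during encoding.
from typing import List

def reassemble(chunks: List[bytes], last_chunk_length: int) -> bytes:
    return b"".join(chunks[:-1]) + chunks[-1][:last_chunk_length]

chunks = [b"HELLO WO", b"RLD!\x00\x00\x00\x00"]    # padded to a chunk size of 8
print(reassemble(chunks, last_chunk_length=4))     # b'HELLO WORLD!'
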
Code example #10
File: BPDecoder.py Project: umr-ds/NOREC4DNA
 def compareAndReduce(self, packet: Packet,
                      other: Packet) -> typing.Union[bool, int]:
     if self.file is None:
         packet.remove_packets(other.get_used_packets())
     else:
         packet.xor_and_remove_packet(other)
     degree = packet.get_degree()
     if (degree not in self.degreeToPacket) or (not isinstance(
             self.degreeToPacket[degree], set)):
         self.degreeToPacket[degree] = set()
     if degree == 1:
         [x] = packet.get_used_packets()  # unpacking -> fastest way to extract the element from the set
         if x > self.number_of_chunks:  # we got a new AUX-Packet
             raise RuntimeError("this should not have happened!")
         else:
             if x != 0 or not self.use_headerchunk:
                 self.decodedPackets.add(packet)  # Add Packet to decoded Packets
     self.degreeToPacket[degree].add(packet)
     if self.is_decoded():
         return True
     self.queue.append(packet)
     return degree
Code example #11
 def parallel_to_normal(par_packet,
                        error_correction,
                        dist,
                        quality=7,
                        epsilon=0.06):
     """
     Converts parallel packets either to RU10Packets, normal Packets or OnlinePackets based on the original class of
     the packet.
     :param par_packet:
     :param error_correction:
     :param dist:
     :param quality:
     :param epsilon:
     :return: Converted packet
     """
     if par_packet.get_org_class() == "RU10Packet":
         packet = RU10Packet(
             data=par_packet.data,
             used_packets=par_packet.used_packets,
             total_number_of_chunks=par_packet.total_number_of_chunks,
             id=par_packet.id,
             dist=dist,
             error_correction=error_correction,
             packet_len_format=par_packet.packet_len_format,
             crc_len_format=par_packet.crc_len_format,
             number_of_chunks_len_format=par_packet.number_of_chunks_len_format,
             id_len_format=par_packet.id_len_format,
             save_number_of_chunks_in_packet=par_packet.safe_number_of_chunks_in_packet)
         packet.error_prob = par_packet.error_prob
     elif par_packet.get_org_class() == "Packet":
         packet = Packet(
             data=par_packet.data,
             used_packets=par_packet.used_packets,
             total_number_of_chunks=par_packet.total_number_of_chunks,
             error_correction=error_correction,
             packet_len_format=par_packet.packet_len_format,
             crc_len_format=par_packet.crc_len_format,
             number_of_chunks_len_format=par_packet.number_of_chunks_len_format,
             id_len_format=par_packet.id_len_format)
         packet.error_prob = par_packet.error_prob
     elif par_packet.get_org_class() == "OnlinePacket":
         packet = OnlinePacket(
             data=par_packet.data,
             used_packets=par_packet.used_packets,
             check_block_number=par_packet.id,
             total_number_of_chunks=par_packet.total_number_of_chunks,
             error_correction=error_correction,
             dist=dist,
             quality=quality,
             epsilon=epsilon,
             crc_len_format=par_packet.crc_len_format,
             number_of_chunks_len_format=par_packet.number_of_chunks_len_format,
             check_block_number_len_format=par_packet.id_len_format)
         packet.error_prob = par_packet.error_prob
     else:
         raise RuntimeError("Unsupported packet type")
     return packet
Code example #12
 def addPacket(self, packet: Packet) -> None:
     if (packet.get_degree() not in self.degreeToPacket) or (
             not isinstance(self.degreeToPacket[packet.get_degree()], set)):
         self.degreeToPacket[packet.get_degree()] = set()
     self.number_of_chunks = packet.get_total_number_of_chunks()
     self.degreeToPacket[packet.get_degree()].add(packet)
Code example #13
def __handle_lt(packet: Packet):
    return packet.get_used_packets()
Code example #14
File: RU10Decoder.py Project: umr-ds/NOREC4DNA
 def saveDecodedFile(self, last_chunk_len_format: str = "I", null_is_terminator: bool = False,
                     print_to_output: bool = True, return_file_name=False, partial_decoding: bool = True) -> \
         typing.Union[bytes, str]:
     """
     Saves the file - if decoded. The filename is either taken from the headerchunk or generated based on the input
     filename.
     :param partial_decoding: perform partial decoding if full decoding fails; missing parts will be filled with "\x00"
     :param return_file_name: if set to True, this function will return the filename under which the file has been saved
     :param last_chunk_len_format: Format of the last chunk length
     :param null_is_terminator: True: The file is handled as a null-terminated C string.
     :param print_to_output: True: The result will be printed to the command line.
     :return:
     """
     assert self.is_decoded() or partial_decoding, \
         "Can not save File: Unable to reconstruct. You may try saveDecodedFile(partial_decoding=True)"
     if partial_decoding:
         self.solve(partial=True)
     dirty = False
     if self.use_headerchunk:
         header_row = self.GEPP.result_mapping[0]
         if header_row >= 0:
             self.headerChunk = HeaderChunk(
                 Packet(self.GEPP.b[header_row], {0},
                        self.number_of_chunks,
                        read_only=True),
                 last_chunk_len_format=last_chunk_len_format)
     file_name = "DEC_" + os.path.basename(
         self.file) if self.file is not None else "RU10.BIN"
     output_concat = b""
     if self.headerChunk is not None:
         try:
             file_name = self.headerChunk.get_file_name().decode("utf-8")
         except Exception as ex:
             print("Warning:", ex)
     file_name = file_name.split("\x00")[0]
     with open(file_name, "wb") as f:
         for x in self.GEPP.result_mapping:
             if x < 0:
                 f.write(b"\x00" * len(self.GEPP.b[x][0]))
                 dirty = True
                 continue
             if 0 != x or not self.use_headerchunk:
                 if self.number_of_chunks - 1 == x and self.use_headerchunk:
                     output = self.GEPP.b[x][0][0:self.headerChunk.get_last_chunk_length()]
                     output_concat += output.tobytes()
                     f.write(output)
                 else:
                     if null_is_terminator:
                         splitter = self.GEPP.b[x].tostring().decode().split("\x00")
                         output = splitter[0].encode()
                         if type(output) == bytes:
                             output_concat += output
                         else:
                             output_concat += output.tobytes()
                         f.write(output)
                         if len(splitter) > 1:
                             break  # since we are in null-terminator mode, we exit once we see the first 0-byte
                     else:
                         output = self.GEPP.b[x]
                         if type(output) == bytes:
                             output_concat += output
                         else:
                             output_concat += output.tobytes()
                         f.write(output)
     print("Saved file as '" + str(file_name) + "'")
     if dirty:
         print(
             "Some parts could not be restored, file WILL contain sections with \\x00 !"
         )
     if print_to_output:
         print("Result:")
         print(output_concat.decode("utf-8"))
     if self.progress_bar is not None:
         self.progress_bar.update(self.number_of_chunks,
                                  Corrupt=self.corrupt)
     if return_file_name:
         return file_name
     return output_concat