def load(self, data_stream, header_stream, huffman_stream, partition_map,
         pics_size_index=0,
         data_base=None, data_size=None,
         header_base=None, header_size=None,
         huffman_offset=None, huffman_size=None):
    super().load(data_stream, data_base, data_size)
    data_size = self._data_size
    pics_size_index = int(pics_size_index)
    assert pics_size_index >= 0
    header_base, header_size = stream_fit(header_stream, header_base, header_size)
    huffman_offset, huffman_size = stream_fit(huffman_stream, huffman_offset, huffman_size)
    assert header_size % 3 == 0  # one 24-bit offset per chunk
    assert huffman_size >= 4 * HUFFMAN_NODE_COUNT  # one '<HH' pair per node

    # Each chunk offset is a 24-bit little-endian integer; 0xFFFFFF marks
    # an empty (missing) chunk.
    chunk_count = header_size // 3
    chunk_offsets = [None] * chunk_count
    for i in range(chunk_count):
        byte0, byte1, byte2 = stream_unpack('<BBB', header_stream)
        offset = byte0 | (byte1 << 8) | (byte2 << 16)
        if offset < 0xFFFFFF:
            chunk_offsets[i] = offset
    chunk_offsets.append(data_size)  # sentinel: end of the last chunk

    # Back-fill missing offsets with the next known one, so that
    # (chunk_offsets[i], chunk_offsets[i + 1]) always brackets chunk i.
    for i in reversed(range(chunk_count)):
        if chunk_offsets[i] is None:
            chunk_offsets[i] = chunk_offsets[i + 1]
    assert all(0 <= chunk_offsets[i] <= data_size for i in range(chunk_count))
    assert all(chunk_offsets[i] <= chunk_offsets[i + 1] for i in range(chunk_count))

    huffman_nodes = list(
        stream_unpack_array('<HH', huffman_stream, HUFFMAN_NODE_COUNT, scalar=False))

    self._chunk_count = chunk_count
    self._chunk_offsets = chunk_offsets
    self._header_stream = header_stream
    self._header_base = header_base
    self._header_size = header_size
    self._huffman_stream = huffman_stream
    self._huffman_offset = huffman_offset
    self._huffman_size = huffman_size
    self._partition_map = partition_map
    self._pics_size_index = pics_size_index
    self._huffman_nodes = huffman_nodes
    self.pics_size = self._build_pics_size()
    return self
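
# A minimal, self-contained sketch of the 24-bit offset scheme above: each
# header entry is three little-endian bytes, 0xFFFFFF marks a missing chunk,
# and missing entries are back-filled with the next known offset so that
# (offsets[i], offsets[i + 1]) brackets chunk i. The header bytes below are
# illustrative sample data, not taken from a real file.
import struct

header = bytes([0x00, 0x00, 0x00,   # chunk 0 at offset 0x000000
                0xFF, 0xFF, 0xFF,   # chunk 1 missing (empty)
                0x20, 0x00, 0x00])  # chunk 2 at offset 0x000020
data_size = 0x30

offsets = []
for i in range(0, len(header), 3):
    byte0, byte1, byte2 = struct.unpack_from('<BBB', header, i)
    offset = byte0 | (byte1 << 8) | (byte2 << 16)
    offsets.append(offset if offset < 0xFFFFFF else None)
offsets.append(data_size)
for i in reversed(range(len(offsets) - 1)):
    if offsets[i] is None:
        offsets[i] = offsets[i + 1]
print(offsets)  # [0, 32, 32, 48]: chunk 1 gets zero length
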
def load(self, data_stream, header_stream,
         data_base=None, data_size=None,
         header_base=None, header_size=None):
    super().load(data_stream, data_base, data_size)
    data_size = self._data_size
    header_base, header_size = stream_fit(header_stream, header_base, header_size)
    assert header_size % 4 == 0  # one '<L' offset per chunk

    # The header is a flat array of 32-bit little-endian chunk offsets;
    # data_size is appended as a sentinel for the end of the last chunk.
    chunk_count = header_size // 4
    chunk_offsets = list(stream_unpack_array('<L', header_stream, chunk_count))
    chunk_offsets.append(data_size)
    assert all(0 <= chunk_offsets[i] <= data_size for i in range(chunk_count))
    assert all(chunk_offsets[i] <= chunk_offsets[i + 1] for i in range(chunk_count))

    self._chunk_count = chunk_count
    self._chunk_offsets = chunk_offsets
    self._header_stream = header_stream
    self._header_base = header_base
    self._header_size = header_size
    return self
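
# A small illustration of why data_size is appended as a sentinel: with it,
# the byte span of chunk i is simply (offsets[i], offsets[i + 1]), with no
# special case for the last chunk. Sample values are made up for the demo.
chunk_offsets = [0, 0x40, 0x90, 0x100]  # last entry is the appended data_size
spans = list(zip(chunk_offsets[:-1], chunk_offsets[1:]))
sizes = [end - start for start, end in spans]
print(spans)  # [(0, 64), (64, 144), (144, 256)]
print(sizes)  # [64, 80, 112]
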
def load(self, data_stream, header_stream, huffman_stream, partition_map,
         pics_dimensions_index=0,
         data_base=None, data_size=None,
         header_base=None, header_size=None,
         huffman_offset=None, huffman_size=None):
    super().load(data_stream, data_base, data_size)
    data_size = self._data_size
    pics_dimensions_index = int(pics_dimensions_index)
    assert pics_dimensions_index >= 0
    header_base, header_size = stream_fit(header_stream, header_base, header_size)
    huffman_offset, huffman_size = stream_fit(huffman_stream, huffman_offset, huffman_size)
    assert header_size % struct.calcsize('<BBB') == 0
    assert huffman_size >= struct.calcsize('<HH') * HUFFMAN_NODES_COUNT

    # Same 24-bit offset scheme as above, with the entry and node sizes
    # derived from the struct formats instead of hard-coded.
    chunk_count = header_size // struct.calcsize('<BBB')
    chunk_offsets = [None] * chunk_count
    for i in range(chunk_count):
        byte0, byte1, byte2 = stream_unpack('<BBB', header_stream)
        offset = byte0 | (byte1 << 8) | (byte2 << 16)
        if offset < 0xFFFFFF:
            chunk_offsets[i] = offset
    chunk_offsets.append(data_size)  # sentinel: end of the last chunk
    for i in reversed(range(chunk_count)):
        if chunk_offsets[i] is None:
            chunk_offsets[i] = chunk_offsets[i + 1]
    assert all(0 <= chunk_offsets[i] <= data_size for i in range(chunk_count))
    assert all(chunk_offsets[i] <= chunk_offsets[i + 1] for i in range(chunk_count))

    huffman_nodes = list(
        stream_unpack_array('<HH', huffman_stream, HUFFMAN_NODES_COUNT, scalar=False))

    self._chunk_count = chunk_count
    self._chunk_offsets = chunk_offsets
    self._header_stream = header_stream
    self._header_base = header_base
    self._header_size = header_size
    self._huffman_stream = huffman_stream
    self._huffman_offset = huffman_offset
    self._huffman_size = huffman_size
    self._partition_map = partition_map
    self._pics_dimensions_index = pics_dimensions_index
    self._huffman_nodes = huffman_nodes
    self.pics_dimensions = self._build_pics_dimensions()
    return self
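
# The loader above only reads the Huffman table; decompression is defined
# elsewhere. As a hedged sketch, assuming the classic id Software layout
# (each node is a (bit0, bit1) pair of uint16 values, a value below 256 is
# a literal byte, a value of 256 or more points to node value - 256, the
# root is node 254, and bits are consumed LSB-first), expansion would look
# roughly like this, taking the huffman_nodes list built above:
def huffman_expand(data, expanded_size, huffman_nodes):
    output = bytearray()
    node = huffman_nodes[254]  # start at the root
    for byte in data:
        for shift in range(8):
            value = node[(byte >> shift) & 1]
            if value < 256:
                output.append(value)       # literal byte
                if len(output) >= expanded_size:
                    return bytes(output)
                node = huffman_nodes[254]  # restart at the root
            else:
                node = huffman_nodes[value - 256]
    return bytes(output)
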
def load(self, data_stream, data_base=None, data_size=None):
    logger = logging.getLogger()
    self.clear()
    data_base, data_size = stream_fit(data_stream, data_base, data_size)
    logger.info('%r.load(data_stream=%r, data_base=0x%X, data_size=0x%X)',
                self, data_stream, data_base, data_size)
    self._data_stream = data_stream
    self._data_base = data_base
    self._data_size = data_size
    return self
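
# stream_fit() is defined elsewhere in the package. A plausible sketch,
# consistent with how it is called throughout these loaders (fill a missing
# base from the current position and a missing size from the bytes left in
# the stream), might be the following; this is an assumption, not the
# package's actual implementation:
import io

def stream_fit(stream, base=None, size=None):
    if base is None:
        base = stream.tell()          # default to the current position
    if size is None:
        stream.seek(0, io.SEEK_END)   # measure the remaining bytes
        size = stream.tell() - base
    stream.seek(base)                 # leave the stream at the base offset
    return base, size
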
def load(self, data_stream, header_stream,
         data_base=None, data_size=None,
         header_base=None, header_size=None,
         planes_count=3, carmacized=True):
    super().load(data_stream, data_base, data_size)
    data_size = self._data_size
    planes_count = int(planes_count)
    carmacized = bool(carmacized)
    assert planes_count > 0
    header_base, header_size = stream_fit(header_stream, header_base, header_size)

    # The header starts with the 16-bit RLEW tag, followed by an array of
    # 32-bit chunk offsets; 0 and 0xFFFFFFFF both mark missing chunks.
    rlew_tag = stream_unpack('<H', header_stream)[0]
    assert (header_size - 2) % 4 == 0
    chunk_count = (header_size - 2) // 4
    chunk_offsets = [None] * chunk_count
    for i in range(chunk_count):
        offset = stream_unpack('<L', header_stream)[0]
        if 0 < offset < 0xFFFFFFFF:
            chunk_offsets[i] = offset
    chunk_offsets.append(data_size)  # sentinel: end of the last chunk
    for i in reversed(range(chunk_count)):
        if chunk_offsets[i] is None:
            chunk_offsets[i] = chunk_offsets[i + 1]
    assert all(0 < chunk_offsets[i] <= data_size for i in range(chunk_count))
    assert all(chunk_offsets[i] <= chunk_offsets[i + 1] for i in range(chunk_count))

    self._chunk_count = chunk_count
    self._chunk_offsets = chunk_offsets
    self._header_stream = header_stream
    self._header_base = header_base
    self._header_size = header_size
    self._carmacized = carmacized
    self._rlew_tag = rlew_tag
    self.planes_count = planes_count
    return self
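
# Only the RLEW tag is read here; expansion happens when chunks are read.
# As a hedged sketch of the classic id Software RLEW scheme (a stream of
# little-endian words, where a word equal to the tag is followed by a repeat
# count and the word to repeat):
import struct

def rlew_expand(data, rlew_tag):
    output = bytearray()
    i = 0
    while i < len(data):
        word, = struct.unpack_from('<H', data, i)
        i += 2
        if word == rlew_tag:
            count, value = struct.unpack_from('<HH', data, i)
            i += 4
            output += struct.pack('<H', value) * count
        else:
            output += struct.pack('<H', word)
    return bytes(output)
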
def load(self, data_stream, header_stream,
         data_base=None, data_size=None,
         header_base=None, header_size=None):
    super().load(data_stream, data_base, data_size)
    data_size = self._data_size
    header_base, header_size = stream_fit(header_stream, header_base, header_size)
    assert header_size % struct.calcsize('<L') == 0

    # Same flat-offset header as above, with the entry size derived from
    # the struct format instead of hard-coded.
    chunk_count = header_size // struct.calcsize('<L')
    chunk_offsets = list(stream_unpack_array('<L', header_stream, chunk_count))
    chunk_offsets.append(data_size)  # sentinel: end of the last chunk
    assert all(0 <= chunk_offsets[i] <= data_size for i in range(chunk_count))
    assert all(chunk_offsets[i] <= chunk_offsets[i + 1] for i in range(chunk_count))

    self._chunk_count = chunk_count
    self._chunk_offsets = chunk_offsets
    self._header_stream = header_stream
    self._header_base = header_base
    self._header_size = header_size
    return self
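
# A usage sketch built on the state stored above: with the data_size
# sentinel in place, reading raw chunk i is a single seek-and-read. The
# helper name is hypothetical, and it assumes chunk offsets are relative
# to data_base:
def read_raw_chunk(archive, index):
    start = archive._chunk_offsets[index]
    end = archive._chunk_offsets[index + 1]
    archive._data_stream.seek(archive._data_base + start)
    return archive._data_stream.read(end - start)
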
def load(self, data_stream, header_stream,
         data_base=None, data_size=None,
         header_base=None, header_size=None,
         planes_count=3, carmacized=True):
    super().load(data_stream, data_base, data_size)
    data_size = self._data_size
    planes_count = int(planes_count)
    carmacized = bool(carmacized)
    assert planes_count > 0
    header_base, header_size = stream_fit(header_stream, header_base, header_size)

    # Same header layout as above (16-bit RLEW tag, then 32-bit offsets),
    # with the field sizes derived from the struct formats.
    rlew_tag = stream_unpack('<H', header_stream)[0]
    assert (header_size - struct.calcsize('<H')) % struct.calcsize('<L') == 0
    chunk_count = (header_size - struct.calcsize('<H')) // struct.calcsize('<L')
    chunk_offsets = [None] * chunk_count
    for i in range(chunk_count):
        offset = stream_unpack('<L', header_stream)[0]
        if 0 < offset < 0xFFFFFFFF:
            chunk_offsets[i] = offset
    chunk_offsets.append(data_size)  # sentinel: end of the last chunk
    for i in reversed(range(chunk_count)):
        if chunk_offsets[i] is None:
            chunk_offsets[i] = chunk_offsets[i + 1]
    assert all(0 < chunk_offsets[i] <= data_size for i in range(chunk_count))
    assert all(chunk_offsets[i] <= chunk_offsets[i + 1] for i in range(chunk_count))

    self._chunk_count = chunk_count
    self._chunk_offsets = chunk_offsets
    self._header_stream = header_stream
    self._header_base = header_base
    self._header_size = header_size
    self._carmacized = carmacized
    self._rlew_tag = rlew_tag
    self.planes_count = planes_count
    return self
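
# The carmacized flag stored above likewise only matters at chunk-reading
# time. As a hedged sketch of the classic Carmack scheme (word-oriented LZ
# with "near" 0xA7 and "far" 0xA8 pointer tags; a zero count escapes a
# literal word whose high byte collides with a tag; the caller is assumed
# to have already stripped the leading expanded-size word):
import struct

def carmack_expand(data, expanded_size):
    NEAR, FAR = 0xA7, 0xA8
    words = []  # decompressed output, as 16-bit words
    i = 0
    while len(words) * 2 < expanded_size:
        count, tag = data[i], data[i + 1]
        i += 2
        if tag in (NEAR, FAR):
            if count == 0:
                # escaped literal: the next byte is the real low byte
                words.append(data[i] | (tag << 8))
                i += 1
            elif tag == NEAR:
                distance = data[i]  # distance back, in words
                i += 1
                for _ in range(count):
                    words.append(words[-distance])
            else:
                offset, = struct.unpack_from('<H', data, i)  # absolute word offset
                i += 2
                for _ in range(count):
                    words.append(words[offset])
                    offset += 1
        else:
            words.append(count | (tag << 8))  # plain literal word
    return b''.join(struct.pack('<H', w) for w in words)
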
def load(self, data_stream, data_base=None, data_size=None):
    self.clear()
    data_base, data_size = stream_fit(data_stream, data_base, data_size)
    self._data_stream = data_stream
    self._data_base = data_base
    self._data_size = data_size
    return self  # returned for chaining, matching the other load() variants
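
# Because data_base and data_size are explicit, several logical archives
# can share one physical stream, and returning self allows one-line
# construction. A hedged illustration; the class and file names below are
# hypothetical placeholders:
# with open('bundle.bin', 'rb') as stream:
#     first = SomeArchiveReader().load(stream, data_base=0, data_size=0x1000)
#     second = SomeArchiveReader().load(stream, data_base=0x1000, data_size=0x800)
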