Example #1
    def import_data(self, buffer, parent=None):
        enable = mrc.property_get(self.enable, parent)

        if not enable:
            return mrc.TransformResult(payload=buffer, end_offset=len(buffer))

        output = bytearray(len(buffer) * 2)
        for i in range(len(buffer)):
            output[2 * i] = buffer[i] & 0x0f
            output[2 * i + 1] = buffer[i] >> 4
        return mrc.TransformResult(payload=output, end_offset=len(buffer))
Example #2
    def export_data(self, buffer, parent=None):
        enable = mrc.property_get(self.enable, parent)

        if not enable:
            return mrc.TransformResult(payload=buffer)

        if buffer:
            assert max(buffer) <= 0xf
        output = bytearray(len(buffer) // 2)
        for i in range(len(buffer)):
            if i % 2:
                output[i // 2] |= buffer[i] << 4
            else:
                output[i // 2] |= buffer[i]
        return mrc.TransformResult(payload=output, end_offset=len(buffer))
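Examples #1 and #2 are the two halves of a single transform: import_data expands each byte into a low nibble followed by a high nibble, and export_data packs the nibbles back together. A minimal standalone sketch of the same idea (plain functions outside the mrc framework, names hypothetical), purely for illustration:

    def unpack_nibbles(data):
        # low nibble first, then high nibble, mirroring import_data above
        out = bytearray(len(data) * 2)
        for i, b in enumerate(data):
            out[2 * i] = b & 0x0f
            out[2 * i + 1] = b >> 4
        return bytes(out)

    def pack_nibbles(data):
        # inverse of unpack_nibbles; every input value must fit in 4 bits
        assert all(b <= 0x0f for b in data)
        out = bytearray(len(data) // 2)
        for i, b in enumerate(data):
            out[i // 2] |= (b << 4) if i % 2 else b
        return bytes(out)

    assert pack_nibbles(unpack_nibbles(b'\x12\xab')) == b'\x12\xab'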
Example #3
 def export_data(self, buffer, parent=None):
     output = bytearray(len(buffer) // 2)
     for i in range(len(output)):
         output[i] |= buffer[2 * i] << 4
         output[i] |= buffer[2 * i + 1]
     return mrc.TransformResult(payload=bytes(output),
                                end_offset=len(buffer))
Example #4
 def import_data(self, buffer, parent=None):
     output = bytearray(len(buffer) * 2)
     for i in range(len(buffer)):
         output[2 * i] = buffer[i] >> 4
         output[2 * i + 1] = buffer[i] & 0x0f
     return mrc.TransformResult(payload=bytes(output),
                                end_offset=len(buffer))
Example #5
    def import_data( self, buffer, parent=None ):
        r = self.N - self.F
        flags = 0
        text_buf = bytearray( b' '*(self.N+self.F-1) )  # ring buffer, initialised to spaces
        result = bytearray()
        index = 0

        while index < len( buffer ):
            flags >>= 1
            if (flags & 0x100) == 0:
                flags = buffer[index] | 0xff00  # reload the flags byte; the 0xff00 marker lasts for eight shifts
                index += 1
            if (flags & 1):
                c = buffer[index]
                index += 1
                result.append( c )
                text_buf[r] = c
                r = (r+1) & (self.N-1)
            else:
                i = buffer[index]
                j = buffer[index+1]
                index += 2
                i |= (j & 0xf0) << 4  # 12-bit offset into the ring buffer
                j = (j & 0x0f) + self.THRESHOLD  # copy length is j + 1 bytes
                for k in range( j+1 ):
                    c = text_buf[(i+k) & (self.N-1)]
                    result.append( c )
                    text_buf[r] = c
                    r = (r+1) & (self.N-1)
                    
        return mrc.TransformResult( payload=bytes( result ), end_offset=len( buffer ) )
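Example #5 reads self.N, self.F and self.THRESHOLD without defining them; the loop is the classic Okumura-style LZSS ring-buffer decoder, which conventionally uses a 4096-byte window, an 18-byte maximum match and a threshold of 2. A hedged sketch of the constants the surrounding class (presumably the lzss.LZSSCompressor used in Example #9 below) would declare, with the values assumed rather than taken from the source:

    class LZSSCompressor:       # hypothetical wrapper for the method above
        N = 0x1000              # ring-buffer (window) size, assumed to be 4096
        F = 0x12                # maximum match length, assumed to be 18
        THRESHOLD = 2           # matches shorter than this are stored as literal bytes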
Example #6
 def import_data(self, buffer, parent=None):
     limit = len(buffer) if not self.length else min(
         len(buffer), self.length)
     payload = bytes([
         bits.reverse_bits(c) ^ self.KEY[i % len(self.KEY)]
         for i, c in enumerate(buffer[:limit])
     ])
     return mrc.TransformResult(payload=payload, end_offset=limit)
Example #7
 def import_data(self, buffer):
     assert len(buffer) % 64 == 0
     result = bytearray(len(buffer))
     for i in range(0, len(buffer), 64):
         # de-interleave the even and odd bytes of each 64-byte block...
         deint = buffer[i:i + 64:2] + buffer[i + 1:i + 64:2]
         # ...then transpose it as an 8x8 matrix
         result[i:i + 64] = bytes(
             [deint[8 * (j % 8) + (j // 8)] for j in range(64)])
     return mrc.TransformResult(payload=bytes(result),
                                end_offset=len(result))
Example #8
 def import_data(self, buffer, parent=None):
     result = bytearray(50 * 25)
     pointer = 0
     while pointer < len(buffer):
         # each record: uint16_be size, uint16_be destination offset, then `size` bytes of data
         size = utils.from_uint16_be(buffer[pointer:pointer + 2])
         offset = utils.from_uint16_be(buffer[pointer + 2:pointer + 4])
         result[offset:offset + size] = buffer[pointer + 4:pointer + 4 + size]
         # advance past the header and payload; without this the loop never terminates
         pointer += 4 + size
     return mrc.TransformResult(payload=result, end_offset=pointer)
Example #9
    def import_data(self, buffer, parent=None):
        if len(buffer) == 0:
            return mrc.TransformResult()

        lc = lzss.LZSSCompressor()
        size_comp = utils.from_uint32_le(buffer[0:4])

        if size_comp != len(buffer):
            logger.info('{}: File not compressed'.format(self))
            return mrc.TransformResult(payload=buffer, end_offset=len(buffer))

        size_raw = utils.from_uint32_le(buffer[4:8])
        result = lc.import_data(buffer[8:])
        if len(result.payload) != size_raw:
            logger.warning(
                '{}: Was expecting a decompressed size of {}, got {}!'.format(
                    self, size_raw, len(result.payload)))

        return result
Example #10
    def import_data(self, buffer, parent=None):
        decomp_size = utils.from_uint32_le(buffer[:4])
        max_bits = utils.from_uint16_le(buffer[4:6])  # should be 12

        lookup = [bytes((i, )) for i in range(256)]
        lookup.append(None)  # 256: error
        lookup.append(None)  # 257: end of data

        output = bytearray()

        bs = bits.BitStream(buffer, 6, bit_endian='big', io_endian='big')
        state = {'usebits': 9}

        def add_to_lookup(state, entry):
            if len(lookup) < (1 << max_bits):
                logger.debug('lookup[{}] = {}'.format(len(lookup), entry))
                lookup.append(entry)
                if len(lookup) == (1 << state['usebits']) - 1:
                    state['usebits'] = min(state['usebits'] + 1, max_bits)
                    logger.debug('usebits = {}'.format(state['usebits']))
            return

        fcode = bs.read(state['usebits'])
        match = lookup[fcode]
        logger.debug('fcode={},match={}'.format(fcode, match))
        output.extend(match)
        while True:
            ncode = bs.read(state['usebits'])
            logger.debug('ncode={}'.format(ncode))
            if ncode == 257:
                # end of data
                break
            elif ncode == 256:
                # error
                raise Exception('Found error code, data is not valid')
            elif ncode < len(lookup):
                nmatch = lookup[ncode]
            else:
                nmatch = match + match[0:1]
            logger.debug('match={}'.format(match))
            logger.debug('nmatch={}'.format(nmatch))
            output.extend(nmatch)

            # add code to lookup
            add_to_lookup(state, match + nmatch[0:1])
            match = nmatch

        if len(output) != decomp_size:
            logger.warning(
                '{}: was expecting data of size {}, got data of size {} instead'
                .format(self, decomp_size, len(output)))

        return mrc.TransformResult(payload=bytes(output),
                                   end_offset=len(buffer))
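The width bump inside add_to_lookup means the decoder switches to wider codes one entry before the lookup table reaches each power of two, capped at the max_bits value read from the header. A quick derivation of the resulting schedule, using nothing beyond the logic shown above:

    # Code width grows one entry before each power-of-two table size, capped at max_bits.
    max_bits = 12                 # the header field above is expected to be 12
    usebits = 9
    schedule = []
    while usebits < max_bits:
        schedule.append((usebits, (1 << usebits) - 1))  # table size that triggers the bump
        usebits += 1
    print(schedule)               # [(9, 511), (10, 1023), (11, 2047)]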
Example #11
    def import_data(self, buffer, parent=None):

        src = bytearray(buffer)
        dest = bytearray()
        lookup_pointer = 0
        bit_size = 9

        bitstore = utils.BitReader(src,
                                   start_offset=2,
                                   bits_reverse=True,
                                   output_reverse=True)
        eof = False

        loop = 0
        while True:
            if loop % 2 == 0:
                src[lookup_pointer:lookup_pointer + 2] = utils.to_uint16_le(
                    len(dest))
                lookup_pointer += 2

            while True:
                test = 0

                try:
                    test = bitstore.get_bits(bit_size)
                except IndexError:
                    eof = True
                    break

                if test != 0x100:
                    break
                bit_size += 1

            if eof:
                break

            if test <= 0xff:
                dest.append(test & 0xff)
            else:
                index = (test - 0x101) << 1
                if index > len(src):
                    print('Out of bounds! 0x{:04x}'.format(index))
                    break
                start = utils.from_uint16_le(src[index:index + 2])
                end = utils.from_uint16_le(src[index + 2:index + 4])
                dest.extend(dest[start:end])

            loop += 1

        return mrc.TransformResult(payload=bytes(dest), end_offset=len(buffer))
Example #12
    def import_data(self, buffer):
        unk1 = buffer[0]
        flags = buffer[1]

        rle_c = RLECompressor()
        dict_c = DictCompressor()

        pointer = 2
        if (flags & 2 != 0) and (flags & 1 != 0):
            pass1 = dict_c.import_data(buffer[2:])
            pass2 = rle_c.import_data(pass1.payload)
            return mrc.TransformResult(payload=pass2.payload,
                                       end_offset=pass1.end_offset + 2)
        elif (flags & 2 != 0) and (flags & 1 == 0):
            pass1 = dict_c.import_data(buffer[2:])
            return mrc.TransformResult(payload=pass1.payload,
                                       end_offset=pass1.end_offset + 2)
        elif (flags & 2 == 0) and (flags & 1 != 0):
            pass1 = rle_c.import_data(buffer[2:])
            return mrc.TransformResult(payload=pass1.payload,
                                       end_offset=pass1.end_offset + 2)
        # no compression
        return mrc.TransformResult(payload=buffer[2:], end_offset=len(buffer))
Example #13
    def import_data( self, buffer, parent=None ):
        final_length = utils.from_uint32_le( buffer[0:4] )
        i = 4
        out = bytearray()
        while (len( out ) < final_length):
            byte = buffer[i]
            if byte >= 128:
                out.extend( buffer[i+1:i+byte-126] )
                i += byte-126
            else:
                out.extend( buffer[i+1:i+2]*(byte+3) )
                i += 2

        return mrc.TransformResult( payload=bytes( out ), end_offset=i )
Example #14
 def import_data(self, buffer, parent=None):
     assert utils.is_bytes(buffer)
     pointer = 0
     result = bytearray()
     while pointer < len(buffer):
         test = buffer[pointer]
         pointer += 1
         if test & 0x80:
             result += buffer[pointer:pointer + (test & 0x7f)]
             pointer += (test & 0x7f)
         else:
             result += buffer[pointer:pointer + 1] * test
             pointer += 1
     return mrc.TransformResult(payload=bytes(result), end_offset=pointer)
Example #15
 def import_data(self, buffer, parent=None):
     result = bytearray()
     pointer = 0
     while (pointer < len(buffer)):
         test = buffer[pointer]
         pointer += 1
         length = test + 1
         if test & 0x80:
             length = ((test ^ 0xff) & 0xff) + 2
             result.extend((buffer[pointer] for i in range(length)))
             pointer += 1
         else:
             result.extend(buffer[pointer:pointer + length])
             pointer += length
     return mrc.TransformResult(payload=result, end_offset=pointer)
Example #16
    def import_data( self, buffer, parent=None ):
        final_length = utils.from_uint32_le( buffer[0:4] )
        i = 4
        out = bytearray()
        while (len( out ) < final_length):
            word = buffer[i:i+2]
            if word == b'\xfe\xfe':
                count = utils.from_uint16_le( buffer[i+2:i+4] )
                data = buffer[i+4:i+6]
                out.extend( data*count )
                i += 6
            else:
                out.extend( word )
                i += 2

        return mrc.TransformResult( payload=bytes( out ), end_offset=i )
Example #17
    def import_data(self, buffer, parent=None):
        pointer = 0

        dest = bytearray()

        while pointer < len(buffer):
            test = utils.from_int8(buffer[pointer:pointer + 1])
            pointer += 1
            if test > 0:
                dest.extend(buffer[pointer:pointer + test])
                pointer += test
            else:
                count = 1 - test
                al = buffer[pointer]
                pointer += 1
                for i in range(count):
                    dest.append(al)

        return mrc.TransformResult(payload=bytes(dest), end_offset=len(buffer))
Example #18
    def export_data(self, buffer, parent=None):
        assert utils.is_bytes(buffer)
        assert len(buffer) == 960 * 160

        segments = (buffer[960 * 40 * i:960 * 40 * (i + 1)] for i in range(4))
        segments = (self.plan.export_data(x).payload for x in segments)

        result = bytearray()

        for segment in segments:
            pointer = 0
            while pointer < len(segment):
                start = pointer
                end = pointer + 1
                if end >= len(segment):
                    result.append(0x00)
                    result.append(segment[start])
                    pointer += 1
                elif segment[end] == segment[start]:
                    while ((end + 1) < len(segment)) and (
                            segment[end + 1]
                            == segment[end]) and (end - start < 127):
                        end += 1
                    result.append(257 - (end + 1 - start))
                    result.append(segment[start])
                    pointer = end + 1
                else:
                    while ((end + 1) < len(segment)) and (
                            segment[end + 1] !=
                            segment[end]) and (end - 1 - start < 128):
                        end += 1
                    result.append(end - 1 - start)
                    result.extend(segment[start:end])
                    pointer = end

            result.append(0x80)

        return mrc.TransformResult(payload=bytes(result))
Example #19
    def import_data(self, buffer, parent=None):
        assert utils.is_bytes(buffer)
        result = []
        buf_out = []
        i = 0
        while i < len(buffer):
            # 0x00 <= n < 0x80: copy next n+1 bytes to output stream
            if buffer[i] in range(0x00, 0x80):
                count = buffer[i] + 1
                buf_out.append(buffer[i + 1:i + 1 + count])
                i += count + 1
            # n == 0x80: end of segment
            elif buffer[i] == 0x80:
                product = b''.join(buf_out)
                if len(product) != self.DECOMPRESSED_SIZE:
                    logger.warning(
                        '{}: was expecting {} bytes of data, got {}'.format(
                            self, self.DECOMPRESSED_SIZE, len(product)))
                result.append(product)
                buf_out = []
                i += 1
            # 0x81 <= n <= 0xff: repeat next byte (257-n) times
            else:
                count = 257 - buffer[i]
                buf_out.append(buffer[i + 1:i + 2] * count)
                i += 2

        if buf_out:
            logger.warning(
                '{}: EOF reached before last RLE block closed'.format(self))
            result.append(b''.join(buf_out))

        # result is a 960x160 3bpp image, divided into 4x 40 scanline segments
        unpack = (self.plan.import_data(x).payload for x in result)

        return mrc.TransformResult(payload=bytes(itertools.chain(*unpack)),
                                   end_offset=i)
Example #20
 def export_data(self, buffer, parent=None):
     # no header, no compression!
     return mrc.TransformResult(payload=buffer)
Example #21
 def import_data( self, buffer, parent=None ):
     assert utils.is_bytes( buffer )
     stage_1 = self.rle.import_data( buffer )
     stage_2 = self.plan.import_data( stage_1.payload )
     return mrc.TransformResult( payload=stage_2.payload, end_offset=stage_1.end_offset )
Example #22
 def export_data(self, buffer, parent=None):
     payload = bytes([
         bits.reverse_bits(c ^ self.KEY[i % len(self.KEY)])
         for i, c in enumerate(buffer)
     ])
     return mrc.TransformResult(payload=payload)
Example #23
    def import_data(self, buffer, parent=None):
        assert utils.is_bytes(buffer)

        pointer = 0
        total_num_bytes = len(buffer)

        bit_count = utils.from_uint8(buffer[pointer:pointer + 1])
        checksum = utils.from_uint8(buffer[pointer + 1:pointer + 2])
        decompressed_size = utils.from_uint32_be(buffer[pointer + 2:pointer +
                                                        6])
        compressed_size = utils.from_uint32_be(buffer[pointer + 6:pointer +
                                                      10])

        pointer += 10
        total_num_bytes -= 10
        compressed_size -= 10

        compressed_data = bytearray(buffer[pointer:pointer + compressed_size])
        if checksum != self._xor_checksum(compressed_data):
            logger.warning('{}: Checksum doesn\'t match header'.format(self))

        pointer += compressed_size
        total_num_bytes -= compressed_size

        # first byte of compressed data is shifted wrongly, fix
        compressed_data[-1] = (compressed_data[-1] << (8 - bit_count)) & 0xff
        bs = bits.BitStream(compressed_data,
                            start_offset=(compressed_size - 1, bit_count - 1),
                            bytes_reverse=True,
                            bit_endian='little',
                            io_endian='big')

        def copy_prev_data(blocklen, offset_size, state):
            offset = bs.read(offset_size)
            for i in range(blocklen):
                state['dptr'] -= 1
                state['ddata'][state['dptr']] = state['ddata'][state['dptr'] +
                                                               offset + 1]
            return

        def dump_data(num_bytes, state):
            for i in range(num_bytes):
                state['dptr'] -= 1
                state['ddata'][state['dptr']] = bs.read(8)
            return

        state = {
            'dptr': decompressed_size,
            'ddata': bytearray(decompressed_size),
        }

        while True:
            if bs.read(1) == 1:
                test = bs.read(2)
                if test == 0:
                    copy_prev_data(3, 9, state)
                elif test == 1:
                    copy_prev_data(4, 10, state)
                elif test == 2:
                    copy_prev_data(bs.read(8) + 1, 12, state)
                elif test == 3:
                    dump_data(bs.read(8) + 9, state)
            else:
                test = bs.read(1)
                if test == 0:
                    dump_data(bs.read(3) + 1, state)
                elif test == 1:
                    copy_prev_data(2, 8, state)
            if not (state['dptr'] > 0):
                break

        return mrc.TransformResult(payload=bytes(state['ddata']),
                                   end_offset=pointer)
Example #24
 def export_data( self, buffer, parent=None ):
     payload = bytes( [utils.BYTE_REVERSE[c ^ self.KEY[i%len( self.KEY )]]
                         for i, c in enumerate( buffer )] )
     return mrc.TransformResult( payload=payload )
Example #25
    def export_data(self, buffer: bytes, parent=None):
        assert utils.is_bytes(buffer)

        # load in constructor properties
        bpp = mrc.property_get(self.bpp, parent)
        width = mrc.property_get(self.width, parent)
        height = mrc.property_get(self.height, parent)
        plane_size = mrc.property_get(self.plane_size, parent)
        plane_padding = mrc.property_get(self.plane_padding, parent)
        frame_offset = mrc.property_get(self.frame_offset, parent)
        frame_count = mrc.property_get(self.frame_count, parent)
        frame_stride = mrc.property_get(self.frame_stride, parent)

        assert (bpp >= 0) and (bpp <= 8)
        if (width or height):
            assert (width * height) % 8 == 0
            if plane_size:
                raise Exception(
                    'Can\'t define plane_size when either width or height is defined.'
                )
        elif plane_size is None and frame_count == 1:
            # for a single frame without a plane size, assume the buffer contains everything
            assert len(buffer) % bpp == 0
            plane_size = len(buffer) // bpp
        else:
            assert plane_size is not None

        if not plane_size:
            plane_size = math.ceil(width * height / 8)

        assert (frame_count >= 1)
        if frame_count >= 2 and frame_stride is None:
            frame_stride = bpp * (plane_size + plane_padding)
        else:
            frame_stride = frame_stride if frame_stride is not None else 0

        if frame_count == 1:
            assert len(buffer) >= frame_offset + plane_size * 8
        else:
            assert len(buffer) >= frame_offset + frame_count * frame_stride

        # this method just does the opposite of the above; split chunky pixels back into planes.
        planes = array('Q')
        segment_size = plane_size + plane_padding
        if frame_count == 1:
            raw_planes = bytearray(frame_offset + segment_size * bpp)
        else:
            raw_planes = bytearray(frame_offset + frame_count * frame_stride)

        for f in range(frame_count):
            pointer = frame_offset + f * frame_stride
            planes = planes[0:0]
            # load our chunky pixels into the 64-bit int array
            planes.frombytes(buffer[f * plane_size * 8:(f + 1) * plane_size *
                                    8])
            # check for endianness!
            if sys.byteorder == 'little':
                planes.byteswap()

            for b in range(bpp):
                for i in range(plane_size):
                    # for each group of 8 chunky pixels, use pack_bits to fill up 8 bits
                    # of the relevant bitplane
                    raw_planes[pointer + b * segment_size +
                               i] = bits.pack_bits((planes[i] >> b))

        return mrc.TransformResult(payload=raw_planes)
Example #26
    def export_data(self, buffer, parent=None):
        assert utils.is_bytes(buffer)

        decompressed_size = len(buffer)

        bs = utils.BitWriter(bits_reverse=True)

        pointer = 0

        def encode_raw_data(length, bs):
            assert length <= 255 + 9

            if length > 8:
                bs.put_bits(length - 9, 8)
                bs.put_bits(0x7, 3)
            elif length > 0:
                bs.put_bits(length - 1, 3)
                bs.put_bits(0x0, 2)

        def find_reference():
            # main form of compression is of the form:
            # - while decompressing from end to start
            # - look forward [up to max_offset] bytes in the decompressed data
            # - copy [up to max_length] bytes to the current decompression position
            # the largest offset supported by the file format is 4096, but this means
            # every call to find_reference loops 4096 times.
            # this takes foreeeever in Python!
            # because the compression is worthless and time is money, max_offset has
            # been slashed to 16 to speed up proceedings.
            #max_offset = (1 << 12) + 1
            max_offset = (1 << 4) + 1
            # largest length supported by the file format is 256
            max_length = (1 << 8) + 1

            length = 4  # throw away short references
            offset = 0
            short_offset = [0, 0, 0]

            for i in range(pointer + 1, pointer + max_offset):
                temp_len = 0
                while (temp_len < max_length) and (i + temp_len <
                                                   decompressed_size):
                    # record short references
                    if (temp_len >= 2) and (temp_len <= 4):
                        if short_offset[temp_len - 2] == 0:
                            short_offset[temp_len - 2] = i - pointer

                    if buffer[pointer + temp_len] != buffer[i + temp_len]:
                        break
                    temp_len += 1

                if temp_len == max_length:
                    temp_len -= 1

                # largest reference so far? use it
                if temp_len > length:
                    length = temp_len
                    offset = i - pointer

            assert length < max_length
            assert offset < max_offset

            # no long references? try short
            if (offset == 0):
                for i in (2, 1, 0):
                    max_short_offset = (1 << (i + 8)) + 1
                    if (short_offset[i] > 0) and (short_offset[i] <
                                                  max_short_offset):
                        length = i + 2
                        offset = short_offset[i]
                        break

            return length, offset

        raw = 0
        while pointer < decompressed_size:
            length, ref = find_reference()
            if ref > 0:
                if raw > 0:
                    encode_raw_data(raw, bs)
                    raw = 0
                if length > 4:
                    bs.put_bits(ref - 1, 12)
                    bs.put_bits(length - 1, 8)
                    bs.put_bits(0x6, 3)
                elif length == 4:
                    bs.put_bits(ref - 1, 10)
                    bs.put_bits(0x5, 3)
                elif length == 3:
                    bs.put_bits(ref - 1, 9)
                    bs.put_bits(0x4, 3)
                elif length == 2:
                    bs.put_bits(ref - 1, 8)
                    bs.put_bits(0x1, 2)

                pointer += length
            else:
                bs.put_bits(buffer[pointer], 8)

                raw += 1
                if raw == 264:
                    encode_raw_data(raw, bs)
                    raw = 0

                pointer += 1

        encode_raw_data(raw, bs)

        compressed_data = bs.get_buffer()
        compressed_size = len(compressed_data) + 10
        checksum = self._xor_checksum(compressed_data)

        output = bytearray(6)
        output[0:1] = utils.to_uint8(8 - (bs.bits_remaining % 8))
        output[1:2] = utils.to_uint8(checksum)
        output[2:6] = utils.to_uint32_be(decompressed_size)
        output[6:10] = utils.to_uint32_be(compressed_size)
        output.extend(compressed_data)

        return mrc.TransformResult(payload=bytes(output))
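The control codes written by Example #26 pair off with the decoder branches in Example #23 (and its twin, Example #28 below): a leading 1 bit selects a two-bit sub-code, a leading 0 a one-bit one. Summarised as a sketch, derived only from the code shown here:

    # Control codes shared by the encoder above and the decoders in Examples #23/#28,
    # keyed by the bits the decoder consumes (first bit, then the sub-code).
    CONTROL_CODES = {
        (0, 0b0):  'raw run: 3-bit length field, 1-8 literal bytes',
        (0, 0b1):  'copy 2 bytes, 8-bit offset',
        (1, 0b00): 'copy 3 bytes, 9-bit offset',
        (1, 0b01): 'copy 4 bytes, 10-bit offset',
        (1, 0b10): 'copy (8-bit length + 1) bytes, 12-bit offset',
        (1, 0b11): 'raw run: 8-bit length field, 9-264 literal bytes',
    }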
Example #27
    def import_data(self, buffer):
        output_size = utils.from_uint32_le(buffer[:4])
        edx = output_size
        data_p = 4
        bx = 0
        cx = 0
        work_ram = bytearray(0x1000)
        output = bytearray()

        while True:
            cx >>= 1
            if cx < 0x100:
                logger.debug('@ new pattern: {:08b}'.format(buffer[data_p]))
                cx = buffer[data_p] + 0xff00
                data_p += 1

            if not (cx & 1):
                info = buffer[data_p] + (buffer[data_p + 1] << 8)
                data_p += 2
                work_p = info & 0xfff
                count = (info >> 12) + 3
                logger.debug(
                    '# work_ram[0x{:04x}:0x{:04x}] = work_ram[0x{:04x}:0x{:04x}]'
                    .format(bx, (bx + count) & 0xfff, work_p,
                            (work_p + count) & 0xfff))
                logger.debug(
                    '! output[0x{:04x}:0x{:04x}] = work_ram[0x{:04x}:0x{:04x}]'
                    .format(len(output),
                            len(output) + count, work_p,
                            (work_p + count) & 0xfff))
                for i in range(count):
                    # loc_103C4
                    dat = work_ram[work_p]
                    work_ram[bx] = dat
                    work_p += 1
                    work_p &= 0xfff
                    bx += 1
                    bx &= 0xfff
                    output.append(dat)

                    edx -= 1
                    if edx == 0:
                        break

                if edx == 0:
                    break

            else:
                logger.debug('# work_ram[0x{:04x}] = buffer[0x{:04x}]'.format(
                    bx, data_p))
                logger.debug('! output[0x{:04x}] = buffer[0x{:04x}]'.format(
                    len(output), data_p))
                dat = buffer[data_p]
                work_ram[bx] = dat
                data_p += 1
                bx += 1
                bx &= 0xfff
                output.append(dat)
                edx -= 1
                if edx == 0:
                    break

        logger.info(
            '{} - output_size: {:08x}, output_end: {:08x}, input_size: {:08x}, input_end: {:08x}'
            .format(self, output_size, len(output), len(buffer), data_p))

        return mrc.TransformResult(payload=bytes(output), end_offset=data_p)
Example #28
    def import_data(self, buffer, parent=None):
        assert utils.is_bytes(buffer)

        pointer = 0
        total_num_bytes = len(buffer)

        bit_count = utils.from_uint8(buffer[pointer:pointer + 1])
        checksum = utils.from_uint8(buffer[pointer + 1:pointer + 2])
        decompressed_size = utils.from_uint32_be(buffer[pointer + 2:pointer +
                                                        6])
        compressed_size = utils.from_uint32_be(buffer[pointer + 6:pointer +
                                                      10])

        pointer += 10
        total_num_bytes -= 10
        compressed_size -= 10

        compressed_data = buffer[pointer:pointer + compressed_size]
        if checksum != self._xor_checksum(compressed_data):
            logger.warning('{}: Checksum doesn\'t match header'.format(self))

        pointer += compressed_size
        total_num_bytes -= compressed_size

        bs = utils.BitReader(compressed_data,
                             compressed_size - 1,
                             bytes_reverse=True,
                             output_reverse=True)
        bs.bits_remaining = bit_count

        def copy_prev_data(blocklen, offset_size, state):
            offset = bs.get_bits(offset_size)
            for i in range(blocklen):
                state['dptr'] -= 1
                state['ddata'][state['dptr']] = state['ddata'][state['dptr'] +
                                                               offset + 1]
            return

        def dump_data(num_bytes, state):
            for i in range(num_bytes):
                state['dptr'] -= 1
                state['ddata'][state['dptr']] = bs.get_bits(8)
            return

        state = {
            'dptr': decompressed_size,
            'ddata': bytearray(decompressed_size),
        }

        while True:
            if bs.get_bits(1) == 1:
                test = bs.get_bits(2)
                if test == 0:
                    copy_prev_data(3, 9, state)
                elif test == 1:
                    copy_prev_data(4, 10, state)
                elif test == 2:
                    copy_prev_data(bs.get_bits(8) + 1, 12, state)
                elif test == 3:
                    dump_data(bs.get_bits(8) + 9, state)
            else:
                test = bs.get_bits(1)
                if test == 0:
                    dump_data(bs.get_bits(3) + 1, state)
                elif test == 1:
                    copy_prev_data(2, 8, state)
            if not (state['dptr'] > 0):
                break

        return mrc.TransformResult(payload=bytes(state['ddata']),
                                   end_offset=pointer)
Example #29
    def import_data(self, buffer: bytes, parent=None):
        assert utils.is_bytes(buffer)

        # load in constructor properties
        bpp = mrc.property_get(self.bpp, parent)
        width = mrc.property_get(self.width, parent)
        height = mrc.property_get(self.height, parent)
        plane_size = mrc.property_get(self.plane_size, parent)
        plane_padding = mrc.property_get(self.plane_padding, parent)
        frame_offset = mrc.property_get(self.frame_offset, parent)
        frame_count = mrc.property_get(self.frame_count, parent)
        frame_stride = mrc.property_get(self.frame_stride, parent)
        row_planar_size = mrc.property_get(self.row_planar_size, parent)
        plane_order = mrc.property_get(self.plane_order, parent)

        assert (bpp >= 0) and (bpp <= 8)
        if (width or height):
            assert (width * height) % 8 == 0
            if plane_size:
                raise Exception(
                    'Can\'t define plane_size when either width or height is defined.'
                )
        elif plane_size is None and frame_count == 1:
            # for a single frame without a plane size, assume the buffer contains everything
            assert len(buffer) % bpp == 0
            plane_size = len(buffer) // bpp
        else:
            assert plane_size is not None
        assert (frame_count >= 1)

        if plane_size is None:
            plane_size = math.ceil(width * height / 8)

        if frame_count >= 2 and frame_stride is None:
            frame_stride = bpp * (plane_size + plane_padding)
        else:
            frame_stride = frame_stride if frame_stride is not None else 0

        if row_planar_size:
            assert row_planar_size >= 1

        if not plane_order:
            plane_order = range(bpp)
        else:
            assert all([y in range(bpp) for y in plane_order])
            assert len(plane_order) == len(set(plane_order))

        # because frame_stride can potentially read past the buffer, only worry about measuring
        # the last n-1 strides + one frame
        assert len(buffer) >= frame_offset + (
            frame_count - 1) * frame_stride + bpp * plane_size

        # our output is going to be "chunky"; each byte is a pixel (8-bit or 256 colour mode)
        raw_image = bytearray(plane_size * frame_count)

        # the input is planar. this is a packed format found occasionally in old graphics hardware,
        # and in old image formats where space was paramount.
        # the trick is you can have less than 8 bits in your colourspace!
        # e.g. if you only need 8 colours, you can get away with a 3-bit colourspace and save 62.5% space.
        # instead of each byte being a pixel, each byte stores 8 pixels worth of data for a single plane.
        # there is one plane per bit of colourspace, and the planes are stored one after another.

        # in order for the calculations to be fast, planar graphics are pretty much always divisible by 8.
        # we're going to abuse this and unpack our bitplanes using 64-bit integers.
        # let's make a big array of them.
        planes = array('Q', (0, ) * (plane_size))
        segment_size = plane_size + plane_padding

        for f in range(frame_count):
            pointer = frame_offset + f * frame_stride
            for bi, b in enumerate(plane_order):
                for i in range(plane_size):
                    # for the first iteration, clear the plane
                    if bi == 0:
                        planes[i] = 0

                    if row_planar_size is None:
                        address = pointer + b * segment_size + i
                    else:
                        address = pointer + (row_planar_size * bpp) * (
                            i // row_planar_size) + row_planar_size * b + (
                                i % row_planar_size)

                    # bits.unpack_bits is a helper method which converts a 1-byte bitfield
                    # into 8 bool bytes (i.e. 1 or 0) stored as a 64-bit int.
                    # we can effectively work on 8 chunky pixels at once!
                    # because the chunky pixels are bitfields, combining planes is an easy
                    # left shift (i.e. move all the bits up by [plane ID] places) and bitwise OR
                    planes[i] |= bits.unpack_bits(buffer[address]) << bi

            # check for endianness! for most intel and ARM chips the order of bytes in hardware is reversed,
            # so we need to flip it around for the bytes to be sequential.
            if sys.byteorder == 'little':
                planes.byteswap()

            # convert our planes array to bytes, and you have your chunky pixels
            raw_image[f * plane_size * 8:(f + 1) * plane_size *
                      8] = planes.tobytes()

        if frame_count > 1:
            end_offset = frame_offset + frame_count * frame_stride
        else:
            plane_bits = plane_size * 8 * bpp
            end_offset = frame_offset + (plane_bits) // 8 + (1 if (plane_bits %
                                                                   8) else 0)

        return mrc.TransformResult(payload=bytes(raw_image),
                                   end_offset=end_offset)
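Every transform above follows the same calling convention: import_data takes raw bytes (most also accept an optional parent used for property lookups) and returns an mrc.TransformResult whose payload holds the decoded data and whose end_offset reports how much of the input was consumed, while export_data performs the inverse. A minimal usage sketch, with the transform class name purely hypothetical:

    transform = SomeTransform()                  # stands in for any of the classes above
    result = transform.import_data(raw_bytes)    # decode; raw_bytes is the source buffer
    decoded = result.payload                     # decoded bytes
    consumed = result.end_offset                 # how much of raw_bytes was read

    reencoded = transform.export_data(decoded)   # pack the payload back up again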