def resource_stop_check(buffer, offset):
    """Decide whether the resource table ends at ``offset``.

    Looks at the 8-byte (offset, size) entry immediately *before* the
    current one; the table is considered finished once that entry points
    at the end of the file.
    """
    # we want to look at the previous entry to the current one,
    # ignore if we're at the first entry
    if offset < 8:
        return False
    # too near the end of the file? that's a paddling
    if offset >= len(buffer) - 8:
        return True
    prev_offset = utils.from_uint32_le(buffer[offset - 8:offset - 4])
    prev_size = utils.from_uint32_le(buffer[offset - 4:offset])
    # filler entries (all-ones offset, zero size) don't terminate the table
    if prev_offset == 0xffffffff and prev_size == 0:
        return False
    # check for references to the end of the file
    return prev_offset + prev_size >= len(buffer) - 1
def import_data( self, buffer ):
    """Decompress an LZSS-style stream with a 4 KiB ring-buffer window.

    The first dword of ``buffer`` is the expected decompressed size; the
    rest is control bytes (8 flags each, consumed LSB first) interleaved
    with literal bytes (flag 1) and 16-bit back-references (flag 0).
    Returns an mrc.TransformResult with the decompressed payload and the
    offset where reading stopped.
    """
    output_size = utils.from_uint32_le( buffer[:4] )
    edx = output_size                 # bytes left to emit
    data_p = 4                       # read cursor into the compressed stream
    bx = 0                           # write cursor into the ring buffer
    cx = 0                           # control-bit shift register
    work_ram = bytearray( 0x1000 )   # 4 KiB sliding-window ring buffer
    output = bytearray()
    while True:
        cx >>= 1
        # all 8 flags consumed: reload a fresh control byte.
        # the 0xff00 sentinel marks when 8 shifts have happened.
        if cx < 0x100:
            logger.debug( '@ new pattern: {:08b}'.format( buffer[data_p] ) )
            cx = buffer[data_p] + 0xff00
            data_p += 1
        if not (cx & 1):
            # flag 0: back-reference, 16-bit little-endian field:
            # low 12 bits = window offset, high 4 bits = copy length - 3
            info = buffer[data_p] + (buffer[data_p+1] << 8)
            data_p += 2
            work_p = info & 0xfff
            count = (info >> 12) + 3
            logger.debug( '# work_ram[0x{:04x}:0x{:04x}] = work_ram[0x{:04x}:0x{:04x}]'.format( bx, (bx+count) & 0xfff, work_p, (work_p+count) & 0xfff ) )
            logger.debug( '! output[0x{:04x}:0x{:04x}] = work_ram[0x{:04x}:0x{:04x}]'.format( len( output ), len( output )+count, work_p, (work_p+count) & 0xfff ) )
            # byte-at-a-time copy so overlapping runs replicate correctly
            for i in range( count ):
                # loc_103C4
                dat = work_ram[work_p]
                work_ram[bx] = dat
                work_p += 1
                work_p &= 0xfff
                bx += 1
                bx &= 0xfff
                output.append( dat )
                edx -= 1
                if edx == 0:
                    break
            if edx == 0:
                break
        else:
            # flag 1: literal byte, copied to both the output and the window
            logger.debug( '# work_ram[0x{:04x}] = buffer[0x{:04x}]'.format( bx, data_p ) )
            logger.debug( '! output[0x{:04x}] = buffer[0x{:04x}]'.format( len( output ), data_p ) )
            dat = buffer[data_p]
            work_ram[bx] = dat
            data_p += 1
            bx += 1
            bx &= 0xfff
            output.append( dat )
            edx -= 1
            if edx == 0:
                break
    logger.info( '{} - output_size: {:08x}, output_end: {:08x}, input_size: {:08x}, input_end: {:08x}'.format( self, output_size, len( output ), len( buffer ), data_p ) )
    return mrc.TransformResult( payload=bytes( output ), end_offset=data_p )
def import_data(self, buffer, parent=None):
    """Unwrap an optionally-LZSS-compressed file.

    Compressed files record their own total length in the first dword; if
    that field doesn't match the buffer length the data is passed through
    untouched. Otherwise the payload after the 8-byte header is fed to the
    LZSS decompressor and checked against the stored raw size.
    """
    # nothing to do for an empty buffer
    if not buffer:
        return mrc.TransformResult()
    decompressor = lzss.LZSSCompressor()
    size_comp = utils.from_uint32_le(buffer[0:4])
    if size_comp != len(buffer):
        logger.info('{}: File not compressed'.format(self))
        return mrc.TransformResult(payload=buffer, end_offset=len(buffer))
    size_raw = utils.from_uint32_le(buffer[4:8])
    # NOTE(review): the nested result's end_offset is relative to
    # buffer[8:] — presumably callers account for this; confirm upstream.
    result = decompressor.import_data(buffer[8:])
    if len(result.payload) != size_raw:
        logger.warning(
            '{}: Was expecting a decompressed size of {}, got {}!'.format(
                self, size_raw, len(result.payload)))
    return result
def import_data(self, buffer, parent=None):
    """Decompress a variable-width LZW stream.

    Header: uint32le decompressed size, uint16le maximum code width
    (expected to be 12), then a big-endian bitstream of codes. Codes
    0-255 are literals, 256 is an error marker, 257 terminates the data.
    """
    decomp_size = utils.from_uint32_le(buffer[:4])
    max_bits = utils.from_uint16_le(buffer[4:6])  # should be 12
    table = [bytes((i, )) for i in range(256)]
    table.append(None)  # 256: error
    table.append(None)  # 257: end of data
    result = bytearray()
    # code stream starts at offset 6, right after the header fields
    stream = bits.BitStream(buffer, 6, bit_endian='big', io_endian='big')
    width = [9]  # current code width, boxed so the closure can update it

    def extend_table(entry):
        # grow the dictionary until the full code space is used up
        if len(table) < (1 << max_bits):
            logger.debug('lookup[{}] = {}'.format(len(table), entry))
            table.append(entry)
            # widen the codes one entry before the current width fills up
            if len(table) == (1 << width[0]) - 1:
                width[0] = min(width[0] + 1, max_bits)
                logger.debug('usebits = {}'.format(width[0]))

    # the first code is always a literal
    first = stream.read(width[0])
    prev = table[first]
    logger.debug('fcode={},match={}'.format(first, prev))
    result.extend(prev)
    while True:
        code = stream.read(width[0])
        logger.debug('ncode={}'.format(code))
        if code == 257:  # end of data
            break
        if code == 256:  # error
            raise Exception('Found error code, data is not valid')
        if code < len(table):
            current = table[code]
        else:
            # KwKwK case: the code refers to the entry about to be added
            current = prev + prev[0:1]
        logger.debug('match={}'.format(prev))
        logger.debug('nmatch={}'.format(current))
        result.extend(current)
        extend_table(prev + current[0:1])
        prev = current
    if len(result) != decomp_size:
        logger.warning(
            '{}: was expecting data of size {}, got data of size {} instead'
            .format(self, decomp_size, len(result)))
    return mrc.TransformResult(payload=bytes(result), end_offset=len(buffer))
def import_data( self, buffer, parent=None ):
    """Decompress a variable-width LZW stream (utils.BitReader variant).

    Header: uint32le decompressed size, uint16le maximum code width
    (expected to be 12), followed by the code bitstream. Codes 0-255 are
    literals; 256 flags an error; 257 ends the data.
    """
    decomp_size = utils.from_uint32_le( buffer[:4] )
    max_bits = utils.from_uint16_le( buffer[4:6] ) # should be 12
    # codes 0-255 decode to single-byte literals
    lookup = [bytes((i,)) for i in range( 256 )]
    lookup.append( None ) # 256: error
    lookup.append( None ) # 257: end of data
    output = bytearray()
    # code stream starts at offset 6, right after the header fields
    bs = utils.BitReader( buffer, 6, bits_reverse=True, output_reverse=True )
    # current code width; kept in a dict so the closure can mutate it
    state = {'usebits': 9}
    def add_to_lookup( state, entry ):
        # grow the dictionary until the full code space is used up
        if len( lookup ) < (1 << max_bits):
            logger.debug( 'lookup[{}] = {}'.format( len( lookup ), entry ) )
            lookup.append( entry )
            # widen codes one entry before the current width fills up
            if len( lookup ) == (1 << state['usebits'])-1:
                state['usebits'] = min( state['usebits']+1, max_bits )
                logger.debug( 'usebits = {}'.format(state['usebits']) )
        return
    # the first code is always a literal
    fcode = bs.get_bits( state['usebits'] )
    match = lookup[fcode]
    logger.debug( 'fcode={},match={}'.format( fcode, match ) )
    output.extend( match )
    while True:
        ncode = bs.get_bits( state['usebits'] )
        logger.debug( 'ncode={}'.format( ncode ) )
        if ncode == 257: # end of data
            break
        elif ncode == 256: # error
            raise Exception( 'Found error code, data is not valid' )
        elif ncode < len( lookup ):
            nmatch = lookup[ncode]
        else:
            # KwKwK case: the code refers to the entry about to be added
            nmatch = match+match[0:1]
        logger.debug( 'match={}'.format(match) )
        logger.debug( 'nmatch={}'.format(nmatch) )
        output.extend( nmatch )
        # add code to lookup
        add_to_lookup( state, match+nmatch[0:1] )
        match = nmatch
    if len( output ) != decomp_size:
        logger.warning( '{}: was expecting data of size {}, got data of size {} instead'.format( self, decomp_size, len( output ) ) )
    return mrc.TransformResult( payload=bytes( output ), end_offset=len( buffer ) )
def import_data( self, buffer, parent=None ):
    """Decompress a byte-oriented RLE stream.

    Header: uint32le decompressed size. Control bytes >= 128 introduce a
    literal run of (control - 127) bytes; control bytes < 128 repeat the
    following byte (control + 3) times.
    """
    final_length = utils.from_uint32_le( buffer[0:4] )
    pos = 4
    result = bytearray()
    while len( result ) < final_length:
        control = buffer[pos]
        if control >= 128:
            # literal run: copy the next (control - 127) bytes verbatim
            run = control - 126
            result.extend( buffer[pos+1:pos+run] )
            pos += run
        else:
            # repeat run: next byte emitted (control + 3) times
            result.extend( buffer[pos+1:pos+2]*(control+3) )
            pos += 2
    return mrc.TransformResult( payload=bytes( result ), end_offset=pos )
def import_data( self, buffer, parent=None ):
    """Decompress a word-oriented RLE stream.

    Header: uint32le decompressed size. The escape word 0xFE 0xFE is
    followed by a uint16le repeat count and a 2-byte pattern; every other
    word is copied through unchanged.
    """
    final_length = utils.from_uint32_le( buffer[0:4] )
    pos = 4
    result = bytearray()
    while len( result ) < final_length:
        pair = buffer[pos:pos+2]
        if pair == b'\xfe\xfe':
            # escape marker: 16-bit repeat count, then the 2-byte pattern
            repeats = utils.from_uint16_le( buffer[pos+2:pos+4] )
            pattern = buffer[pos+4:pos+6]
            result.extend( pattern*repeats )
            pos += 6
        else:
            # literal word, copied as-is
            result.extend( pair )
            pos += 2
    return mrc.TransformResult( payload=bytes( result ), end_offset=pos )
def offset_stream_end( buffer, offset ):
    """Return True once ``offset`` reaches the stream size stored in the
    first dword of ``buffer``."""
    stream_size = utils.from_uint32_le( buffer[0:4] )
    return offset >= stream_size
def import_data(self, buffer):
    """Decompress an LZSS-style stream with a 4 KiB ring-buffer window.

    The first dword of ``buffer`` is the expected decompressed size; the
    rest is control bytes (8 flags each, consumed LSB first) interleaved
    with literal bytes (flag 1) and 16-bit back-references (flag 0).
    Returns an mrc.TransformResult with the payload and the offset where
    reading stopped.
    """
    expected = utils.from_uint32_le(buffer[:4])
    remaining = expected  # bytes still to emit
    src = 4  # read cursor into the compressed stream
    win_pos = 0  # write cursor into the ring buffer
    ctrl = 0  # control-bit shift register
    window = bytearray(0x1000)  # 4 KiB sliding-window ring buffer
    out = bytearray()
    while True:
        ctrl >>= 1
        if ctrl < 0x100:
            # all 8 flags consumed: reload a fresh control byte.
            # the 0xff00 sentinel marks when 8 shifts have happened.
            logger.debug('@ new pattern: {:08b}'.format(buffer[src]))
            ctrl = buffer[src] + 0xff00
            src += 1
        if ctrl & 1:
            # flag 1: literal byte, copied to both output and the window
            logger.debug('# work_ram[0x{:04x}] = buffer[0x{:04x}]'.format(
                win_pos, src))
            logger.debug('! output[0x{:04x}] = buffer[0x{:04x}]'.format(
                len(out), src))
            b = buffer[src]
            window[win_pos] = b
            src += 1
            win_pos = (win_pos + 1) & 0xfff
            out.append(b)
            remaining -= 1
            if remaining == 0:
                break
        else:
            # flag 0: back-reference, 16-bit little-endian field:
            # low 12 bits = window offset, high 4 bits = copy length - 3
            info = buffer[src] + (buffer[src + 1] << 8)
            src += 2
            copy_pos = info & 0xfff
            count = (info >> 12) + 3
            logger.debug(
                '# work_ram[0x{:04x}:0x{:04x}] = work_ram[0x{:04x}:0x{:04x}]'
                .format(win_pos, (win_pos + count) & 0xfff, copy_pos,
                        (copy_pos + count) & 0xfff))
            logger.debug(
                '! output[0x{:04x}:0x{:04x}] = work_ram[0x{:04x}:0x{:04x}]'
                .format(len(out), len(out) + count, copy_pos,
                        (copy_pos + count) & 0xfff))
            # byte-at-a-time copy so overlapping runs replicate correctly
            for _ in range(count):  # loc_103C4
                b = window[copy_pos]
                window[win_pos] = b
                copy_pos = (copy_pos + 1) & 0xfff
                win_pos = (win_pos + 1) & 0xfff
                out.append(b)
                remaining -= 1
                if remaining == 0:
                    break
            if remaining == 0:
                break
    logger.info(
        '{} - output_size: {:08x}, output_end: {:08x}, input_size: {:08x}, input_end: {:08x}'
        .format(self, expected, len(out), len(buffer), src))
    return mrc.TransformResult(payload=bytes(out), end_offset=src)
def offset_stream_end(buffer, offset):
    """Return True once ``offset`` passes the stream size stored in the
    first dword of ``buffer``."""
    end = utils.from_uint32_le(buffer[0:4])
    return offset >= end