def __init__(self, *args, **kw):
    FieldSet.__init__(self, *args, **kw)
    try:
        self._name, self.handler, self._description = self.TAG_INFO[self["tag"].value]
    except KeyError:
        self.handler = None
    size = self["size"]
    self._size = size.address + size.size + alignValue(size.value, 2) * 8
def __init__(self, *args):
    FieldSet.__init__(self, *args)
    self._size = (8 + alignValue(self["size"].value, 2)) * 8
    tag = self["type"].value
    if tag in self.TAG_INFO:
        self._name, self._description, self._parser = self.TAG_INFO[tag]
    else:
        self._parser = None
def createFields(self):
    group = self["../group_desc/group[%u]" % self.uniq_id]
    superblock = self["/superblock"]
    block_size = self["/"].block_size

    # Check for a superblock copy (ext2 magic 0xEF53 at byte offset 56)
    addr = self.absolute_address + 56 * 8
    self.superblock_copy = (self.stream.readBytes(addr, 2) == "\x53\xEF")
    if self.superblock_copy:
        yield SuperBlock(self, "superblock_copy")

    # Compute number of blocks and inodes
    block_count = superblock["blocks_per_group"].value
    inode_count = superblock["inodes_per_group"].value
    block_index = self.uniq_id * block_count
    inode_index = self.uniq_id * inode_count
    if (block_count % 8) != 0:
        raise ParserError("Invalid block count")
    if (inode_count % 8) != 0:
        raise ParserError("Invalid inode count")
    block_count = min(block_count, superblock["blocks_count"].value - block_index)
    inode_count = min(inode_count, superblock["inodes_count"].value - inode_index)

    # Read block bitmap
    field = self.seekByte(group["block_bitmap"].value * block_size, relative=False, null=True)
    if field:
        yield field
    yield BlockBitmap(self, "block_bitmap", block_index, block_count, "Block bitmap")

    # Read inode bitmap
    field = self.seekByte(group["inode_bitmap"].value * block_size, relative=False)
    if field:
        yield field
    yield InodeBitmap(self, "inode_bitmap", inode_index, inode_count, "Inode bitmap")

    # Read inode table
    field = self.seekByte(alignValue(self.current_size // 8, block_size))
    if field:
        yield field
    yield InodeTable(self, "inode_table", inode_index, inode_count)

    # Add padding if needed
    addr = min(self.parent.size / 8,
               (self.uniq_id + 1) * superblock["blocks_per_group"].value * block_size)
    yield self.seekByte(addr, "data", relative=False)
def setCheckedSizes(self, size):
    # First set size so that the end is aligned, if needed
    self.real_size = size
    size *= 8
    if self.ALIGN:
        size = alignValue(self.absolute_address + size, 8 * self.ALIGN) \
               - self.absolute_address
    if self._parent._size:
        if self._parent.current_size + size > self._parent._size:
            size = self._parent._size - self._parent.current_size
    self._size = size
def hexadecimal(field):
    """
    Convert an integer field to hexadecimal in lower case.
    Returns a unicode string.

    >>> hexadecimal(type("", (), dict(value=412, size=16)))
    u'0x019c'
    >>> hexadecimal(type("", (), dict(value=0, size=32)))
    u'0x00000000'
    """
    assert hasattr(field, "value") and hasattr(field, "size")
    size = field.size
    padding = alignValue(size, 4) // 4
    pattern = u"0x%%0%ux" % padding
    return pattern % field.value
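# A minimal sketch of the alignValue() helper that every snippet here relies on,
# assuming it simply rounds a value up to the next multiple of the alignment;
# the library's own implementation may differ in detail.
def alignValue(value, align):
    """Round value up to the next multiple of align (unchanged if already aligned)."""
    if value % align != 0:
        return value + align - (value % align)
    return value

# With that behaviour, hexadecimal() above pads to whole hex digits (nibbles):
# a 16-bit field needs alignValue(16, 4) // 4 = 4 digits, hence 412 -> u'0x019c'.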
def __init__(self, *args, **kw):
    FieldSet.__init__(self, *args, **kw)
    self._size = (8 + alignValue(self["size"].value, 2)) * 8
    tag = self["tag"].value
    if tag in self.TAG_INFO:
        self.tag_info = self.TAG_INFO[tag]
        if tag == "LIST":
            subtag = self["subtag"].value
            if subtag in self.subtag_info:
                info = self.subtag_info[subtag]
                self.tag_info = (info[0], None, info[1])
        self._name = self.tag_info[0]
        self._description = self.tag_info[2]
    else:
        self.tag_info = ("field[]", None, None)
def createFields(self):
    yield String(self, "signature", 4, "8BIM signature", charset="ASCII")
    if self["signature"].value != "8BIM":
        raise ParserError("Stream doesn't look like 8BIM item (wrong signature)!")
    yield textHandler(UInt16(self, "tag"), hexadecimal)
    if self.stream.readBytes(self.absolute_address + self.current_size, 4) != "\0\0\0\0":
        yield PascalString8(self, "name")
        size = 2 + (self["name"].size // 8) % 2
        yield NullBytes(self, "name_padding", size)
    else:
        yield String(self, "name", 4, strip="\0")
    yield UInt16(self, "size")
    size = alignValue(self["size"].value, 2)
    if not size:
        return
    if self.handler:
        yield self.handler(self, "content", size=size * 8)
    else:
        yield RawBytes(self, "content", size)
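# Hypothetical check of the 2-byte alignment applied to the 8BIM "content" field
# above: an odd declared size is rounded up so the item always ends on an even
# byte boundary.  Uses the alignValue() sketch above; the sizes are made up.
for declared in (6, 7):
    stored = alignValue(declared, 2)   # 6 -> 6, 7 -> 8
    print("declared content: %u bytes, stored (padded): %u bytes" % (declared, stored))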
def __init__(self, parent, name, is_32bit=True):
    FieldSet.__init__(self, parent, name)
    self._size = alignValue(self["size"].value, 4) * 8
    self.is_32bit = is_32bit
def __init__(self, parent, name, format, description=None,
             strip=None, charset=None, nbytes=None, truncate=None):
    Bytes.__init__(self, parent, name, 1, description)

    # Is format valid?
    assert format in self.VALID_FORMATS

    # Store options
    self._format = format
    self._strip = strip
    self._truncate = truncate

    # Check charset and compute character size in bytes
    # (or None when it's not possible to guess the character size)
    if not charset or charset in self.CHARSET_8BIT:
        self._character_size = 1   # one byte per character
    elif charset in self.UTF_CHARSET:
        self._character_size = None
    else:
        raise FieldError("Invalid charset for %s: \"%s\"" % (self.path, charset))
    self._charset = charset

    # Is it a fixed-size string?
    if nbytes is not None:
        assert self._format == "fixed"
        # Arbitrary limits, just to catch some bugs...
        if not (1 <= nbytes <= 0xffff):
            raise FieldError("Invalid string size for %s: %s" % (self.path, nbytes))
        self._content_size = nbytes   # content length in bytes
        self._size = nbytes * 8
        self._content_offset = 0
    else:
        # Format with a suffix: find the end of the string
        if self._format in self.SUFFIX_FORMAT:
            self._content_offset = 0

            # Choose the suffix
            suffix = self.suffix_str

            # Find the suffix
            length = self._parent.stream.searchBytesLength(
                suffix, False, self.absolute_address)
            if length is None:
                raise FieldError("Unable to find end of string %s (format %s)!"
                                 % (self.path, self._format))
            if 1 < len(suffix):
                # Fix length for little-endian bug with UTF-xx charsets:
                #   u"abc" -> "a\0b\0c\0\0\0" (UTF-16-LE)
                # search returns length=5, whereas the real length is 6
                length = alignValue(length, len(suffix))

            # Compute sizes
            self._content_size = length   # in bytes
            self._size = (length + len(suffix)) * 8

        # Format with a prefix: read the prefixed length in bytes
        else:
            assert self._format in self.PASCAL_FORMATS

            # Get the prefix size
            prefix_size = self.PASCAL_FORMATS[self._format]
            self._content_offset = prefix_size

            # Read the prefix and compute sizes
            value = self._parent.stream.readBits(
                self.absolute_address, prefix_size * 8, self._parent.endian)
            self._content_size = value   # in bytes
            self._size = (prefix_size + value) * 8

    # For UTF-16 and UTF-32, choose the right charset using the BOM
    if self._charset in self.UTF_CHARSET:
        # Does the charset require a BOM?
        bomsize, endian = self.UTF_CHARSET[self._charset]
        if endian == "BOM":
            # Read the BOM value
            nbytes = bomsize // 8
            bom = self._parent.stream.readBytes(self.absolute_address, nbytes)

            # Choose the right charset using the BOM
            bom_endian = self.UTF_BOM[bomsize]
            if bom not in bom_endian:
                raise FieldError("String %s has invalid BOM (%s)!"
                                 % (self.path, repr(bom)))
            self._charset = bom_endian[bom]
            self._content_size -= nbytes
            self._content_offset += nbytes

    # Compute length in characters if possible
    if self._character_size:
        self._length = self._content_size // self._character_size
    else:
        self._length = None
def __init__(self, parent, name):
    FieldSet.__init__(self, parent, name)
    self._size = alignValue(self["size"].value, 4) * 8
def createFields(self):
    yield String(self, "magic", 4, "Signature (BLP2)")
    yield Enum(UInt32(self, "compression", "Compression type"), {
        0: "JPEG Compressed",
        1: "Uncompressed or DXT/S3TC compressed"})
    yield Enum(UInt8(self, "encoding", "Encoding type"), {
        1: "Raw",
        2: "DXT/S3TC Texture Compression (a.k.a. DirectX)"})
    yield UInt8(self, "alpha_depth", "Alpha channel depth, in bits (0 = no alpha)")
    yield Enum(UInt8(self, "alpha_encoding", "Encoding used for alpha channel"), {
        0: "DXT1 alpha (0 or 1 bit alpha)",
        1: "DXT3 alpha (4 bit alpha)",
        7: "DXT5 alpha (8 bit interpolated alpha)"})
    yield Enum(UInt8(self, "has_mips", "Are mip levels present?"), {
        0: "No mip levels",
        1: "Mip levels present; number of levels determined by image size"})
    yield UInt32(self, "width", "Base image width")
    yield UInt32(self, "height", "Base image height")
    for i in xrange(16):
        yield UInt32(self, "mipmap_offset[]")
    for i in xrange(16):
        yield UInt32(self, "mipmap_size[]")
    yield PaletteRGBA(self, "palette", 256)

    compression = self["compression"].value
    encoding = self["encoding"].value
    alpha_depth = self["alpha_depth"].value
    alpha_encoding = self["alpha_encoding"].value
    width = self["width"].value
    height = self["height"].value

    if compression == 0:
        # JPEG compression
        yield UInt32(self, "jpeg_header_len")
        yield RawBytes(self, "jpeg_header", self["jpeg_header_len"].value,
                       "Shared JPEG Header")

    offsets = self.array("mipmap_offset")
    sizes = self.array("mipmap_size")
    for i in xrange(16):
        if not offsets[i].value or not sizes[i].value:
            continue
        padding = self.seekByte(offsets[i].value)
        if padding:
            yield padding
        if compression == 0:
            yield RawBytes(self, "mipmap[%i]" % i, sizes[i].value,
                           "JPEG data, append to header to recover complete image")
        elif compression == 1 and encoding == 1:
            yield Generic2DArray(self, "mipmap_indexes[%i]" % i, height, width,
                                 PaletteIndex, "row", "index",
                                 "Indexes into the palette")
            if alpha_depth == 1:
                yield GenericVector(self, "mipmap_alphas[%i]" % i, height, width,
                                    Bit, "row", "is_opaque", "Alpha values")
            elif alpha_depth == 8:
                yield GenericVector(self, "mipmap_alphas[%i]" % i, height, width,
                                    UInt8, "row", "alpha", "Alpha values")
        elif compression == 1 and encoding == 2:
            block_height = alignValue(height, 4) // 4
            block_width = alignValue(width, 4) // 4
            if alpha_depth in [0, 1] and alpha_encoding == 0:
                yield Generic2DArray(self, "mipmap[%i]" % i,
                                     block_height, block_width, DXT1,
                                     "row", "block", "DXT1-compressed image blocks")
            elif alpha_depth == 8 and alpha_encoding == 1:
                yield Generic2DArray(self, "mipmap[%i]" % i,
                                     block_height, block_width, DXT3,
                                     "row", "block", "DXT3-compressed image blocks")
            elif alpha_depth == 8 and alpha_encoding == 7:
                yield Generic2DArray(self, "mipmap[%i]" % i,
                                     block_height, block_width, DXT5,
                                     "row", "block", "DXT5-compressed image blocks")
        width /= 2
        height /= 2
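# Rough illustration of the DXT block arithmetic used above: DXT1/3/5 encode
# 4x4 pixel blocks, so dimensions that are not multiples of 4 are rounded up
# before dividing.  Uses the alignValue() sketch above; the sizes are arbitrary
# examples, not values read from a BLP2 file.
for width, height in ((256, 256), (10, 6)):
    block_width = alignValue(width, 4) // 4    # 256 -> 64, 10 -> 3
    block_height = alignValue(height, 4) // 4  # 256 -> 64,  6 -> 2
    print("%ux%u pixels -> %ux%u DXT blocks" % (width, height, block_width, block_height))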
def __init__(self, parent, name, width, pixel_class):
    FieldSet.__init__(self, parent, name)
    self._pixel = pixel_class
    self._width = width
    self._size = alignValue(self._width * self._pixel.static_size, 32)
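# Worked example of the 32-bit row alignment above (classic scanline padding):
# each stored row is padded so the next one starts on a 4-byte boundary.
# Uses the alignValue() sketch above; the pixel sizes are illustrative, whereas
# the parser takes them from pixel_class.static_size.
for width, bits_per_pixel in ((3, 24), (5, 8)):
    row_bits = width * bits_per_pixel       # 72 and 40 bits of pixel data
    stored_bits = alignValue(row_bits, 32)  # padded to 96 and 64 bits
    print("width=%u at %u bpp: %u -> %u bits per stored row"
          % (width, bits_per_pixel, row_bits, stored_bits))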