def createFields(self):
    m2ts = self.is_m2ts()

    while not self.eof:
        current = self.current_size
        next_sync = current
        if m2ts:
            next_sync += 4 * 8
        sync = self.stream.searchBytes("\x47", current,
                                       current + MAX_PACKET_SIZE * 8)
        if sync is None:
            raise ParserError("Unable to find synchronization byte")
        elif sync > next_sync:
            yield RawBytes(self, "incomplete_packet[]", (sync - current) // 8)
        yield Packet(self, "packet[]", m2ts=m2ts)
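
# A minimal standalone sketch of the synchronization logic above, assuming the
# usual packet layout: a plain TS packet is 188 bytes starting with the sync
# byte 0x47, while an M2TS packet is 192 bytes with a 4-byte prefix before the
# sync byte. find_sync and its buffer-based interface are illustrative (the
# parser above searches a bit stream), not part of the original code.
def find_sync(buf, m2ts=False, packets_to_check=3):
    """Return the byte offset of the first plausible packet start, or None."""
    buf = bytearray(buf)
    packet_size = 192 if m2ts else 188
    sync_offset = 4 if m2ts else 0
    last_start = len(buf) - packet_size * packets_to_check
    for start in range(last_start + 1):
        if all(buf[start + sync_offset + i * packet_size] == 0x47
               for i in range(packets_to_check)):
            return start
    return None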
def parseDefineSound(parent, size):
    yield UInt16(parent, "sound_id")
    yield Bit(parent, "is_stereo")
    yield Bit(parent, "is_16bit")
    yield textHandler(Bits(parent, "rate", 2), bit2hertz)
    yield Enum(Bits(parent, "codec", 4), SOUND_CODEC)
    yield UInt32(parent, "sample_count")

    if parent["codec"].value == SOUND_CODEC_MP3:
        yield UInt16(parent, "len")

    size = (parent.size - parent.current_size) // 8
    if size:
        yield RawBytes(parent, "music_data", size)
def createFields(self):
    # Find synchronisation bytes
    padding = self.synchronize()
    if padding:
        yield padding

    while self.current_size < self.size:
        yield Frame(self, "frame[]")
        # padding = self.synchronize()
        # if padding:
        #     yield padding

    # Read raw bytes at the end (if any)
    size = (self.size - self.current_size) // 8
    if size:
        yield RawBytes(self, "raw", size)
def createFields(self): yield Enum(UInt8(self, "code", "Code"), self.code_name) code = self["code"].value if code == self.NOP: return yield UInt8(self, "length", "Option size in bytes") if code == self.MAX_SEGMENT: yield UInt16(self, "max_seg", "Maximum segment size") elif code == self.WINDOW_SCALE: yield UInt8(self, "win_scale", "Window scale") elif code == self.TIMESTAMP: yield UInt32(self, "ts_val", "Timestamp value") yield UInt32(self, "ts_ecr", "Timestamp echo reply") else: size = (self.size - self.current_size) // 8 if size: yield RawBytes(self, "data", size)
def createFields(self): yield UInt32(self, "size") self._size = (self['size'].value + 4) * 8 yield UInt16(self, "unknown[]") yield UInt8(self, "video_count", "Number of video stream entries") yield UInt8(self, "audio_count", "Number of video stream entries") yield UInt8(self, "subtitle_count", "Number of presentation graphics/text subtitle entries") yield UInt8(self, "ig_count", "Number of interactive graphics entries") yield RawBytes(self, "unknown[]", 8) for i in xrange(self['video_count'].value): yield AVCHDMPLS_Stream(self, "video[]") for i in xrange(self['audio_count'].value): yield AVCHDMPLS_Stream(self, "audio[]") for i in xrange(self['subtitle_count'].value): yield AVCHDMPLS_Stream(self, "subtitle[]") for i in xrange(self['ig_count'].value): yield AVCHDMPLS_Stream(self, "ig[]")
def yieldChunks(self, obj):
    while len(self.chunks) > 0:
        chunk = self.chunks.pop()
        current_pos = obj.current_size // 8

        # Check if padding needed
        size = chunk.offset - current_pos
        if size > 0:
            obj.info("Padding of %u bytes needed: curr=%u offset=%u"
                     % (size, current_pos, chunk.offset))
            yield PaddingBytes(obj, "padding[]", size)
            current_pos = obj.current_size // 8

        # Find resynch point if needed
        count = 0
        old_off = chunk.offset
        while chunk.offset < current_pos:
            count += 1
            chunk = self.chunks.pop()
            # Unfortunately, we also pass the underlying chunks
            if chunk is None:
                obj.info("Couldn't resynch: %u objects skipped to reach %u"
                         % (count, current_pos))
                return

        # Resynch
        size = chunk.offset - current_pos
        if size > 0:
            obj.info("Skipped %u objects to resynch to %u; chunk offset: %u->%u"
                     % (count, current_pos, old_off, chunk.offset))
            yield RawBytes(obj, "resynch[]", size)

        # Yield
        obj.info("Yielding element of size %u at offset %u"
                 % (chunk.size, chunk.offset))
        field = chunk.cls(obj, chunk.name, chunk.size, *chunk.args)
        # Not tested, probably wrong:
        #     if chunk.size: field.static_size = 8*chunk.size
        yield field
        if hasattr(field, "getSubChunks"):
            for sub_chunk in field.getSubChunks():
                obj.info("Adding sub chunk: position=%u size=%u name='%s'"
                         % (sub_chunk.offset, sub_chunk.size, sub_chunk.name))
                self.addChunk(sub_chunk)
def markerHeader(self):
    yield String(self, "signature", 7, "Signature")
    yield UInt8(self, "ver_extract", "Version needed to extract archive")
    yield UInt8(self, "ver_created", "Version used to create archive")
    yield Enum(UInt8(self, "host_os", "OS where the files were compressed"),
               HOST_OS)
    yield UInt8(self, "vol_num", "Volume number")
    yield TimeDateMSDOS32(self, "time", "Date and time (MS DOS format)")
    yield Bits(self, "reserved", 64, "Reserved size for future extensions")
    flags = self["flags"]
    if flags["has_av_string"].value:
        yield PascalString8(self, "av_string", "AV String")
    if flags["has_comment"].value:
        size = filesizeHandler(UInt16(self, "comment_size", "Comment size"))
        yield size
        if size.value > 0:
            yield RawBytes(self, "compressed_comment", size.value,
                           "Compressed comment")
def createFields(self): yield UInt64(self, "zip64_end_size", "Size of zip64 end of central directory record") yield ZipVersion(self, "version_made_by", "Version made by") yield ZipVersion(self, "version_needed", "Version needed to extract") yield UInt32(self, "number_disk", "Number of this disk") yield UInt32( self, "number_disk2", "Number of the disk with the start of the central directory") yield UInt64( self, "number_entries", "Total number of entries in the central directory on this disk") yield UInt64(self, "number_entries2", "Total number of entries in the central directory") yield UInt64(self, "size", "Size of the central directory") yield UInt64(self, "offset", "Offset of start of central directory") if 0 < self["zip64_end_size"].value: yield RawBytes(self, "data_sector", self["zip64_end_size"].value, "zip64 extensible data sector")
def createFields(self): yield Bit(self, "last_metadata_block", "True if this is the last metadata block") yield Enum(Bits(self, "block_type", 7, "Metadata block header type"), self.BLOCK_TYPE_DESC) yield UInt24(self, "metadata_length", "Length of following metadata in bytes (doesn't include this header)") block_type = self["block_type"].value size = self["metadata_length"].value if not size: return try: handler = self.BLOCK_TYPES[block_type][2] except KeyError: handler = None if handler: yield handler(self, "content", size=size * 8) elif self["block_type"].value == 1: yield NullBytes(self, "padding", size) else: yield RawBytes(self, "rawdata", size)
def parseString(parent):
    yield UInt32(parent, "length", "Length")
    length = parent["length"].value
    if parent.name == "lnotab":
        bytecode_offset = 0
        line_number = parent['../firstlineno'].value
        for i in range(0, length, 2):
            bc_off_delta = UInt8(parent, 'bytecode_offset_delta[]')
            yield bc_off_delta
            bytecode_offset += bc_off_delta.value
            bc_off_delta._description = 'Bytecode Offset %i' % bytecode_offset
            line_number_delta = UInt8(parent, 'line_number_delta[]')
            yield line_number_delta
            line_number += line_number_delta.value
            line_number_delta._description = 'Line Number %i' % line_number
    elif 0 < length:
        yield RawBytes(parent, "text", length, "Content")
    if DISASSEMBLE and parent.name == "compiled_code":
        disassembleBytecode(parent["text"])
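
# A minimal standalone sketch of the lnotab decoding done above: co_lnotab is a
# sequence of (bytecode offset delta, line number delta) byte pairs that, summed
# up, map bytecode offsets to source line numbers. decode_lnotab is an
# illustrative helper using the same unsigned-delta reading as the parser above
# (classic CPython semantics, ignoring the later signed-delta refinement), not
# part of the original code.
def decode_lnotab(lnotab, firstlineno):
    """Return a list of (bytecode_offset, line_number) pairs."""
    pairs = []
    offset = 0
    lineno = firstlineno
    data = bytearray(lnotab)
    for i in range(0, len(data) - 1, 2):
        offset += data[i]
        lineno += data[i + 1]
        pairs.append((offset, lineno))
    return pairs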
def createFields(self): yield UInt32(self, "offset", "Offset to data (from file start)") yield UInt16(self, "data_blocks", "Number of data blocks which are in this cabinet") yield Enum(Bits(self, "compr_method", 4, "Compression method"), COMPRESSION_NAME) if self["compr_method"].value in [ 2, 3 ]: # Quantum or LZX use compression level yield PaddingBits(self, "padding[]", 4) yield Bits(self, "compr_level", 5, "Compression level") yield PaddingBits(self, "padding[]", 3) else: yield PaddingBits(self, "padding[]", 12) if self["../flags/has_reserved"].value and self[ "../reserved_folder_size"].value: yield RawBytes(self, "reserved_folder", self["../reserved_folder_size"].value, "Per-folder reserved area")
def createFields(self): if 3 <= self["../ver_major"].value: # ID3 v2.3 and 2.4 yield Enum( String(self, "tag", 4, "Tag", charset="ASCII", strip="\0"), ID3_Chunk.tag23_name) if 4 <= self["../ver_major"].value: yield ID3_Size(self, "size") # ID3 v2.4 else: yield UInt32(self, "size") # ID3 v2.3 yield Bit(self, "tag_alter", "Tag alter preservation") yield Bit(self, "file_alter", "Tag alter preservation") yield Bit(self, "rd_only", "Read only?") yield NullBits(self, "padding[]", 5) yield Bit(self, "compressed", "Frame is compressed?") yield Bit(self, "encrypted", "Frame is encrypted?") yield Bit(self, "group", "Grouping identity") yield NullBits(self, "padding[]", 5) size = self["size"].value is_compressed = self["compressed"].value else: # ID3 v2.2 yield Enum( String(self, "tag", 3, "Tag", charset="ASCII", strip="\0"), ID3_Chunk.tag22_name) yield UInt24(self, "size") size = self["size"].value - self.current_size // 8 + 6 is_compressed = False if size: cls = None if not (is_compressed): tag = self["tag"].value if tag in ID3_Chunk.handler: cls = ID3_Chunk.handler[tag] elif tag[0] == "T": cls = ID3_StringCharset if cls: yield cls(self, "content", "Content", size=size * 8) else: yield RawBytes(self, "content", size, "Raw data content")
def createFields(self): yield PascalString16(self, "name", "Name", charset="UTF-16-LE", strip="\0") yield Enum(UInt16(self, "type"), self.TYPE_NAME) yield UInt16(self, "value_length") type = self["type"].value size = self["value_length"].value name = "value" if type == 0 and (size % 2) == 0: yield String(self, name, size, charset="UTF-16-LE", strip="\0") elif type in (2, 3): yield UInt32(self, name) elif type == 4: yield UInt64(self, name) else: yield RawBytes(self, name, size)
def parseImageData(parent, name, size, header):
    if ("compression" not in header) or (header["compression"].value in (0, 3)):
        width = header["width"].value
        height = header["height"].value
        bpp = header["bpp"].value
        if bpp == 32:
            cls = UInt32
        elif bpp == 24:
            cls = RGB
        elif bpp == 8:
            cls = UInt8
        elif bpp == 4:
            cls = Pixel4bit
        else:
            cls = None
        if cls:
            return ImagePixels(parent, name, width, height, cls, size=size * 8)
    return RawBytes(parent, name, size)
def createFields(self):
    LONG = Int32
    yield UInt32(self, "type", "Record type (always 1)")
    yield UInt32(self, "size", "Size of the header in bytes")
    yield RECT32(self, "Bounds", "Inclusive bounds")
    yield RECT32(self, "Frame", "Inclusive picture frame")
    yield textHandler(UInt32(self, "signature", "Signature ID (always 0x464D4520)"),
                      hexadecimal)
    yield UInt16(self, "min_ver", "Minor version")
    yield UInt16(self, "maj_ver", "Major version")
    yield UInt32(self, "file_size", "Size of the file in bytes")
    yield UInt32(self, "NumOfRecords", "Number of records in the metafile")
    yield UInt16(self, "NumOfHandles", "Number of handles in the handle table")
    yield NullBytes(self, "reserved", 2)
    yield UInt32(self, "desc_size", "Size of description in 16-bit words")
    yield UInt32(self, "desc_ofst", "Offset of description string in metafile")
    yield UInt32(self, "nb_colors", "Number of color palette entries")
    yield LONG(self, "width_px", "Width of reference device in pixels")
    yield LONG(self, "height_px", "Height of reference device in pixels")
    yield LONG(self, "width_mm", "Width of reference device in millimeters")
    yield LONG(self, "height_mm", "Height of reference device in millimeters")

    # Read description (if any)
    offset = self["desc_ofst"].value
    current = (self.absolute_address + self.current_size) // 8
    size = self["desc_size"].value * 2
    if offset == current and size:
        yield String(self, "description", size, charset="UTF-16-LE", strip="\0 ")

    # Read padding (if any)
    size = self["size"].value - self.current_size // 8
    if size:
        yield RawBytes(self, "padding", size)
def createFields(self): yield textHandler(UInt8(self, "signature", "IPTC signature (0x1c)"), hexadecimal) if self["signature"].value != 0x1C: raise ParserError("Wrong IPTC signature") yield textHandler(UInt8(self, "dataset_nb", "Dataset number"), hexadecimal) yield UInt8(self, "tag", "Tag") yield IPTC_Size(self, "size", "Content size") size = self["size"].value if 0 < size: if self.dataset_info: cls = self.dataset_info[2] else: cls = None if cls: yield cls(self, "content") else: yield RawBytes(self, "content", size)
def parseMediaPropertiesHeader(self):
    yield UInt16(self, "stream_number", "Stream number")
    yield UInt32(self, "max_bit_rate", "Maximum bit rate")
    yield UInt32(self, "avg_bit_rate", "Average bit rate")
    yield UInt32(self, "max_pkt_size", "Size of largest data packet")
    yield UInt32(self, "avg_pkt_size", "Size of average data packet")
    yield UInt32(self, "stream_start", "Stream start offset in milliseconds")
    yield UInt32(self, "preroll", "Preroll in milliseconds")
    yield UInt32(self, "duration", "Stream duration in milliseconds")
    yield PascalString8(self, "desc", "Stream description", charset="ISO-8859-1")
    yield PascalString8(self, "mime_type", "MIME type string", charset="ASCII")
    yield UInt32(self, "specific_size", "Size of type-specific data")
    size = self['specific_size'].value
    if size:
        if self["mime_type"].value == "logical-fileinfo":
            yield LogicalFileInfo(self, "file_info", size=size * 8)
        else:
            yield RawBytes(self, "specific", size, "Type-specific data")
def specialHeader(s, is_file):
    yield filesizeHandler(UInt32(s, "compressed_size", "Compressed size (bytes)"))
    yield filesizeHandler(UInt32(s, "uncompressed_size", "Uncompressed size (bytes)"))
    yield Enum(UInt8(s, "host_os", "Operating system used for archiving"),
               OS_NAME)
    yield textHandler(UInt32(s, "crc32", "File CRC32"), hexadecimal)
    yield TimeDateMSDOS32(s, "ftime", "Date and time (MS DOS format)")
    yield textHandler(UInt8(s, "version", "RAR version needed to extract file"),
                      formatRARVersion)
    yield Enum(UInt8(s, "method", "Packing method"), COMPRESSION_NAME)
    yield filesizeHandler(UInt16(s, "filename_length", "File name size"))
    if s["host_os"].value in (OS_MSDOS, OS_WIN32):
        yield MSDOSFileAttr32(s, "file_attr", "File attributes")
    else:
        yield textHandler(UInt32(s, "file_attr", "File attributes"), hexadecimal)

    # Start additional field from unrar
    if s["flags/is_large"].value:
        yield filesizeHandler(UInt64(s, "large_size", "Extended 64-bit file size"))
    # End additional field

    size = s["filename_length"].value
    if size > 0:
        if s["flags/is_unicode"].value:
            charset = "UTF-8"
        else:
            charset = "ISO-8859-15"
        yield String(s, "filename", size, "Filename", charset=charset)

    # Start additional fields from unrar - file only
    if is_file:
        if s["flags/has_salt"].value:
            yield RawBytes(s, "salt", 8, "Encryption salt to increase security")
        if s["flags/has_ext_time"].value:
            yield ExtTime(s, "extra_time")
def createFields(self):
    if self.root.isEMF():
        yield Enum(UInt32(self, "function"), EMF_META_NAME)
        yield UInt32(self, "size")
        try:
            parser = EMF_META[self["function"].value][2]
        except KeyError:
            parser = None
    else:
        yield UInt32(self, "size")
        yield Enum(UInt16(self, "function"), META_NAME)
        try:
            parser = META[self["function"].value][2]
        except KeyError:
            parser = None
    if parser:
        for field in parser(self):
            yield field
    else:
        size = (self.size - self.current_size) // 8
        if size:
            yield RawBytes(self, "data", size)
def createFields(self): yield Enum(UInt16(self, "type"), self.type_name) yield UInt16(self, "name_len", "Name length in character (byte=len*2)") if self["name_len"].value: yield String(self, "name", self["name_len"].value * 2, "Name", charset="UTF-16-LE", strip=" \0") yield UInt16(self, "desc_len", "Description length in character (byte=len*2)") if self["desc_len"].value: yield String(self, "desc", self["desc_len"].value * 2, "Description", charset="UTF-16-LE", strip=" \0") yield UInt16(self, "info_len") if self["info_len"].value: yield RawBytes(self, "info", self["info_len"].value)
def createFields(self): yield String(self, "signature", 4, "8BIM signature", charset="ASCII") if self["signature"].value != "8BIM": raise ParserError("Stream doesn't look like 8BIM item (wrong signature)!") yield textHandler(UInt16(self, "tag"), hexadecimal) if self.stream.readBytes(self.absolute_address + self.current_size, 4) != "\0\0\0\0": yield PascalString8(self, "name") size = 2 + (self["name"].size // 8) % 2 yield NullBytes(self, "name_padding", size) else: yield String(self, "name", 4, strip="\0") yield UInt16(self, "size") size = alignValue(self["size"].value, 2) if not size: return if self.handler: if issubclass(self.handler, FieldSet): yield self.handler(self, "content", size=size * 8) else: yield self.handler(self, "content") else: yield RawBytes(self, "content", size)
def createFields(self): yield UInt32(self, "unknown", description="Always 1") yield BudHeader(self, "header") self.seekByte(self['header/allocator_offset'].value + 4) yield BudAllocator(self, "allocator", size=self['header/allocator_size'].value * 8) for dir in self['allocator'].array('dir'): if dir['name'].value == 'DSDB': break else: raise ParserError("DSDB not found.") offs, size = self.getBlock(dir['block'].value) self.seekByte(offs + 4) yield DSDB(self, "dsdb", size=size * 8) blocks = [self['dsdb/root_block'].value] while blocks: block = blocks.pop() offs, size = self.getBlock(block) self.seekByte(offs + 4) node = BTNode(self, "node[%d]" % block, size=size * 8) yield node if node['last_block'].value != 0: new_blocks = [] for block in node.array('child_block'): new_blocks.append(block.value) new_blocks.append(node['last_block'].value) blocks.extend(reversed(new_blocks)) # dfs # blocks = new_blocks[::-1] + blocks # bfs for i, fl in enumerate(self['allocator'].array('freelist')): if fl['count'].value == 0: continue for offs in fl.array('offset'): size = min(1 << i, self.size // 8 - offs.value - 4) if size > 0: self.seekByte(offs.value + 4) yield RawBytes(self, "free[]", size)
def createFields(self): yield textHandler( UInt16(self, "crc16", "Archive CRC16 (from byte 4 on)"), hexadecimal) yield filesizeHandler( UInt16(self, "head_size", "Block size (from byte 4 on)")) yield UInt8(self, "block_type", "Block type") # Flags for flag in self.parseFlags(self): yield flag # Rest of the header for field in self.parseHeader(self): yield field size = self["head_size"].value - (self.current_size // 8) + (2 + 2) if size > 0: yield RawBytes(self, "extra_data", size, "Extra header data, unhandled") # Body in itself for field in self.parseBody(self): yield field
def createFields(self): header = Header(self, "header") yield header channels = header.getNumChannels() # Number of patterns patterns = 0 for index in xrange(128): patterns = max(patterns, header["patterns/position[%u]" % index].value) patterns += 1 # Yield patterns for index in xrange(patterns): yield Pattern(self, "pattern[]", channels) # Yield samples for index in xrange(31): count = header["samples/info[%u]/sample_count" % index].value if count: self.info("Yielding sample %u: %u samples" % (index, count)) yield RawBytes(self, "sample_data[]", 2 * count, \ "Sample %u" % index)
def createFields(self): yield String(self, "tag", 4, "Tag", charset="ASCII") yield filesizeHandler(UInt32(self, "size", "Size")) if not self["size"].value: return if self["tag"].value == "LIST": yield String(self, "subtag", 4, "Sub-tag", charset="ASCII") handler = self.tag_info[1] while 8 < (self.size - self.current_size) // 8: field = self.__class__(self, "field[]") yield field if (field.size // 8) % 2 != 0: yield UInt8(self, "padding[]", "Padding") else: handler = self.tag_info[1] if handler: for field in handler(self): yield field else: yield RawBytes(self, "raw_content", self["size"].value) padding = self.seekBit(self._size) if padding: yield padding
def createFields(self): yield Bytes(self, "magic", 8, "File magic (bplist00)") if self.size: self.seekByte(self.size // 8 - 32, True) else: # FIXME: UNTESTED while True: try: self.seekByte(1024) except Exception: break self.seekByte(self.size // 8 - 32) yield BPListTrailer(self, "trailer") self.seekByte(self['trailer/offsetTableOffset'].value) yield BPListOffsetTable(self, "offset_table") for i in self.array("offset_table/offset"): if self.current_size > i.value * 8: self.seekByte(i.value) elif self.current_size < i.value * 8: # try to detect files with gaps or unparsed content yield RawBytes(self, "padding[]", i.value - self.current_size // 8) yield BPListObject(self, "object[]")
def createFields(self): yield UInt32(self, "width", "Width in pixel") yield UInt32(self, "height", "Height in pixel") yield UInt32(self, "offset", "Offset") offset = self["offset"].value if offset == 0: return data_offsets = [] while (self.absolute_address + self.current_size) // 8 < offset: chunk = UInt32(self, "data_offset[]", "Data offset") yield chunk if chunk.value == 0: break data_offsets.append(chunk) if (self.absolute_address + self.current_size) // 8 != offset: raise ParserError("Problem with level offset.") previous = offset for chunk in data_offsets: data_offset = chunk.value size = data_offset - previous yield RawBytes(self, "data[]", size, "Data content of %s" % chunk.name) previous = data_offset
def createFields(self): yield Integer(self, "time", "Delta time in ticks") next = self.stream.readBits(self.absolute_address + self.current_size, 8, self.root.endian) if next & 0x80 == 0: # "Running Status" command if self.prev_command is None: raise ParserError( "Running Status command not preceded by another command.") self.command = self.prev_command.command else: yield Enum(textHandler(UInt8(self, "command"), hexadecimal), self.COMMAND_DESC) self.command = self["command"].value if self.command == 0xFF: yield Enum(textHandler(UInt8(self, "meta_command"), hexadecimal), self.META_COMMAND_DESC) yield UInt8(self, "data_len") size = self["data_len"].value if size: command = self["meta_command"].value if command in self.META_COMMAND_PARSER: parser = self.META_COMMAND_PARSER[command] else: parser = None if parser: for field in parser(self, size): yield field else: yield RawBytes(self, "data", size) else: if self.command not in self.COMMAND_PARSER: raise ParserError("Unknown command: %s" % self["command"].display) parser = self.COMMAND_PARSER[self.command] for field in parser(self): yield field
def createFields(self): yield textHandler(UInt16(self, "crc16", "Block CRC16"), hexadecimal) yield textHandler(UInt8(self, "block_type", "Block type"), hexadecimal) # Parse flags for field in self.parseFlags(): yield field # Get block size yield filesizeHandler(UInt16(self, "block_size", "Block size")) # Parse remaining header for field in self.parseHeader(): yield field # Finish header with stuff of unknow size size = self["block_size"].value - (self.current_size // 8) if size > 0: yield RawBytes(self, "unknown", size, "Unknow data (UInt32 probably)") # Parse body for field in self.parseBody(): yield field
def fixInstrumentHeader(self):
    size = self["size"].value - self.current_size // 8
    if size:
        yield RawBytes(self, "unknown_data", size)