def gate_factory(class_dict):
    normal_color_struct = cs.Struct(
        cs.Padding(2),
        'category' / cs.Byte,
        'color' / cs.Byte)
    offset_color_struct = cs.Struct(
        'source' / cs.Int,
        cs.Const(class_dict['color_triplet']),
        'hsv' / cs.Single[3])
    color_entry_struct = cs.Struct(
        'type' / cs.RawCopy(cs.Short),
        'colors' / cs.Switch(
            cs.this.type.data, {
                class_dict['normal_color']: normal_color_struct,
                class_dict['offset_color']: offset_color_struct
            }))
    color_struct = cs.Struct(
        cs.Const(class_dict['colorization']),
        'num_entries' / cs.Int,
        'colorizations' / color_entry_struct[cs.this.num_entries])
    level_struct = cs.Struct(
        cs.Const(class_dict['gate_level']),
        cs.Const(b'\x01'),
        'level_name' / cs.PascalString(cs.Short, 'ascii'),
        cs.Const(b'\x01'),
        'level_icon' / cs.PascalString(cs.Short, 'ascii'),
        'colorization' / color_struct,
        cs.Const(b'\x01'),
        'description' / cs.PascalString(cs.Short, 'ascii'),
        cs.Const(class_dict['level_type']),
        'level_type' / cs.Byte,
        'restricted' / cs.Byte)
    velocity_struct = cs.Struct(
        'velocity' / cs.Single,
        cs.Const(b'\x01'),
        'num_entries' / cs.Int,
        'lengths' / cs.Single[cs.this.num_entries],
        'start' / cs.Long)
    wheel_struct = cs.Struct(
        'class_id' / cs.RawCopy(cs.Short),
        'unknown' / cs.Byte[0x05],
        'num_levels' / cs.Short,
        'levels' / level_struct[cs.this.num_levels],
        'velocity' / cs.If(cs.this.class_id.data != class_dict['random_depth'],
                           velocity_struct))
    gate_struct = cs.Struct(
        cs.Const(b'\x01'),
        'gate_id' / cs.Int,
        cs.Const(b'\x01'),
        'gate_name' / cs.PascalString(cs.Short, 'ascii'),
        cs.Const(b'\x01'),
        'gate_icon' / cs.PascalString(cs.Short, 'ascii'),
        'colorization' / color_struct,
        cs.Const(b'\x01'),
        'description' / cs.PascalString(cs.Short, 'ascii'),
        'unknown' / cs.Byte[0x16],
        'num_wheels' / cs.Int,
        'wheels' / wheel_struct[cs.this.num_wheels],
        'class_id' / cs.Int16sb,
        cs.If(cs.this.class_id < 0, cs.PascalString(cs.Short, 'ascii')),
        'themes' / cs.Struct('unknown' / cs.Byte[0x07], 'themes' / cs.Byte[0x06]))
    return gate_struct
def __init__(s):
    s.header_cmd0 = construct.Struct('CMD0Header',
        construct.UBInt8('magic'),
        construct.UBInt8('unk_0'),
        construct.UBInt8('unk_1'),
        construct.UBInt8('unk_2'),
        construct.UBInt8('unk_3'),
        construct.UBInt8('flags'),
        construct.UBInt8('id_primary'),
        construct.UBInt8('id_secondary'),
        construct.UBInt16('error_code'),
        construct.UBInt16('payload_size_cmd0')
    )
    s.header_cmd1 = construct.Struct('CMD1Header',
        construct.Padding(48)
    )
    s.header_cmd2 = construct.Struct('CMD2Header',
        construct.ULInt16('JDN_base'),
        construct.Padding(2),
        construct.ULInt32('seconds')
    )
    s.header = construct.Struct('CMDHeader',
        construct.ULInt16('packet_type'),
        construct.ULInt16('cmd_id'),
        construct.ULInt16('payload_size'),
        construct.ULInt16('seq_id'),
        construct.Switch('cmd_hdr', lambda ctx: ctx.cmd_id, {
                0: construct.If(lambda ctx: ctx.payload_size >= s.header_cmd0.sizeof(),
                                construct.Embed(s.header_cmd0)),
                1: construct.If(lambda ctx: ctx.payload_size == s.header_cmd1.sizeof(),
                                construct.Embed(s.header_cmd1)),
                2: construct.If(lambda ctx: ctx.payload_size == s.header_cmd2.sizeof(),
                                construct.Embed(s.header_cmd2))
            },
            default=construct.Pass
        )
    )
    s.cmd_handlers = {
        0: s.cmd0,
        1: s.cmd1,
        2: s.cmd2
    }
    s.cmd0_handlers = {
        5: {6: s.cmd0_5_6},
    }
def ClassEntry_():
    return Struct(
        "_type" / Int16ul,
        "isInstance" / C.Computed(lambda this: this._type & 1),
        "Class_Index" / C.Computed(lambda this: this._type >> 1),
        "_valid" / C.Computed(lambda this: this._type < len(this._root.defs)),
        "index" / Int16ul,
        "content" / C.If(
            this.isInstance,
            C.LazyBound(lambda: PrefixedOffset(
                Int64ul, ClassImplementation(this._._.Class_Index), -8))))
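# ClassEntry_ above defers its "content" field through C.LazyBound, which lets a
# construct reference a definition that is not yet available at declaration time
# (PrefixedOffset and ClassImplementation are defined elsewhere). A minimal,
# self-contained sketch of that pattern, with made-up field names:
import construct as C

Node = C.Struct(
    "value" / C.Int16ul,
    "has_child" / C.Flag,
    # Resolved lazily at parse time, so Node can refer to itself.
    "child" / C.If(C.this.has_child, C.LazyBound(lambda: Node)),
)

print(Node.parse(b"\x01\x00\x01" + b"\x02\x00\x00"))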
class AslParser(interface.BaseParser): """Parser for ASL log files.""" NAME = 'asl_log' DESCRIPTION = u'Parser for ASL log files.' ASL_MAGIC = 'ASL DB\x00\x00\x00\x00\x00\x00' # If not right assigned, the value is "-1". ASL_NO_RIGHTS = 'ffffffff' # Priority level (criticity) ASL_MESSAGE_PRIORITY = { 0: 'EMERGENCY', 1: 'ALERT', 2: 'CRITICAL', 3: 'ERROR', 4: 'WARNING', 5: 'NOTICE', 6: 'INFO', 7: 'DEBUG' } # ASL File header. # magic: magic number that identify ASL files. # version: version of the file. # offset: first record in the file. # timestamp: epoch time when the first entry was written. # last_offset: last record in the file. ASL_HEADER_STRUCT = construct.Struct('asl_header_struct', construct.String('magic', 12), construct.UBInt32('version'), construct.UBInt64('offset'), construct.UBInt64('timestamp'), construct.UBInt32('cache_size'), construct.UBInt64('last_offset'), construct.Padding(36)) # The record structure is: # [HEAP][STRUCTURE][4xExtraField][2xExtraField]*[PreviousEntry] # Record static structure. # tam_entry: it contains the number of bytes from this file position # until the end of the record, without counts itself. # next_offset: next record. If is equal to 0x00, it is the last record. # asl_message_id: integer that has the numeric identification of the event. # timestamp: Epoch integer that has the time when the entry was created. # nanosecond: nanosecond to add to the timestamp. # level: level of priority. # pid: process identification that ask to save the record. # uid: user identification that has lunched the process. # gid: group identification that has lunched the process. # read_uid: identification id of a user. Only applied if is not -1 (all FF). # Only root and this user can read the entry. # read_gid: the same than read_uid, but for the group. ASL_RECORD_STRUCT = construct.Struct('asl_record_struct', construct.Padding(2), construct.UBInt32('tam_entry'), construct.UBInt64('next_offset'), construct.UBInt64('asl_message_id'), construct.UBInt64('timestamp'), construct.UBInt32('nanosec'), construct.UBInt16('level'), construct.UBInt16('flags'), construct.UBInt32('pid'), construct.UBInt32('uid'), construct.UBInt32('gid'), construct.UBInt32('read_uid'), construct.UBInt32('read_gid'), construct.UBInt64('ref_pid')) ASL_RECORD_STRUCT_SIZE = ASL_RECORD_STRUCT.sizeof() # 8-byte fields, they can be: # - String: [Nibble = 1000 (8)][Nibble = Length][7 Bytes = String]. # - Integer: integer that has the byte position in the file that points # to an ASL_RECORD_DYN_VALUE struct. If the value of the integer # is equal to 0, it means that it has not data (skip). # If the field is a String, we use this structure to decode each # integer byte in the corresponding character (ASCII Char). ASL_OCTET_STRING = construct.ExprAdapter(construct.Octet('string'), encoder=lambda obj, ctx: ord(obj), decoder=lambda obj, ctx: chr(obj)) # Field string structure. If the first bit is 1, it means that it # is a String (1000) = 8, then the next nibble has the number of # characters. The last 7 bytes are the number of bytes. ASL_STRING = construct.BitStruct( 'string', construct.Flag('type'), construct.Bits('filler', 3), construct.If(lambda ctx: ctx.type, construct.Nibble('string_length')), construct.If(lambda ctx: ctx.type, construct.Array(7, ASL_OCTET_STRING))) # 8-byte pointer to a byte position in the file. ASL_POINTER = construct.UBInt64('pointer') # Dynamic data structure pointed by a pointer that contains a String: # [2 bytes padding][4 bytes lenght of String][String]. 
ASL_RECORD_DYN_VALUE = construct.Struct( 'asl_record_dyn_value', construct.Padding(2), construct.PascalString('value', length_field=construct.UBInt32('length'))) def Parse(self, parser_context, file_entry): """Extract entries from an ASL file. Args: parser_context: A parser context object (instance of ParserContext). file_entry: A file entry object (instance of dfvfs.FileEntry). """ file_object = file_entry.GetFileObject() file_object.seek(0, os.SEEK_SET) try: header = self.ASL_HEADER_STRUCT.parse_stream(file_object) except (IOError, construct.FieldError) as exception: file_object.close() raise errors.UnableToParseFile( u'Unable to parse ASL Header with error: {0:s}.'.format( exception)) if header.magic != self.ASL_MAGIC: file_object.close() raise errors.UnableToParseFile( u'Not an ASL Header, unable to parse.') # Get the first and the last entry. offset = header.offset old_offset = header.offset last_offset_header = header.last_offset # If the ASL file has entries. if offset: event_object, offset = self.ReadAslEvent(file_object, offset) while event_object: parser_context.ProduceEvent(event_object, parser_name=self.NAME, file_entry=file_entry) # TODO: an anomaly object must be emitted once that is implemented. # Sanity check, the last read element must be the same as # indicated by the header. if offset == 0 and old_offset != last_offset_header: logging.warning(u'Parsing ended before the header ends.') old_offset = offset event_object, offset = self.ReadAslEvent(file_object, offset) file_object.close() def ReadAslEvent(self, file_object, offset): """Returns an AslEvent from a single ASL entry. Args: file_object: a file-like object that points to an ASL file. offset: offset where the static part of the entry starts. Returns: An event object constructed from a single ASL record. """ # The heap of the entry is saved to try to avoid seek (performance issue). # It has the real start position of the entry. dynamic_start = file_object.tell() dynamic_part = file_object.read(offset - file_object.tell()) if not offset: return None, None try: record_header = self.ASL_RECORD_STRUCT.parse_stream(file_object) except (IOError, construct.FieldError) as exception: logging.warning( u'Unable to parse ASL event with error: {0:s}'.format( exception)) return None, None # Variable tam_fields = is the real length of the dynamic fields. # We have this: [Record_Struct] + [Dynamic_Fields] + [Pointer_Entry_Before] # In Record_Struct we have a field called tam_entry, where it has the number # of bytes until the end of the entry from the position that the field is. # The tam_entry is between the 2th and the 6th byte in the [Record_Struct]. # tam_entry = ([Record_Struct]-6)+[Dynamic_Fields]+[Pointer_Entry_Before] # Also, we do not need [Point_Entry_Before] and then we delete the size of # [Point_Entry_Before] that it is 8 bytes (8): # tam_entry = ([Record_Struct]-6)+[Dynamic_Fields]+[Pointer_Entry_Before] # [Dynamic_Fields] = tam_entry - [Record_Struct] + 6 - 8 # [Dynamic_Fields] = tam_entry - [Record_Struct] - 2 tam_fields = record_header.tam_entry - self.ASL_RECORD_STRUCT_SIZE - 2 # Dynamic part of the entry that contains minimal four fields of 8 bytes # plus 2x[8bytes] fields for each extra ASL_Field. # The four first fields are always the Host, Sender, Facility and Message. # After the four first fields, the entry might have extra ASL_Fields. 
# For each extra ASL_field, it has a pair of 8-byte fields where the first # 8 bytes contains the name of the extra ASL_field and the second 8 bytes # contains the text of the exta field. # All of this 8-byte field can be saved using one of these three differents # types: # - Null value ('0000000000000000'): nothing to do. # - String: It is string if first bit = 1 or first nibble = 8 (1000). # Second nibble has the length of string. # The next 7 bytes have the text characters of the string # padding the end with null characters: '0x00'. # Example: [8468 6964 6400 0000] # [8] String, [4] length, value: [68 69 64 64] = hidd. # - Pointer: static position in the file to a special struct # implemented as an ASL_RECORD_DYN_VALUE. # Example: [0000 0000 0000 0077] # It points to the file position 0x077 that has a # ASL_RECORD_DYN_VALUE structure. values = [] while tam_fields > 0: try: raw_field = file_object.read(8) except (IOError, construct.FieldError) as exception: logging.warning( u'Unable to parse ASL event with error: {0:d}'.format( exception)) return None, None try: # Try to read as a String. field = self.ASL_STRING.parse(raw_field) values.append(''.join(field.string[0:field.string_length])) # Go to parse the next extra field. tam_fields -= 8 continue except ValueError: pass # If it is not a string, it must be a pointer. try: field = self.ASL_POINTER.parse(raw_field) except ValueError as exception: logging.warning( u'Unable to parse ASL event with error: {0:s}'.format( exception)) return None, None if field != 0: # The next IF ELSE is only for performance issues, avoiding seek. # If the pointer points a lower position than where the actual entry # starts, it means that it points to a previuos entry. pos = field - dynamic_start # Bigger or equal 0 means that the data is in the actual entry. if pos >= 0: try: values.append((self.ASL_RECORD_DYN_VALUE.parse( dynamic_part[pos:])).value.partition('\x00')[0]) except (IOError, construct.FieldError) as exception: logging.warning( u'Unable to parse ASL event with error: {0:s}'. format(exception)) return None, None else: # Only if it is a pointer that points to the # heap from another entry we use the seek method. main_position = file_object.tell() # If the pointer is in a previous entry. if main_position > field: file_object.seek(field - main_position, os.SEEK_CUR) try: values.append( (self.ASL_RECORD_DYN_VALUE.parse_stream( file_object)).value.partition('\x00')[0]) except (IOError, construct.FieldError): logging.warning(( u'The pointer at {0:d} (0x{0:x}) points to invalid ' u'information.' ).format(main_position - self.ASL_POINTER.sizeof())) # Come back to the position in the entry. _ = file_object.read(main_position - file_object.tell()) else: _ = file_object.read(field - main_position) values.append((self.ASL_RECORD_DYN_VALUE.parse_stream( file_object)).value.partition('\x00')[0]) # Come back to the position in the entry. file_object.seek(main_position - file_object.tell(), os.SEEK_CUR) # Next extra field: 8 bytes more. tam_fields -= 8 # Read the last 8 bytes of the record that points to the previous entry. _ = file_object.read(8) # Parsed section, we translate the read data to an appropriate format. 
microsecond = record_header.nanosec // 1000 timestamp = timelib.Timestamp.FromPosixTimeWithMicrosecond( record_header.timestamp, microsecond) record_position = offset message_id = record_header.asl_message_id level = u'{0} ({1})'.format( self.ASL_MESSAGE_PRIORITY[record_header.level], record_header.level) # If the value is -1 (FFFFFFFF), it can be read by everyone. if record_header.read_uid != int(self.ASL_NO_RIGHTS, 16): read_uid = record_header.read_uid else: read_uid = 'ALL' if record_header.read_gid != int(self.ASL_NO_RIGHTS, 16): read_gid = record_header.read_gid else: read_gid = 'ALL' # Parsing the dynamic values (text or pointers to position with text). # The first four are always the host, sender, facility, and message. computer_name = values[0] sender = values[1] facility = values[2] message = values[3] # If the entry has an extra fields, they works as a pairs: # The first is the name of the field and the second the value. extra_information = '' if len(values) > 4: values = values[4:] for index in xrange(0, len(values) // 2): extra_information += (u'[{0}: {1}]'.format( values[index * 2], values[(index * 2) + 1])) # Return the event and the offset for the next entry. return AslEvent(timestamp, record_position, message_id, level, record_header, read_uid, read_gid, computer_name, sender, facility, message, extra_information), record_header.next_offset
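# A worked decode of the inline-string case described in the comments above
# (hex 84 68 69 64 64 00 00 00 -> "hidd"); plain Python, independent of the
# construct version the parser itself uses.
field = bytes.fromhex('8468696464000000')
is_string = bool(field[0] & 0x80)            # high bit set -> inline string
length = field[0] & 0x0F                     # low nibble -> 4 characters
text = field[1:1 + length].decode('ascii')   # -> 'hidd'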
        construct.ULInt16('left'),
        construct.ULInt16('top'),
        construct.ULInt16('width'),
        construct.ULInt16('height'),
        construct.EmbeddedBitStruct(
            construct.Flag('lct_flag'),
            construct.Flag('interlace_flag'),
            construct.Flag('sort_flag'),
            construct.Padding(2),  # reserved
            construct.macros.BitField('lct_size', 3),
        ),
    ),
    construct.If(
        lambda ctx: ctx.image_descriptor.lct_flag,
        construct.Array(
            lambda ctx: pow(2, ctx.image_descriptor.lct_size + 1),
            construct.Array(3, construct.ULInt8('lct')),
        ),
    ),
    construct.ULInt8('lzw_min'),
    _get_data_subblocks('compressed_indices'),
)

_application_extension = construct.Struct(
    'application_extension',
    construct.Value('block_type', lambda ctx: 'application'),
    construct.Const(construct.ULInt8('block_size'), 11),
    construct.String('app_id', 8),
    construct.Bytes('app_auth_code', 3),
    _get_data_subblocks('app_data'),
)
"""Transaction hash, encoded as a reversed sequence of bytes.""" TxInput = c.Struct( "tx" / TxHash, "index" / c.Int32ul, "script_sig" / BitcoinBytes, "sequence" / c.Int32ul, ) """Transaction input.""" TxOutput = c.Struct("value" / c.Int64ul, "script_pubkey" / BitcoinBytes) """Transaction output.""" TxInputWitness = c.PrefixedArray(CompactUint, BitcoinBytes) """Array of witness records.""" Transaction = c.Struct( "version" / c.Int32ul, "segwit" / ConstFlag(b"\x00\x01"), "inputs" / c.PrefixedArray(CompactUint, TxInput), "outputs" / c.PrefixedArray(CompactUint, TxOutput), "witness" / c.If(c.this.segwit, TxInputWitness[c.len_(c.this.inputs)]), "lock_time" / c.Int32ul, c.Terminated, ) """Bitcoin transaction. If the `segwit` flag is present (which would otherwise mean 0 inputs, 1 output), we expect a `witness` field with entries corresponding to each input. """
class ASLParser(interface.FileObjectParser): """Parser for ASL log files.""" _INITIAL_FILE_OFFSET = None NAME = u'asl_log' DESCRIPTION = u'Parser for ASL log files.' _ASL_MAGIC = b'ASL DB\x00\x00\x00\x00\x00\x00' # ASL File header. # magic: magic number that identify ASL files. # version: version of the file. # offset: first record in the file. # timestamp: time when the first entry was written. # Contains the number of seconds since January 1, 1970 00:00:00 UTC. # last_offset: last record in the file. _ASL_HEADER_STRUCT = construct.Struct(u'asl_header_struct', construct.String(u'magic', 12), construct.UBInt32(u'version'), construct.UBInt64(u'offset'), construct.UBInt64(u'timestamp'), construct.UBInt32(u'cache_size'), construct.UBInt64(u'last_offset'), construct.Padding(36)) # The record structure is: # [HEAP][STRUCTURE][4xExtraField][2xExtraField]*[PreviousEntry] # Record static structure. # tam_entry: it contains the number of bytes from this file position # until the end of the record, without counts itself. # next_offset: next record. If is equal to 0x00, it is the last record. # asl_message_id: integer that has the numeric identification of the event. # timestamp: the entry creation date and time. # Contains the number of seconds since January 1, 1970 00:00:00 UTC. # nanosecond: nanosecond to add to the timestamp. # level: level of priority. # pid: process identification that ask to save the record. # uid: user identification that has lunched the process. # gid: group identification that has lunched the process. # read_uid: identification id of a user. Only applied if is not -1 (all FF). # Only root and this user can read the entry. # read_gid: the same than read_uid, but for the group. _ASL_RECORD_STRUCT = construct.Struct(u'asl_record_struct', construct.Padding(2), construct.UBInt32(u'tam_entry'), construct.UBInt64(u'next_offset'), construct.UBInt64(u'asl_message_id'), construct.UBInt64(u'timestamp'), construct.UBInt32(u'nanosec'), construct.UBInt16(u'level'), construct.UBInt16(u'flags'), construct.UBInt32(u'pid'), construct.UBInt32(u'uid'), construct.UBInt32(u'gid'), construct.UBInt32(u'read_uid'), construct.UBInt32(u'read_gid'), construct.UBInt64(u'ref_pid')) _ASL_RECORD_STRUCT_SIZE = _ASL_RECORD_STRUCT.sizeof() # 8-byte fields, they can be: # - String: [Nibble = 1000 (8)][Nibble = Length][7 Bytes = String]. # - Integer: integer that has the byte position in the file that points # to an ASL_RECORD_DYN_VALUE struct. If the value of the integer # is equal to 0, it means that it has not data (skip). # If the field is a String, we use this structure to decode each # integer byte in the corresponding character (ASCII Char). _ASL_OCTET_STRING = construct.ExprAdapter( construct.Octet(u'string'), encoder=lambda obj, ctx: ord(obj), decoder=lambda obj, ctx: chr(obj)) # Field string structure. If the first bit is 1, it means that it # is a String (1000) = 8, then the next nibble has the number of # characters. The last 7 bytes are the number of bytes. _ASL_STRING = construct.BitStruct( u'string', construct.Flag(u'type'), construct.Bits(u'filler', 3), construct.If(lambda ctx: ctx.type, construct.Nibble(u'string_length')), construct.If(lambda ctx: ctx.type, construct.Array(7, _ASL_OCTET_STRING))) # 8-byte pointer to a byte position in the file. _ASL_POINTER = construct.UBInt64(u'pointer') # Dynamic data structure pointed by a pointer that contains a String: # [2 bytes padding][4 bytes size of String][String]. 
_ASL_RECORD_DYN_VALUE = construct.Struct( u'asl_record_dyn_value', construct.Padding(2), construct.UBInt32(u'size'), construct.Bytes(u'value', lambda ctx: ctx.size)) def ParseFileObject(self, parser_mediator, file_object, **kwargs): """Parses an ALS file-like object. Args: parser_mediator: a parser mediator object (instance of ParserMediator). file_object: a file-like object. Raises: UnableToParseFile: when the file cannot be parsed. """ file_object.seek(0, os.SEEK_SET) try: header = self._ASL_HEADER_STRUCT.parse_stream(file_object) except (IOError, construct.FieldError) as exception: raise errors.UnableToParseFile( u'Unable to parse ASL Header with error: {0:s}.'.format( exception)) if header.magic != self._ASL_MAGIC: raise errors.UnableToParseFile( u'Not an ASL Header, unable to parse.') offset = header.offset if not offset: return header_last_offset = header.last_offset previous_offset = offset event_object, offset = self.ReadASLEvent(parser_mediator, file_object, offset) while event_object: # Sanity check, the last read element must be the same as # indicated by the header. if offset == 0 and previous_offset != header_last_offset: parser_mediator.ProduceParseError( u'Unable to parse header. Last element header does not match ' u'header offset.') previous_offset = offset event_object, offset = self.ReadASLEvent(parser_mediator, file_object, offset) def ReadASLEvent(self, parser_mediator, file_object, offset): """Reads an ASL record at a specific offset. Args: parser_mediator: a parser mediator object (instance of ParserMediator). file_object: a file-like object that points to an ASL file. offset: an integer containing the offset of the ASL record. Returns: A tuple of an event object extracted from the ASL record, and the offset to the next ASL record in the file. """ # The heap of the entry is saved to try to avoid seek (performance issue). # It has the real start position of the entry. dynamic_data_offset = file_object.tell() try: dynamic_data = file_object.read(offset - dynamic_data_offset) except IOError as exception: parser_mediator.ProduceParseError( u'unable to read ASL record dynamic data with error: {0:s}'. format(exception)) return None, None if not offset: return None, None try: record_struct = self._ASL_RECORD_STRUCT.parse_stream(file_object) except (IOError, construct.FieldError) as exception: parser_mediator.ProduceParseError( u'unable to parse ASL record with error: {0:s}'.format( exception)) return None, None # Variable tam_fields = is the real length of the dynamic fields. # We have this: [Record_Struct] + [Dynamic_Fields] + [Pointer_Entry_Before] # In Record_Struct we have a field called tam_entry, where it has the number # of bytes until the end of the entry from the position that the field is. # The tam_entry is between the 2th and the 6th byte in the [Record_Struct]. # tam_entry = ([Record_Struct]-6)+[Dynamic_Fields]+[Pointer_Entry_Before] # Also, we do not need [Point_Entry_Before] and then we delete the size of # [Point_Entry_Before] that it is 8 bytes (8): # tam_entry = ([Record_Struct]-6)+[Dynamic_Fields]+[Pointer_Entry_Before] # [Dynamic_Fields] = tam_entry - [Record_Struct] + 6 - 8 # [Dynamic_Fields] = tam_entry - [Record_Struct] - 2 tam_fields = record_struct.tam_entry - self._ASL_RECORD_STRUCT_SIZE - 2 # Dynamic part of the entry that contains minimal four fields of 8 bytes # plus 2 x [8 bytes] fields for each extra ASL_Field. # The four first fields are always the Host, Sender, Facility and Message. 
# After the four first fields, the entry might have extra ASL_Fields. # For each extra ASL_field, it has a pair of 8-byte fields where the first # 8 bytes contains the name of the extra ASL_field and the second 8 bytes # contains the text of the extra field. # All of this 8-byte field can be saved using one of these three different # types: # - Null value ('0000000000000000'): nothing to do. # - String: It is string if first bit = 1 or first nibble = 8 (1000). # Second nibble has the length of string. # The next 7 bytes have the text characters of the string # padding the end with null characters: '0x00'. # Example: [8468 6964 6400 0000] # [8] String, [4] length, value: [68 69 64 64] = hidd. # - Pointer: static position in the file to a special struct # implemented as an ASL_RECORD_DYN_VALUE. # Example: [0000 0000 0000 0077] # It points to the file position 0x077 that has a # ASL_RECORD_DYN_VALUE structure. values = [] while tam_fields > 0: try: field_data = file_object.read(8) except IOError as exception: parser_mediator.ProduceParseError( u'unable to read ASL field with error: {0:s}'.format( exception)) return None, None # Try to read the field data as a string. try: asl_string_struct = self._ASL_STRING.parse(field_data) string_data = b''.join( asl_string_struct.string[0:asl_string_struct. string_length]) values.append(string_data) # Go to parse the next extra field. tam_fields -= 8 continue except ValueError: pass # If the field is not a string it must be a pointer. try: pointer_value = self._ASL_POINTER.parse(field_data) except ValueError as exception: parser_mediator.ProduceParseError( u'unable to parse ASL field with error: {0:s}'.format( exception)) return None, None if not pointer_value: # Next extra field: 8 bytes more. tam_fields -= 8 continue # The next IF ELSE is only for performance issues, avoiding seek. # If the pointer points a lower position than where the actual entry # starts, it means that it points to a previous entry. pos = pointer_value - dynamic_data_offset # Greater or equal 0 means that the data is in the actual entry. if pos >= 0: try: dyn_value_struct = self._ASL_RECORD_DYN_VALUE.parse( dynamic_data[pos:]) dyn_value = dyn_value_struct.value.partition(b'\x00')[0] values.append(dyn_value) except (IOError, construct.FieldError) as exception: parser_mediator.ProduceParseError(( u'unable to parse ASL record dynamic value with error: ' u'{0:s}').format(exception)) return None, None else: # Only if it is a pointer that points to the # heap from another entry we use the seek method. main_position = file_object.tell() # If the pointer is in a previous entry. if main_position > pointer_value: file_object.seek(pointer_value - main_position, os.SEEK_CUR) try: dyn_value_struct = self._ASL_RECORD_DYN_VALUE.parse_stream( file_object) dyn_value = dyn_value_struct.value.partition( b'\x00')[0] values.append(dyn_value) except (IOError, construct.FieldError): parser_mediator.ProduceParseError(( u'the pointer at {0:d} (0x{0:08x}) points to invalid ' u'information.' ).format(main_position - self._ASL_POINTER.sizeof())) # Come back to the position in the entry. _ = file_object.read(main_position - file_object.tell()) else: _ = file_object.read(pointer_value - main_position) dyn_value_struct = self._ASL_RECORD_DYN_VALUE.parse_stream( file_object) dyn_value = dyn_value_struct.value.partition(b'\x00')[0] values.append(dyn_value) # Come back to the position in the entry. file_object.seek(main_position - file_object.tell(), os.SEEK_CUR) # Next extra field: 8 bytes more. 
tam_fields -= 8 # Read the last 8 bytes of the record that points to the previous entry. _ = file_object.read(8) # Parsed section, we translate the read data to an appropriate format. micro_seconds, _ = divmod(record_struct.nanosec, 1000) # Parsing the dynamic values (text or pointers to position with text). # The first four are always the host, sender, facility, and message. number_of_values = len(values) if number_of_values < 4: parser_mediator.ProduceParseError( u'less than four values read from an ASL event.') computer_name = u'N/A' sender = u'N/A' facility = u'N/A' message = u'N/A' if number_of_values >= 1: computer_name = values[0].decode(u'utf-8') if number_of_values >= 2: sender = values[1].decode(u'utf-8') if number_of_values >= 3: facility = values[2].decode(u'utf-8') if number_of_values >= 4: message = values[3].decode(u'utf-8') # If the entry has an extra fields, they works as a pairs: # The first is the name of the field and the second the value. extra_information = u'' if number_of_values > 4 and number_of_values % 2 == 0: # Taking all the extra attributes and merging them together, # eg: a = [1, 2, 3, 4] will look like "1: 2, 3: 4". try: extra_values = map(py2to3.UNICODE_TYPE, values[4:]) extra_information = u', '.join( map(u': '.join, zip(extra_values[0::2], extra_values[1::2]))) except UnicodeDecodeError as exception: parser_mediator.ProduceParseError( u'Unable to decode all ASL values in the extra information fields.' ) event_object = ASLEvent(record_struct.timestamp, offset, record_struct.asl_message_id, record_struct.level, record_struct.pid, record_struct.uid, record_struct.gid, record_struct.read_uid, record_struct.read_gid, computer_name, sender, facility, message, extra_information, micro_seconds=micro_seconds) parser_mediator.ProduceEvent(event_object) return (event_object, record_struct.next_offset)
CompactUint = CompactUintAdapter(CompactUintStruct)

TxInput = c.Struct(
    "tx" / c.Bytes(32),
    "index" / c.Int32ul,
    # TODO coinbase tx
    "script" / c.Prefixed(CompactUint, c.GreedyBytes),
    "sequence" / c.Int32ul,
)

TxOutput = c.Struct(
    "value" / c.Int64ul,
    "pk_script" / c.Prefixed(CompactUint, c.GreedyBytes),
)

StackItem = c.Prefixed(CompactUint, c.GreedyBytes)
TxInputWitness = c.PrefixedArray(CompactUint, StackItem)

Transaction = c.Struct(
    "version" / c.Int32ul,
    "segwit" / ConstFlag(b"\x00\x01"),
    "inputs" / c.PrefixedArray(CompactUint, TxInput),
    "outputs" / c.PrefixedArray(CompactUint, TxOutput),
    "witness" / c.If(this.segwit, TxInputWitness[len_(this.inputs)]),
    "lock_time" / c.Int32ul,
    c.Terminated,
)

print(Transaction.parse(tx_bin))
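# CompactUintStruct / CompactUintAdapter are used above but defined elsewhere.
# A plausible sketch of Bitcoin's CompactSize integer (one byte, or a
# 0xFD/0xFE/0xFF prefix followed by 2/4/8 little-endian bytes); the *Sketch
# names are hypothetical and the actual definitions may differ.
import construct as c

CompactUintStructSketch = c.Struct(
    "base" / c.Int8ul,
    "ext" / c.Switch(c.this.base, {0xFD: c.Int16ul, 0xFE: c.Int32ul, 0xFF: c.Int64ul}),
)

class CompactUintAdapterSketch(c.Adapter):
    def _decode(self, obj, context, path):
        return obj.ext if obj.base >= 0xFD else obj.base

    def _encode(self, obj, context, path):
        if obj < 0xFD:
            return {"base": obj, "ext": None}
        if obj <= 0xFFFF:
            return {"base": 0xFD, "ext": obj}
        if obj <= 0xFFFFFFFF:
            return {"base": 0xFE, "ext": obj}
        return {"base": 0xFF, "ext": obj}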
"stream_start" / construct.BitsInteger(24, swapped=True)), ) # # VideoFrame # gdv_video_frame = construct.Struct( "frame_header" / gdv_video_frame_header, construct.OnDemand(construct.Array(lambda ctx: ctx.frame_header.length, "data" / construct.Byte)) ) # # Chunk # gdv_chunk = construct.Struct( "audio_data" / construct.If(lambda ctx: ctx._.gdv_header.sound_flags.audio_present["AUDIO_PRESENT"] == True, construct.OnDemand(construct.Array(lambda ctx: get_length_audio_data(ctx._.gdv_header), "audio_data" / construct.Int8ul))), "video" / construct.If(lambda ctx: ctx._.gdv_header.frame_size != 0x00, gdv_video_frame) ) # # GDV header # gdv_header = construct.Struct( "start_gdv_header" / construct.Tell, "signature" / construct.Const("\x94\x19\x11\x29"), # + 0x00 "size_id" / construct.Int16ul, # + 0x04 "nb_frames" / construct.Int16ul, # + 0x06 "framerate" / construct.Int16ul, # + 0x08 "sound_flags" / gdv_soundflags, # + 0x0A "playback_frequency" / construct.Int16ul, # + 0x0C
"depth" / C.Int16ul, "counts" / C.Int16ul, "imageCount" / C.Computed(lambda this: this.counts & 0x3FF if this.version in swizzableFormats else this.counts >> 8), #C.Int8ul,#12 "mipCount" / C.Computed(lambda this: (this.counts >> 12 if this.version in swizzableFormats else this .counts & 0xFF)), #C.Int8ul,#4 "format" / C.Int32ul, "swizzleControl" / C.Int32sl, #C.Const(1,C.Int32ul), "cubemapMarker" / C.Int32ul, "unkn04" / C.Int8ul[2], "NULL0" / C.Const(0, C.Int16ul), "swizzleData" / C.If( lambda ctx: ctx.version in swizzableFormats, C.IfThenElse(lambda ctx: ctx.version in swizzledFormats, swizzleData, swizzleNull)), "textureHeaders" / mipData[C.this.mipCount][C.this.imageCount], "start" / C.Tell, "data" / C.GreedyBytes, ) TEXHeader = _TEXHeader #.compile() def expandBlockData(texhead, swizzle): texs = [] for image in texhead.textureHeaders: mips = [] for mipsTex in image: start = mipsTex.mipOffset - texhead.start end = start + (mipsTex.compressedSize
"code_length" / c.Rebuild( c.Int32ul, lambda this: len(this._.code) if "code" in this._ else (this.code_length or 0)), "version" / VersionLong, "fix_version" / VersionLong, "reserved" / c.Padding(8), "hashes" / c.Bytes(32)[16], "v1_signatures" / c.Bytes(64)[V1_SIGNATURE_SLOTS], "v1_key_indexes" / c.Int8ul[V1_SIGNATURE_SLOTS], # pylint: disable=E1136 "reserved" / c.Padding(220), "sigmask" / c.Byte, "signature" / c.Bytes(64), "_end_offset" / c.Tell, "_rebuild_header_len" / c.If( c.this.version.major > 1, c.Pointer( c.this._start_offset + 4, c.Rebuild(c.Int32ul, c.this._end_offset - c.this._start_offset)), ), ) Firmware = c.Struct( "vendor_header" / VendorHeader, "firmware_header" / FirmwareHeader, "_code_offset" / c.Tell, "code" / c.Bytes(c.this.firmware_header.code_length), c.Terminated, ) FirmwareOneV2 = c.Struct( "firmware_header" / FirmwareHeader, "_code_offset" / c.Tell,
"field_0A" / construct.Int8ul, # + 0x0A "field_0B" / construct.Int8ul, # + 0x0B #construct.Probe(), # F****D DANS ADEMO #"addr_delta" / construct.If(lambda ctx: ctx.field_04 != 0, construct.Array(lambda ctx: (ctx.field_04 / 4) - 5, construct.Int32ul)), construct.Int32ul, construct.Int32ul, ) unk_header = construct.Struct( "unk_byte_00_f" / construct.Int8ul, # + 0x00 "unk_byte_01_f" / construct.Int8ul, # + 0x01 "width" / construct.Int16ul, # + 0x02 "height" / construct.Int16ul, # + 0x04 "anim" / construct.If(lambda ctx: ctx.unk_byte_01_f & 0x01 != 0x00, anim_das), "data" / construct.OnDemand( construct.Array(lambda ctx: ctx.width * ctx.height, construct.Byte))) # # DAS file # das_file = construct.Struct( "signature" / construct.Const("\x44\x41\x53\x50"), # + 0x00 "version" / construct.Int16ul, # + 0x04 "image_record_length" / construct.Int16ul, # + 0x06 "image_record_offset" / construct.Int32ul, # + 0x08 "palette_offset" / construct.Int32ul, # + 0x0C "ns_offset_01" / construct.Int32ul, # + 0x10 // length = 0x1000 "string_table_offset" / construct.Int32ul, # + 0x14 "string_table_length" / construct.Int16ul, # + 0x18
))

mac_header_t = ct.Struct(
    "fcf" / fcf_t,
    "seqnum" / ct.Hex(ct.Int8ul),
    "dst_addr" / ct.Switch(
        lambda ctx: int(ctx.fcf.dst_addressing_mode), {
            int(addressing_mode_t.SHORT): short_addr_t,
            int(addressing_mode_t.LONG): long_addr_t,
        }),
    "src_addr" / ct.If(
        lambda ctx: is_address_present(ctx.fcf.src_addressing_mode),
        ct.Struct(
            "pan_id" / ct.IfThenElse(
                lambda ctx: ctx._.fcf.pan_id_comp and is_address_present(
                    ctx._.fcf.dst_addressing_mode),
                ct.Computed(ct.this._.dst_addr.pan_id),
                ct.Hex(ct.Int16ul)),
            "addr" / ct.Switch(
                lambda ctx: int(ctx._.fcf.src_addressing_mode), {
                    int(addressing_mode_t.SHORT): ct.Hex(ct.Int16ul),
                    int(addressing_mode_t.LONG): ct.Hex(ct.Int64ul)
                }))),
)

mpdu_t = ct.Struct(
    "mac" / mac_header_t,
    "pdu_offset" / ct.Tell,
    # Greedy-read the rest of the frame, then drop the trailing 2-byte FCS from
    # the payload view ("AA" is only a placeholder appended when building).
    "pdu" / ct.ExprAdapter(ct.HexDump(ct.GreedyBytes), ct.obj_[:-2], ct.obj_ + "AA"),
    # Step back over the FCS so it can be parsed as its own field.
    ct.Seek(-2, ct.io.SEEK_CUR),
    "fcs_offset" / ct.Tell,
    ct.If(ct.this.pdu_offset > ct.this.fcs_offset, ct.Error),
    "fcs" / ct.Hex(ct.Int16ul))

phr_t = ct.BitStruct("reserved" / ct.Bit, "size" / ct.BitsInteger(7))
def _struct(cls): return construct.Struct( "signature" / construct.Const(b"LivePrevMenu"), "version" / LPMVersionValidator(_LPMVersionAdapter(construct.Bytes(3))), "unk1" / construct.Bytes(8), "buttons" / construct.PrefixedArray( construct.Int32ul, construct.Struct( "width" / construct.Int32ul, "height" / construct.Int32ul, "src" / construct.PascalString(construct.Int32ul, "cp932"), "unk2" / construct.Byte, "name" / construct.PascalString(construct.Int32ul, "cp932"), "src_selected" / construct.PascalString(construct.Int32ul, "cp932"), "unk3" / construct.PascalString(construct.Int32ul, "cp932"), "unk4" / construct.PascalString(construct.Int32ul, "cp932"), "unk5" / construct.If( construct.this._._.version > 100, construct.PascalString(construct.Int32ul, "cp932"), ), "unk6" / construct.If( construct.this._._.version > 102, construct.Struct( construct.PascalString(construct.Int32ul, "cp932"), construct.PascalString(construct.Int32ul, "cp932"), ), ), "unk7" / construct.PascalString(construct.Int32ul, "cp932"), "unk8" / construct.PascalString(construct.Int32ul, "cp932"), "unk9" / construct.PascalString(construct.Int32ul, "cp932"), "unk10" / construct.If( construct.this._._.version > 101, construct.Struct( construct.PascalString(construct.Int32ul, "cp932"), construct.PascalString(construct.Int32ul, "cp932"), ), ), "unk15" / construct.Int32ul, "unk16" / construct.Int32ul, "unk17" / construct.PascalString(construct.Int32ul, "cp932"), "unk18" / construct.If( construct.this._._.version > 103, construct.Struct( construct.PascalString(construct.Int32ul, "cp932"), construct.PascalString(construct.Int32ul, "cp932"), construct.PascalString(construct.Int32ul, "cp932"), construct.PascalString(construct.Int32ul, "cp932"), construct.PascalString(construct.Int32ul, "cp932"), construct.Int32ul, ), ), "unk19" / construct.If( construct.this._._.version > 104, construct.PascalString(construct.Int32ul, "cp932"), ), "unk20" / construct.If( construct.this._._.version > 105, construct.PascalString(construct.Int32ul, "cp932"), ), ), ), )
c.ULInt8("version"), c.ULInt16("server_count"), c.MetaRepeater(lambda ctx: ctx["server_count"], c.Struct("servers", IpAddress("host"), c.ULInt16("port") )), ) GameInfo = c.Struct("gameinfo", Header, c.ULInt8("version"), c.If(lambda ctx: ctx["version"] >= 4, c.Struct("newgrf_info", c.ULInt8("newgrf_count"), c.MetaRepeater(lambda ctx: ctx["newgrf_count"], c.Struct("newgrfs", GrfID("grfid"), MD5("md5"), )) ) ), c.If(lambda ctx: ctx["version"] >= 3, c.Struct("ext_date", OpenTTD_NewDate("current"), OpenTTD_NewDate("start"), ) ), c.If(lambda ctx: ctx["version"] >= 2, c.Struct("ext_limits", c.ULInt8("companies_max"), c.ULInt8("companies_current"), c.ULInt8("spectators_max")
def _struct(cls):
    return construct.Struct(
        "version" / LsbVersionValidator(construct.Int32ul),
        "project_name" / construct.PascalString(construct.Int32ul, "cp932"),
        "unk1" / construct.Int64ul,
        "unk2" / construct.Int64ul,
        "init_lsb" / construct.PascalString(construct.Int32ul, "cp932"),
        "exit_lsb" / construct.If(
            construct.this.version > 0x6D,
            construct.PascalString(construct.Int32ul, "cp932"),
        ),
        "project_dir" / construct.PascalString(construct.Int32ul, "cp932"),
        "unk3" / construct.Int32ul,
        "bool1" / construct.Byte,
        "bool2" / construct.If(
            construct.this.version >= 0x6A,
            construct.Byte,
        ),
        "audio_formats" / construct.If(
            construct.this.version >= 0x6D,
            construct.PascalString(construct.Int32ul, "cp932"),
        ),
        "bool3" / construct.If(
            construct.this.version >= 0x71,
            construct.Byte,
        ),
        "bool4" / construct.If(
            construct.this.version >= 0x72,
            construct.Byte,
        ),
        "bool5" / construct.If(
            construct.this.version >= 0x74,
            construct.Byte,
        ),
        "insert_disk_prompt" / construct.PascalString(construct.Int32ul, "cp932"),
        "exit_prompt" / construct.PascalString(construct.Int32ul, "cp932"),
        "system_settings" / construct.PrefixedArray(
            construct.Int32ul,
            construct.Struct(
                "type" / construct.Enum(construct.Byte, ParamType),
                "name" / construct.PascalString(construct.Int32ul, "cp932"),
                "value" / construct.Switch(
                    construct.this.type,
                    {
                        "Int": construct.Int32sl,
                        "Float": construct.ExprAdapter(
                            construct.Bytes(10),
                            lambda obj, ctx: numpy.frombuffer(
                                obj.rjust(16, b"\x00"), dtype=numpy.longdouble),
                            lambda obj, ctx: numpy.longdouble(obj).tobytes()[-10:],
                        ),
                        "Flag": construct.Byte,
                        "Str": construct.PascalString(construct.Int32ul, "cp932"),
                    },
                ),
            ),
        ),
    )
class FlacReader: FRAME_HEADER = construct.Struct( 'frame_header', construct.Bits('sync', 14), construct.Bits('reserved', 2), construct.Bits('block_size', 4), construct.Bits('sample_rate', 4), construct.Bits('channel_assignment', 4), construct.Bits('bits_per_sample', 3), construct.Padding(1), construct.IfThenElse( 'total_channels', lambda ctx1: ctx1['channel_assignment'] <= 7, construct.Value('c', lambda ctx2: ctx2['channel_assignment'] + 1), construct.Value('c', lambda ctx3: 2)), UTF8('frame_number'), construct.IfThenElse( 'extended_block_size', lambda ctx1: ctx1['block_size'] == 6, construct.Bits('b', 8), construct.If(lambda ctx2: ctx2['block_size'] == 7, construct.Bits('b', 16))), construct.IfThenElse( 'extended_sample_rate', lambda ctx1: ctx1['sample_rate'] == 12, construct.Bits('s', 8), construct.If(lambda ctx2: ctx2['sample_rate'] in (13, 14), construct.Bits('s', 16))), construct.Bits('crc8', 8)) UNARY = construct.Struct( 'unary', construct.RepeatUntil(lambda obj, ctx: obj == '\x01', construct.Field('bytes', 1)), construct.Value('value', lambda ctx: len(ctx['bytes']) - 1)) SUBFRAME_HEADER = construct.Struct( 'subframe_header', construct.Padding(1), construct.Bits('subframe_type', 6), construct.Flag('has_wasted_bits_per_sample'), construct.IfThenElse('wasted_bits_per_sample', lambda ctx: ctx['has_wasted_bits_per_sample'], PlusOne(Unary('value')), construct.Value('value', lambda ctx2: 0))) GET_BLOCKSIZE_FROM_STREAMINFO = -1 GET_8BIT_BLOCKSIZE_FROM_END_OF_HEADER = -2 GET_16BIT_BLOCKSIZE_FROM_END_OF_HEADER = -3 BLOCK_SIZE = (GET_BLOCKSIZE_FROM_STREAMINFO, 192, 576, 1152, 2304, 4608, GET_8BIT_BLOCKSIZE_FROM_END_OF_HEADER, GET_16BIT_BLOCKSIZE_FROM_END_OF_HEADER, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768) GET_SAMPLE_SIZE_FROM_STREAMINFO = -1 SAMPLE_SIZE = (GET_SAMPLE_SIZE_FROM_STREAMINFO, 8, 12, None, 16, 20, 24, None) def FIXED0(subframe, residual, i): subframe.insert(i, residual[i]) def FIXED1(subframe, residual, i): subframe.insert(i, subframe[i - 1] + residual[i]) def FIXED2(subframe, residual, i): subframe.insert(i, ((2 * subframe[i - 1]) - subframe[i - 2] + \ residual[i])) def FIXED3(subframe, residual, i): subframe.insert(i, ((3 * subframe[i - 1]) - (3 * subframe[i - 2]) + \ subframe[i - 3] + residual[i])) def FIXED4(subframe, residual, i): subframe.insert(i, ((4 * subframe[i - 1]) - (6 * subframe[i - 2]) + \ (4 * subframe[i - 3]) - subframe[i - 4] + residual[i])) #iterates over all of the channels, in order def MERGE_INDEPENDENT(channel_list): channel_data = [iter(c) for c in channel_list] while (True): for channel in channel_data: yield channel.next() def MERGE_LEFT(channel_list): channel_left = iter(channel_list[0]) channel_side = iter(channel_list[1]) while (True): left = channel_left.next() side = channel_side.next() yield left yield left - side def MERGE_RIGHT(channel_list): channel_side = iter(channel_list[0]) channel_right = iter(channel_list[1]) while (True): side = channel_side.next() right = channel_right.next() yield side + right yield right def MERGE_MID(channel_list): channel_mid = iter(channel_list[0]) channel_side = iter(channel_list[1]) while (True): mid = channel_mid.next() side = channel_side.next() mid = mid << 1 mid |= (side & 0x1) yield (mid + side) >> 1 yield (mid - side) >> 1 CHANNEL_FUNCTIONS = (MERGE_INDEPENDENT, MERGE_INDEPENDENT, MERGE_INDEPENDENT, MERGE_INDEPENDENT, MERGE_INDEPENDENT, MERGE_INDEPENDENT, MERGE_INDEPENDENT, MERGE_INDEPENDENT, MERGE_LEFT, MERGE_RIGHT, MERGE_MID) FIXED_FUNCTIONS = (FIXED0, FIXED1, FIXED2, FIXED3, FIXED4) def 
__init__(self, flac_stream): self.stream = BufferedStream(flac_stream) self.streaminfo = None self.bitstream = None #ensure the file starts with 'fLaC' self.read_stream_marker() #initialize self.bitstream self.begin_bitstream() #find self.streaminfo in case we need it self.read_metadata_blocks() def close(self): if (self.bitstream != None): self.bitstream.close() else: self.stream.close() def read_stream_marker(self): if (self.stream.read(4) != 'fLaC'): raise FlacStreamException('invalid stream marker') def read_metadata_blocks(self): block = audiotools.FlacAudio.METADATA_BLOCK_HEADER.parse_stream( self.stream) while (block.last_block == 0): if (block.block_type == 0): self.streaminfo = audiotools.FlacAudio.STREAMINFO.parse_stream( self.stream) else: self.stream.seek(block.block_length, 1) block = audiotools.FlacAudio.METADATA_BLOCK_HEADER.parse_stream( self.stream) self.stream.seek(block.block_length, 1) def begin_bitstream(self): import bitstream #self.bitstream = construct.BitStreamReader(self.stream) self.bitstream = bitstream.BitStreamReader(self.stream) def read_frame(self): self.stream.reset_buffer() try: header = FlacReader.FRAME_HEADER.parse_stream(self.bitstream) except construct.core.FieldError: return "" if (header.sync != 0x3FFE): raise FlacStreamException('invalid sync') if (crc8(self.stream.getvalue()[0:-1]) != header.crc8): raise FlacStreamException('crc8 checksum failed') #block_size tells us how many samples we need from each subframe block_size = FlacReader.BLOCK_SIZE[header.block_size] if (block_size == self.GET_BLOCKSIZE_FROM_STREAMINFO): block_size = self.streaminfo.maximum_blocksize elif ((block_size == self.GET_8BIT_BLOCKSIZE_FROM_END_OF_HEADER) or (block_size == self.GET_16BIT_BLOCKSIZE_FROM_END_OF_HEADER)): block_size = header.extended_block_size + 1 #grab subframe data as 32-bit array objects subframe_data = [] for channel_number in xrange(header.total_channels): subframe_data.append( self.read_subframe(header, block_size, channel_number)) crc16sum = crc16(self.stream.getvalue()) #try to byte-align the stream if (len(self.bitstream.buffer) > 0): self.bitstream.read(len(self.bitstream.buffer)) if (crc16sum != construct.Bits('crc16', 16).parse_stream( self.bitstream)): raise FlacStreamException('crc16 checksum failed') #convert our list of subframe data arrays into #a string of sample data if (FlacReader.SAMPLE_SIZE[header.bits_per_sample] == 16): merged_frames = array.array( 'h', FlacReader.CHANNEL_FUNCTIONS[header.channel_assignment]( subframe_data)) if (audiotools.BIG_ENDIAN): merged_frames.byteswap() return merged_frames.tostring() elif (FlacReader.SAMPLE_SIZE[header.bits_per_sample] == 8): merged_frames = array.array( 'b', FlacReader.CHANNEL_FUNCTIONS[header.channel_assignment]( subframe_data)) return merged_frames.tostring() else: if (FlacReader.SAMPLE_SIZE[header.bits_per_sample] == \ self.GET_SAMPLE_SIZE_FROM_STREAMINFO): bits_per_sample = self.streaminfo.bits_per_sample + 1 elif (FlacReader.SAMPLE_SIZE[header.bits_per_sample] == None): raise FlacStreamException('invalid bits per sample') else: bits_per_sample = FlacReader.SAMPLE_SIZE[ header.bits_per_sample] stream = construct.GreedyRepeater( construct.BitStruct( 'bits', construct.Bits('value', bits_per_sample, swapped=True, signed=True))) return stream.build([ construct.Container(value=v) for v in FlacReader.CHANNEL_FUNCTIONS[ header.channel_assignment](subframe_data) ]) def read_subframe(self, frame_header, block_size, channel_number): subframe_header = \ 
FlacReader.SUBFRAME_HEADER.parse_stream(self.bitstream) #figure out the bits-per-sample of this subframe if ((frame_header.channel_assignment == 8) and (channel_number == 1)): #if channel is stored as left+difference #and this is the difference, add 1 bit bits_per_sample = FlacReader.SAMPLE_SIZE[ frame_header.bits_per_sample] + 1 elif ((frame_header.channel_assignment == 9) and (channel_number == 0)): #if channel is stored as difference+right #and this is the difference, add 1 bit bits_per_sample = FlacReader.SAMPLE_SIZE[ frame_header.bits_per_sample] + 1 elif ((frame_header.channel_assignment == 10) and (channel_number == 1)): #if channel is stored as average+difference #and this is the difference, add 1 bit bits_per_sample = FlacReader.SAMPLE_SIZE[ frame_header.bits_per_sample] + 1 else: #otherwise, use the number from the frame header bits_per_sample = FlacReader.SAMPLE_SIZE[ frame_header.bits_per_sample] if (subframe_header.has_wasted_bits_per_sample): bits_per_sample -= subframe_header.wasted_bits_per_sample if (subframe_header.subframe_type == 0): subframe = self.read_subframe_constant(block_size, bits_per_sample) elif (subframe_header.subframe_type == 1): subframe = self.read_subframe_verbatim(block_size, bits_per_sample) elif ((subframe_header.subframe_type & 0x38) == 0x08): subframe = self.read_subframe_fixed( subframe_header.subframe_type & 0x07, block_size, bits_per_sample) elif ((subframe_header.subframe_type & 0x20) == 0x20): subframe = self.read_subframe_lpc( (subframe_header.subframe_type & 0x1F) + 1, block_size, bits_per_sample) else: raise FlacStreamException('invalid subframe type') if (subframe_header.has_wasted_bits_per_sample): return array.array('i', [ i << subframe_header.wasted_bits_per_sample for i in subframe ]) else: return subframe def read_subframe_constant(self, block_size, bits_per_sample): sample = construct.Bits('b', bits_per_sample).parse_stream(self.bitstream) subframe = array.array('i', [sample] * block_size) return subframe def read_subframe_verbatim(self, block_size, bits_per_sample): return array.array( 'i', construct.StrictRepeater( block_size, construct.Bits("samples", bits_per_sample, signed=True)).parse_stream(self.bitstream)) def read_subframe_fixed(self, order, block_size, bits_per_sample): samples = construct.StrictRepeater( order, construct.Bits("warm_up_samples", bits_per_sample, signed=True)) subframe = array.array('i', samples.parse_stream(self.bitstream)) residual = self.read_residual(block_size, order) fixed_func = self.FIXED_FUNCTIONS[order] for i in xrange(len(subframe), block_size): fixed_func(subframe, residual, i) return subframe def read_subframe_lpc(self, order, block_size, bits_per_sample): samples = construct.StrictRepeater( order, construct.Bits("warm_up_samples", bits_per_sample, signed=True)) subframe = array.array('i', samples.parse_stream(self.bitstream)) lpc_precision = construct.Bits('lpc_precision', 4).parse_stream( self.bitstream) + 1 lpc_shift = construct.Bits('lpc_shift', 5).parse_stream(self.bitstream) coefficients = array.array( 'i', construct.StrictRepeater( order, construct.Bits('coefficients', lpc_precision, signed=True)).parse_stream(self.bitstream)) residual = self.read_residual(block_size, order) for i in xrange(len(subframe), block_size): subframe.insert(i, (sum( [coefficients[j] * subframe[i - j - 1] for j in xrange(0,len(coefficients))]) >> lpc_shift) + \ residual[i]) return subframe def read_residual(self, block_size, predictor_order): rice = array.array('i') #add some dummy rice so that the Rice index 
matches #that of the rest of the subframe for i in xrange(predictor_order): rice.append(0) coding_method = self.bitstream.read(2) if (coding_method == '\x00\x00'): rice2 = False elif (coding_method == '\x00\x01'): rice2 = True else: raise FlacStreamException('invalid residual coding method') partition_order = construct.Bits('partition_order', 4).parse_stream(self.bitstream) if (partition_order > 0): total_samples = ((block_size / 2**partition_order) - predictor_order) rice.extend(self.read_encoded_rice(total_samples, rice2)) for i in xrange(1, 2**partition_order): total_samples = (block_size / 2**partition_order) rice.extend(self.read_encoded_rice(total_samples, rice2)) else: rice.extend( self.read_encoded_rice(block_size - predictor_order, rice2)) return rice def read_encoded_rice(self, total_samples, rice2=False): bin_to_int = construct.lib.binary.bin_to_int samples = array.array('i') if (not rice2): rice_parameter = construct.Bits('rice_parameter', 4).parse_stream(self.bitstream) else: rice_parameter = construct.Bits('rice_parameter', 5).parse_stream(self.bitstream) if (rice_parameter != 0xF): #a Rice encoded residual for x in xrange(total_samples): #count the number of 0 bits before the next 1 bit #(unary encoding) #to find our most significant bits msb = 0 s = self.bitstream.read(1) while (s != '\x01'): msb += 1 s = self.bitstream.read(1) #grab the proper number of least significant bits lsb = bin_to_int(self.bitstream.read(rice_parameter)) #combine msb and lsb to get the Rice-encoded value value = (msb << rice_parameter) | lsb if ((value & 0x1) == 0x1): #negative samples.append(-(value >> 1) - 1) else: #positive samples.append(value >> 1) else: #unencoded residual bits_per_sample = construct.Bits('escape_code', 5).parse_stream(self.bitstream) sample = construct.Bits("sample", bits_per_sample, signed=True) for x in xrange(total_samples): samples.append(sample.parse_stream(self.bitstream)) return samples
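# The sign handling at the end of read_encoded_rice above is the usual Rice
# zigzag mapping (even -> non-negative, odd -> negative). A plain-Python check
# of just that step:
def unzigzag(value):
    # Mirrors the branch above: odd values map to negatives.
    return -(value >> 1) - 1 if value & 0x1 else value >> 1

assert [unzigzag(v) for v in (0, 1, 2, 3)] == [0, -1, 1, -2]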
PermalinkBinary = construct.FocusedSeq(
    "fields",
    schema_version=construct.Const(_CURRENT_SCHEMA_VERSION, construct.Byte),
    fields=construct.RawCopy(
        construct.Aligned(
            3,
            construct.Struct(
                header=construct.BitStruct(
                    has_seed_hash=construct.Rebuild(
                        construct.Flag, construct.this._.seed_hash != None),
                    bytes_rotation=construct.Rebuild(
                        construct.BitsInteger(7),
                        lambda ctx: single_byte_hash(ctx._.generator_params) >> 1,
                    )),
                seed_hash=construct.If(construct.this.header.has_seed_hash,
                                       construct.Bytes(5)),
                randovania_version=construct.Bytes(4),  # short git hash
                generator_params=construct.ExprAdapter(
                    construct.Prefixed(construct.VarInt, construct.GreedyBytes),
                    # parsing
                    decoder=create_rotator(inverse=True),
                    # building
                    encoder=create_rotator(inverse=False),
                ),
            ))),
    permalink_checksum=construct.Checksum(
        construct.Bytes(2),
        lambda data: hashlib.blake2b(data, digest_size=2).digest(),
        construct.this.fields.data,
    ),
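# single_byte_hash and create_rotator are referenced above but defined elsewhere.
# A hedged sketch of the former, assuming a one-byte digest consistent with the
# blake2b checksum used in the same struct (the real helper may differ):
import hashlib

def single_byte_hash_sketch(data: bytes) -> int:
    return hashlib.blake2b(data, digest_size=1).digest()[0]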
IMAGE_NUMBEROF_DIRECTORY_ENTRIES = 16

ImageOptionalHeader = construct.Struct('ImageOptionalHeader',
    construct.Enum(construct.ULInt16('Magic'),
        IMAGE_NT_OPTIONAL_HDR32_MAGIC = 0x10b,
        IMAGE_NT_OPTIONAL_HDR64_MAGIC = 0x20b
    ),
    construct.ULInt8('MajorLinkerVersion'),
    construct.ULInt8('MinorLinkerVersion'),
    construct.ULInt32('SizeOfCode'),
    construct.ULInt32('SizeOfInitializedData'),
    construct.ULInt32('SizeOfUninitializedData'),
    construct.ULInt32('AddressOfEntryPoint'),
    construct.ULInt32('BaseOfCode'),
    construct.If(lambda ctx: ctx.Magic == 'IMAGE_NT_OPTIONAL_HDR32_MAGIC',
        construct.ULInt32('BaseOfData')
    ),
    construct.Switch('ImageBase', lambda ctx: ctx.Magic, {
            'IMAGE_NT_OPTIONAL_HDR32_MAGIC': construct.ULInt32('ImageBase_'),
            'IMAGE_NT_OPTIONAL_HDR64_MAGIC': construct.ULInt64('ImageBase_')
        }
    ),
    construct.ULInt32('SectionAlignment'),
    construct.ULInt32('FileAlignment'),
    construct.ULInt16('MajorOperatingSystemVersion'),
    construct.ULInt16('MinorOperatingSystemVersion'),
    construct.ULInt16('MajorImageVersion'),
    construct.ULInt16('MinorImageVersion'),
    construct.ULInt16('MajorSubsystemVersion'),
    construct.ULInt16('MinorSubsystemVersion'),
    construct.ULInt32('Win32VersionValue'),
"cam_multiplier_limit" / construct.Int16ub, construct.Padding(2) ) header_cmd2 = construct.Struct( 'JDN_base' / construct.Int16ul, construct.Padding(2), 'seconds' / construct.Int32ul ) header = construct.Struct( 'packet_type' / construct.Int16ul, 'cmd_id' / construct.Int16ul, 'payload_size' / construct.Int16ul, 'seq_id' / construct.Int16ul, construct.Embedded( construct.Switch(lambda ctx: ctx.cmd_id, { 0: construct.If( lambda ctx: ctx.payload_size >= header_cmd0.sizeof(), header_cmd0), 1: construct.If( lambda ctx: ctx.payload_size == header_cmd1.sizeof(), header_cmd1), 2: construct.If( lambda ctx: ctx.payload_size == header_cmd2.sizeof(), header_cmd2) }, default=construct.Pass ) ) )
"dwFlags" / C.Int32sl, #07100A00 #DW Header Flags "dwHeight" / C.Int32ul, "dwWidth" / C.Int32ul, "dwPitchOrLinearSize" / C.Int32ul, #size of data as packed integer (width*height * bpp/8) #Block Compressed: max( 1, ((width+3)/4) ) * block-size #R8G8_B8G8, G8R8_G8B8, legacy UYVY-packed: ((width+1) >> 1) * 4 #Other ( width * bits-per-pixel + 7 ) / 8 "dwDepth" / C.Int32ul, #Only used for volumetric textures "dwMipMapCount" / C.Int32ul, "dwReserved1" / C.Int32ul[11], #0s "ddpfPixelFormat" / DDS_PIXELFORMAT, #typeMagic "ddsCaps" / C.Int32ul[4], "dwReserved2" / C.Int32ul, "dx10Header" / C.If(C.this.ddpfPixelFormat.dwFourCC == "DX10", DX10_Header), ) #If DX10 is set on the pixel format DX10_Header follows ddsTypeEnum = [ "UNKNOWN", "R32G32B32A32_TYPELESS", "R32G32B32A32_FLOAT", "R32G32B32A32_UINT", "R32G32B32A32_SINT", "R32G32B32_TYPELESS", "R32G32B32_FLOAT", "R32G32B32_UINT", "R32G32B32_SINT", "R16G16B16A16_TYPELESS", "R16G16B16A16_FLOAT", "R16G16B16A16_UNORM", "R16G16B16A16_UINT", "R16G16B16A16_SNORM", "R16G16B16A16_SINT", "R32G32_TYPELESS", "R32G32_FLOAT", "R32G32_UINT", "R32G32_SINT", "R32G8X24_TYPELESS", "D32_FLOAT_S8X24_UINT", "R32_FLOAT_X8X24_TYPELESS", "X32_TYPELESS_G8X24_UINT", "R10G10B10A2_TYPELESS", "R10G10B10A2_UNORM", "R10G10B10A2_UINT", "R11G11B10_FLOAT", "R8G8B8A8_TYPELESS", "R8G8B8A8_UNORM", "R8G8B8A8_UNORM_SRGB", "R8G8B8A8_UINT", "R8G8B8A8_SNORM", "R8G8B8A8_SINT", "R16G16_TYPELESS", "R16G16_FLOAT", "R16G16_UNORM",
    construct.Rename('description', UNICODE_STRING),
    construct.ULInt32('size'),
    construct.Bytes('raw_data', lambda ctx: ctx.size)
)
'''

CREDENTIAL_DEC_BLOCK_ENC = construct.Struct(
    'CREDENTIAL_DEC_BLOCK_ENC',
    construct.ULInt32('empty'),
    construct.Rename('block_name', UNICODE_STRING),
    construct.ULInt32('size'),
    construct.Bytes('raw_data', lambda ctx: ctx.size))

CREDENTIAL_DECRYPTED = construct.Struct(
    'CREDENTIAL_DECRYPTED',
    construct.Rename('header', CREDENTIAL_DEC_HEADER),
    construct.Rename('main', CREDENTIAL_DEC_MAIN),
    construct.If(
        lambda ctx: ctx.header.unk_type == 2,
        construct.Array(lambda ctx: ctx.header.unk_blocks,
                        CREDENTIAL_DEC_BLOCK_ENC)))

# VAULT file structs.
VAULT_ATTRIBUTE_ENCRYPTED = construct.Struct(
    'VAULT_ATTRIBUTE_ENCRYPTED',
    construct.Byte('has_iv'),
    construct.IfThenElse(
        '',
        lambda ctx: ctx.has_iv,
        construct.Embed(
            construct.Struct(
                'encrypted',
                construct.ULInt32('iv_size'),
                construct.Bytes('iv', lambda ctx: ctx.iv_size),
                construct.Bytes('data',
                                lambda ctx: ctx.size - 1 - 4 - ctx.iv_size))),
        construct.Embed(