def _ReadFileHeader(self, file_object):
  """Reads a file header.

  Args:
    file_object (file): file-like object.

  Raises:
    ParseError: if the file header cannot be read.
  """
  header_map = self._GetDataTypeMap('firefox_cache1_map_header')

  file_header, header_data_size = self._ReadStructureFromFileObject(
      file_object, 0, header_map, 'file header')

  if self._debug:
    self._DebugPrintStructureObject(file_header, self._DEBUG_INFO_FILE_HEADER)

  major_version = file_header.major_format_version
  if major_version != 1:
    raise errors.ParseError(
        'Unsupported major format version: {0:d}'.format(major_version))

  # The data size in the header must account for everything after the header.
  expected_data_size = self._file_size - header_data_size
  if file_header.data_size != expected_data_size:
    raise errors.ParseError('Data size does not correspond with file size.')
def _ReadEntries(self, file_object):
  """Reads entries.

  Args:
    file_object (file): file-like object.

  Raises:
    ParseError: if the entries cannot be read.
  """
  entry_map = self._GetDataTypeMap('macosx_utmpx_entry')

  # The first entry acts as the file header and carries the signature.
  file_offset = 0
  entry, entry_data_size = self._ReadStructureFromFileObject(
      file_object, file_offset, entry_map, 'entry')

  if self._debug:
    self._DebugPrintEntry(entry)

  if not entry.username.startswith(b'utmpx-1.00\x00'):
    raise errors.ParseError('Unsupported file header signature.')

  if entry.type != 10:
    raise errors.ParseError('Unsupported file header type of login.')

  file_offset += entry_data_size

  # Read the remaining entries until the end of the file.
  while file_offset < self._file_size:
    entry, entry_data_size = self._ReadStructureFromFileObject(
        file_object, file_offset, entry_map, 'entry')

    if self._debug:
      self._DebugPrintEntry(entry)

    file_offset += entry_data_size
def _ReadMemberHeader(self, file_object):
  """Reads a member header.

  Args:
    file_object (file): file-like object.

  Raises:
    ParseError: if the member header cannot be read.
  """
  file_offset = file_object.tell()
  data_type_map = self._GetDataTypeMap('gzip_member_header')

  member_header, _ = self._ReadStructureFromFileObject(
      file_object, file_offset, data_type_map, 'member header')

  if self._debug:
    self._DebugPrintStructureObject(
        member_header, self._DEBUG_INFO_MEMBER_HEADER)

  if member_header.signature != self._GZIP_SIGNATURE:
    raise errors.ParseError('Unsupported signature: 0x{0:04x}.'.format(
        member_header.signature))

  if member_header.compression_method != self._COMPRESSION_METHOD_DEFLATE:
    raise errors.ParseError(
        'Unsupported compression method: {0:d}.'.format(
            member_header.compression_method))

  # An optional extra field, preceded by a 16-bit little-endian size.
  if member_header.flags & self._FLAG_FEXTRA:
    file_offset = file_object.tell()
    data_type_map = self._GetDataTypeMap('uint16le')

    extra_field_data_size, _ = self._ReadStructureFromFileObject(
        file_object, file_offset, data_type_map, 'extra field data size')

    # The content of the extra field is skipped, not parsed.
    file_object.seek(extra_field_data_size, os.SEEK_CUR)

  # An optional original filename stored as an end-of-string terminated
  # string.
  if member_header.flags & self._FLAG_FNAME:
    file_offset = file_object.tell()
    data_type_map = self._GetDataTypeMap('cstring')

    value_string, _ = self._ReadStructureFromFileObject(
        file_object, file_offset, data_type_map, 'original filename')

    if self._debug:
      self._DebugPrintValue('Original filename', value_string)

  # An optional comment stored as an end-of-string terminated string.
  if member_header.flags & self._FLAG_FCOMMENT:
    file_offset = file_object.tell()
    data_type_map = self._GetDataTypeMap('cstring')

    value_string, _ = self._ReadStructureFromFileObject(
        file_object, file_offset, data_type_map, 'comment')

    if self._debug:
      self._DebugPrintValue('Comment', value_string)

  # An optional 16-bit header CRC, which is read but not validated.
  if member_header.flags & self._FLAG_FHCRC:
    file_object.read(2)
def _ReadFileHeader(self, file_object):
  """Reads a file header.

  Args:
    file_object (file): file-like object.

  Returns:
    uuidtext_file_header: a file header.

  Raises:
    ParseError: if the file header cannot be read.
  """
  data_type_map = self._GetDataTypeMap('uuidtext_file_header')

  file_header, _ = self._ReadStructureFromFileObject(
      file_object, 0, data_type_map, 'file header')

  if self._debug:
    self._DebugPrintStructureObject(file_header, self._DEBUG_INFO_FILE_HEADER)

  if file_header.signature != 0x66778899:
    # The signature is a 32-bit value, hence format the error message with
    # 8 hexadecimal digits (the original used 0x{0:04x}).
    raise errors.ParseError('Unsupported signature: 0x{0:08x}.'.format(
        file_header.signature))

  format_version = (
      file_header.major_format_version, file_header.minor_format_version)
  if format_version != (2, 1):
    raise errors.ParseError(
        'Unsupported format version: {0:d}.{1:d}.'.format(
            file_header.major_format_version,
            file_header.minor_format_version))

  return file_header
def _ReadVolumePath(self, record_data, volume_path_offset):
  """Reads the volume path.

  Args:
    record_data (bytes): record data.
    volume_path_offset (int): volume path offset relative to the start of
        the file-like object.

  Raises:
    ParseError: if the volume path cannot be read.
  """
  record_map = self._GetDataTypeMap('rp_change_log_volume_path_record')

  try:
    volume_path_record = self._ReadStructureFromByteStream(
        record_data, volume_path_offset, record_map, 'volume path record')
  except (ValueError, errors.ParseError) as exception:
    raise errors.ParseError(
        'Unable to parse volume path record with error: {0!s}'.format(
            exception))

  if self._debug:
    self._DebugPrintRecordHeader(volume_path_record)

    # The trailing end-of-string character is stripped for display.
    self._DebugPrintValue('Volume path', volume_path_record.volume_path[:-1])
    self._DebugPrintText('\n')

  if volume_path_record.record_type != 2:
    raise errors.ParseError('Unsupported record type: {0:d}'.format(
        volume_path_record.record_type))
def _ReadFileHeader(self, file_object):
  """Reads the file header.

  Args:
    file_object (file): file-like object.

  Returns:
    systemd_journal_file_header: file header.

  Raises:
    ParseError: if the file header cannot be read.
  """
  header_map = self._GetDataTypeMap('systemd_journal_file_header')

  file_header, _ = self._ReadStructureFromFileObject(
      file_object, 0, header_map, 'file header')

  if self._debug:
    self._DebugPrintStructureObject(file_header, self._DEBUG_INFO_FILE_HEADER)

  if file_header.signature != self._FILE_SIGNATURE:
    raise errors.ParseError('Invalid file signature.')

  header_size = file_header.header_size
  if header_size not in self._SUPPORTED_FILE_HEADER_SIZES:
    raise errors.ParseError('Unsupported file header size: {0:d}.'.format(
        header_size))

  # The header size determines the format version of the file.
  if header_size == 224:
    self._format_version = 187
  elif header_size == 240:
    self._format_version = 189

  return file_header
def _ReadFileHeader(self, file_object):
  """Reads the file header.

  Args:
    file_object (file): file-like object.

  Raises:
    ParseError: if the file header cannot be read.
  """
  header_map = self._GetDataTypeMap('chrome_cache_data_block_file_header')

  file_header, _ = self._ReadStructureFromFileObject(
      file_object, 0, header_map, 'data block file header')

  if self._debug:
    self._DebugPrintFileHeader(file_header)

  if file_header.signature != self.SIGNATURE:
    raise errors.ParseError(
        'Unsupported data block file signature: 0x{0:08x}'.format(
            file_header.signature))

  # Combine major and minor version into a dotted version string.
  self.format_version = '{0:d}.{1:d}'.format(
      file_header.major_version, file_header.minor_version)

  if self.format_version not in ('2.0', '2.1'):
    raise errors.ParseError(
        'Unsupported data block file version: {0:s}'.format(
            self.format_version))

  self.block_size = file_header.block_size
  self.number_of_entries = file_header.number_of_entries
def _ReadFileHeader(self, file_object):
  """Reads the file header.

  Args:
    file_object (file): file-like object.

  Raises:
    ParseError: if the file header cannot be read.
  """
  header_map = self._GetDataTypeMap('chrome_cache_index_file_header')

  file_header, _ = self._ReadStructureFromFileObject(
      file_object, 0, header_map, 'index file header')

  if self._debug:
    self._DebugPrintStructureObject(file_header, self._DEBUG_INFO_FILE_HEADER)

  if file_header.signature != self.SIGNATURE:
    raise errors.ParseError(
        'Unsupported index file signature: 0x{0:08x}'.format(
            file_header.signature))

  # Combine major and minor version into a dotted version string.
  self.format_version = '{0:d}.{1:d}'.format(
      file_header.major_version, file_header.minor_version)

  if self.format_version not in ('2.0', '2.1'):
    raise errors.ParseError(
        'Unsupported index file version: {0:s}'.format(self.format_version))

  self.creation_time = file_header.creation_time
def _ReadFileHeader(self, file_object):
  """Reads a file header.

  Args:
    file_object (file): file-like object.

  Returns:
    tzif_file_header: file header.

  Raises:
    ParseError: if the file header cannot be read.
  """
  header_map = self._GetDataTypeMap('tzif_file_header')

  file_header, _ = self._ReadStructureFromFileObject(
      file_object, 0, header_map, 'file header')

  if self._debug:
    self._DebugPrintFileHeader(file_header)

  if file_header.signature != self._FILE_SIGNATURE:
    raise errors.ParseError('Unsupported file signature.')

  # Supported version values are 0x00, 0x32 ('2') and 0x33 ('3').
  if file_header.format_version not in (0x00, 0x32, 0x33):
    raise errors.ParseError('Unsupported format version: {0:d}.'.format(
        file_header.format_version))

  return file_header
def ParseFile(self, path):
  """Parses a Chrome Cache file.

  Args:
    path (str): path of the file.

  Raises:
    ParseError: if the file cannot be read.
  """
  with open(path, 'rb') as file_object:
    signature_data = file_object.read(self._UINT32LE_SIZE)

    try:
      signature = self._UINT32LE.MapByteStream(signature_data)
    except dtfabric_errors.MappingError as exception:
      # Fixed error message; it previously read "Unable to signature".
      raise errors.ParseError(
          'Unable to parse signature with error: {0!s}'.format(exception))

    if signature not in (DataBlockFile.SIGNATURE, IndexFile.SIGNATURE):
      raise errors.ParseError(
          'Unsupported signature: 0x{0:08x}'.format(signature))

    # The signature determines which file class parses the remainder.
    if signature == DataBlockFile.SIGNATURE:
      chrome_cache_file = DataBlockFile(
          debug=self._debug, output_writer=self._output_writer)
    elif signature == IndexFile.SIGNATURE:
      chrome_cache_file = IndexFile(
          debug=self._debug, output_writer=self._output_writer)

    chrome_cache_file.ReadFileObject(file_object)
def _ReadFileHeader(self, file_object):
  """Reads the file header.

  The header record (type 0) is followed by a volume path record and a
  32-bit copy of the record size, which is cross-checked against the size
  in the header.

  Args:
    file_object (file): file-like object.

  Raises:
    ParseError: if the file header cannot be read.
  """
  data_type_map = self._GetDataTypeMap('rp_change_log_file_header')

  file_header, file_header_data_size = self._ReadStructureFromFileObject(
      file_object, 0, data_type_map, 'file header')

  if self._debug:
    self._DebugPrintFileHeader(file_header)

  if file_header.signature != self._RECORD_SIGNATURE:
    raise errors.ParseError('Unsupported change.log file signature')

  if file_header.record_type != 0:
    raise errors.ParseError('Unsupported record type: {0:d}'.format(
        file_header.record_type))

  if file_header.format_version != 2:
    raise errors.ParseError(
        'Unsupported change.log format version: {0:d}'.format(
            file_header.format_version))

  # The remainder of the record follows the header structure.
  file_offset = file_header_data_size
  record_size = file_header.record_size - file_header_data_size

  record_data = file_object.read(record_size)

  if self._debug:
    self._DebugPrintData('Record data', record_data)

  # The last 4 bytes of the record contain a copy of the record size.
  self._ReadVolumePath(record_data[:-4], file_offset)

  data_type_map = self._GetDataTypeMap('uint32le')

  try:
    copy_of_record_size = self._ReadStructureFromByteStream(
        record_data[-4:], file_offset, data_type_map, 'copy of record size')
  except (ValueError, errors.ParseError) as exception:
    raise errors.ParseError(
        'Unable to parse copy of record size with error: {0!s}'.format(
            exception))

  if self._debug:
    value_string = '{0:d}'.format(copy_of_record_size)
    self._DebugPrintValue('Copy of record size', value_string)
    self._DebugPrintText('\n')

  # The copy of the record size must match the size in the file header.
  if file_header.record_size != copy_of_record_size:
    raise errors.ParseError(
        'Record size mismatch ({0:d} != {1:d})'.format(
            file_header.record_size, copy_of_record_size))
def _ReadRecord(self, file_object, file_offset):
  """Reads an event record.

  An event record starts with a header token that defines the record size,
  contains a sequence of tokens and is expected to end with a trailer token
  that repeats the record size.

  Args:
    file_object (file): file-like object.
    file_offset (int): offset of the token relative to the start of
        the file-like object.

  Raises:
    ParseError: if the event record cannot be read.
  """
  # Peek at the token type before reading the full header token.
  token_type = self._ReadTokenType(file_object, file_offset)
  if token_type not in self._HEADER_TOKEN_TYPES:
    raise errors.ParseError(
        'Unsupported header token type: 0x{0:02x}'.format(token_type))

  token_type, token_data = self._ReadToken(file_object, file_offset)

  if self._debug:
    debug_information = self._DEBUG_INFO_TOKEN_DATA.get(token_type, None)
    if debug_information:
      self._DebugPrintStructureObject(token_data, debug_information)

  if token_data.format_version != 11:
    raise errors.ParseError(
        'Unsupported format version type: {0:d}'.format(
            token_data.format_version))

  # The header token defines where the record ends.
  header_record_size = token_data.record_size
  record_end_offset = file_offset + header_record_size
  while file_offset < record_end_offset:
    token_type, token_data = self._ReadToken(file_object, file_offset)
    if not token_data:
      raise errors.ParseError(
          'Unsupported token type: 0x{0:02x}'.format(token_type))

    # TODO: add callback for validation (trailer) and read of more complex
    # structures.

    file_offset = file_object.tell()

    if self._debug:
      debug_information = self._DEBUG_INFO_TOKEN_DATA.get(token_type, None)
      if debug_information:
        self._DebugPrintStructureObject(token_data, debug_information)

    if token_type == self._TRAILER_TOKEN_TYPE:
      break

  # After the loop token_data contains the last token read, which is
  # expected to be the trailer token.
  if token_data.signature != self._TRAILER_TOKEN_SIGNATURE:
    raise errors.ParseError('Unsupported signature in trailer token.')

  # The record size in the trailer token must match the header token.
  if token_data.record_size != header_record_size:
    raise errors.ParseError(
        'Mismatch of event record size between header and trailer token.')
def _ReadFileHeader(self, file_object):
  """Reads the file header.

  Args:
    file_object (file): file-like object.

  Raises:
    ParseError: if the file header cannot be read.
  """
  file_offset = file_object.tell()
  data_type_map = self._GetDataTypeMap('custom_file_header')

  file_header, _ = self._ReadStructureFromFileObject(
      file_object, file_offset, data_type_map, 'file header')

  if self._debug:
    self._DebugPrintFileHeader(file_header)

  if file_header.unknown1 != 2:
    raise errors.ParseError('Unsupported unknown1: {0:d}.'.format(
        file_header.unknown1))

  if file_header.header_values_type > 2:
    raise errors.ParseError('Unsupported header value type: {0:d}.'.format(
        file_header.header_values_type))

  # The data type map of the header value depends on the header values type.
  if file_header.header_values_type == 0:
    data_type_map = self._GetDataTypeMap('custom_file_header_value_type_0')
  else:
    data_type_map = self._GetDataTypeMap(
        'custom_file_header_value_type_1_or_2')

  file_offset = file_object.tell()

  # TODO: implement read file_header_value_data for HEADER_VALUE_TYPE_0

  # The header value data is read as a fixed-size byte stream and then
  # mapped onto the structure.
  data_size = data_type_map.GetByteSize()
  file_header_value_data = file_object.read(data_size)

  file_header_value = self._ReadStructureFromByteStream(
      file_header_value_data, file_offset, data_type_map,
      'custom file header value')

  if self._debug:
    if file_header.header_values_type == 0:
      value_string = '{0:d}'.format(file_header_value.number_of_characters)
      self._DebugPrintValue('Number of characters', value_string)

      # TODO: print string.

    value_string = '{0:d}'.format(file_header_value.number_of_entries)
    self._DebugPrintValue('Number of entries', value_string)

    self._DebugPrintText('\n')
def _ReadFileHeader(self, file_object):
  """Reads the file header.

  Args:
    file_object (file): file-like object.

  Raises:
    ParseError: if the file header cannot be read.
  """
  header_map = self._GetDataTypeMap('custom_file_header')

  # The header is read from offset 0, so the size of the header data also
  # is the offset of the data that follows it.
  file_header, header_data_size = self._ReadStructureFromFileObject(
      file_object, 0, header_map, 'file header')

  if self._debug:
    self._DebugPrintStructureObject(file_header, self._DEBUG_INFO_FILE_HEADER)

  if file_header.unknown1 != 2:
    raise errors.ParseError('Unsupported unknown1: {0:d}.'.format(
        file_header.unknown1))

  if file_header.header_values_type > 2:
    raise errors.ParseError(
        'Unsupported header value type: {0:d}.'.format(
            file_header.header_values_type))

  # The data type map of the header value depends on the header values type.
  if file_header.header_values_type == 0:
    value_map_name = 'custom_file_header_value_type_0'
  else:
    value_map_name = 'custom_file_header_value_type_1_or_2'

  value_map = self._GetDataTypeMap(value_map_name)

  file_header_value, _ = self._ReadStructureFromFileObject(
      file_object, header_data_size, value_map, 'custom file header value')

  if self._debug:
    if file_header.header_values_type == 0:
      value_string = '{0:d}'.format(file_header_value.number_of_characters)
      self._DebugPrintValue('Number of characters', value_string)

      # TODO: print string.

    value_string = '{0:d}'.format(file_header_value.number_of_entries)
    self._DebugPrintValue('Number of entries', value_string)

    self._DebugPrintText('\n')
def _ReadFileEntry(self, file_object):
  """Reads the file entry.

  Args:
    file_object (file): file-like object.

  Raises:
    ParseError: if the file entry cannot be read.
  """
  file_offset = file_object.tell()

  file_entry_data = self._ReadData(
      file_object, file_offset, self._file_entry_data_size, 'file entry')

  data_type_map = self._GetDataTypeMap('recycler_info2_file_entry')

  try:
    file_entry = self._ReadStructureFromByteStream(
        file_entry_data, file_offset, data_type_map, 'file entry')
  except (ValueError, errors.ParseError) as exception:
    raise errors.ParseError((
        'Unable to map file entry data at offset: 0x{0:08x} with error: '
        '{1!s}').format(file_offset, exception))

  if self._debug:
    self._DebugPrintStructureObject(file_entry, self._DEBUG_INFO_FILE_ENTRY)

  # File entries larger than 280 bytes contain an additional original
  # filename stored as an UTF-16 little-endian string after the fixed-size
  # part of the entry.
  if self._file_entry_data_size > 280:
    file_offset += 280
    data_type_map = self._GetDataTypeMap(
        'recycler_info2_file_entry_utf16le_string')

    try:
      original_filename = self._ReadStructureFromByteStream(
          file_entry_data[280:], file_offset, data_type_map, 'file entry')
    except (ValueError, errors.ParseError) as exception:
      raise errors.ParseError((
          'Unable to map file entry data at offset: 0x{0:08x} with error: '
          '{1!s}').format(file_offset, exception))

    if self._debug:
      self._DebugPrintValue('Original filename (Unicode)', original_filename)

  if self._debug:
    self._DebugPrintText('\n')
def ReadFileObject(self, file_object):
  """Reads a Windows Metafile Format (WMF) file-like object.

  Args:
    file_object (file): file-like object.

  Raises:
    ParseError: if the file cannot be read.
  """
  # Peek at the first 4 bytes to determine if a placeable header precedes
  # the WMF header, then rewind.
  try:
    signature = file_object.read(4)
    file_object.seek(-4, os.SEEK_CUR)
  except IOError as exception:
    raise errors.ParseError(
        'Unable to read file signature with error: {0!s}'.format(exception))

  if signature == self._WMF_PLACEABLE_SIGNATURE:
    self._ReadPlaceable(file_object)

  self._ReadHeader(file_object)

  # Read records until the end of the file.
  file_offset = file_object.tell()
  while file_offset < self._file_size:
    record = self._ReadRecord(file_object, file_offset)

    file_offset += record.size
def _ReadFileHeader(self, file_object):
  """Reads the file header.

  Args:
    file_object (file): file-like object.

  Returns:
    asl_file_header: file header.

  Raises:
    ParseError: if the file header cannot be read.
  """
  header_map = self._GetDataTypeMap('asl_file_header')

  file_header, _ = self._ReadStructureFromFileObject(
      file_object, 0, header_map, 'file header')

  if self._debug:
    self._DebugPrintStructureObject(file_header, self._DEBUG_INFO_FILE_HEADER)

  if file_header.signature != self._FILE_SIGNATURE:
    raise errors.ParseError('Invalid file signature.')

  return file_header
def _ReadRecordExtraField(self, byte_stream, file_offset):
  """Reads a record extra field.

  Args:
    byte_stream (bytes): byte stream.
    file_offset (int): offset of the record extra field relative to
        the start of the file.

  Returns:
    asl_record_extra_field: record extra field.

  Raises:
    ParseError: if the record extra field cannot be read.
  """
  extra_field_map = self._GetDataTypeMap('asl_record_extra_field')

  try:
    record_extra_field = self._ReadStructureFromByteStream(
        byte_stream, file_offset, extra_field_map, 'record extra field')
  except (ValueError, errors.ParseError) as exception:
    raise errors.ParseError((
        'Unable to parse record extra field at offset: 0x{0:08x} with error: '
        '{1!s}').format(file_offset, exception))

  if self._debug:
    self._DebugPrintStructureObject(
        record_extra_field, self._DEBUG_INFO_RECORD_EXTRA_FIELD)

  return record_extra_field
def _ReadCString(self, page_data, string_offset):
  """Reads a string from the page data.

  Args:
    page_data (bytes): page data.
    string_offset (int): offset of the string relative to the start of
        the page.

  Returns:
    str: string.

  Raises:
    ParseError: if the string cannot be read.
  """
  cstring_map = self._GetDataTypeMap('cstring')

  try:
    value_string = self._ReadStructureFromByteStream(
        page_data[string_offset:], string_offset, cstring_map, 'cstring')
  except (ValueError, errors.ParseError) as exception:
    raise errors.ParseError(
        ('Unable to parse string at offset: 0x{0:08x} with error: '
         '{1!s}').format(string_offset, exception))

  # Remove the trailing end-of-string characters.
  return value_string.rstrip('\x00')
def _ReadStructureFromByteStream(
    self, byte_stream, file_offset, data_type_map, description, context=None):
  """Reads a structure from a byte stream.

  Args:
    byte_stream (bytes): byte stream.
    file_offset (int): offset of the structure data relative to the start
        of the file-like object, used only in error messages.
    data_type_map (dtfabric.DataTypeMap): data type map of the structure.
    description (str): description of the structure.
    context (Optional[dtfabric.DataTypeMapContext]): data type map context.

  Returns:
    object: structure values object.

  Raises:
    ParseError: if the structure cannot be read.
    ValueError: if byte stream or data type map is missing.
  """
  # Note: the docstring previously claimed a ValueError for a missing
  # file-like object, but the checks are on the byte stream and the map.
  if not byte_stream:
    raise ValueError('Missing byte stream.')

  if not data_type_map:
    raise ValueError('Missing data type map.')

  try:
    return data_type_map.MapByteStream(byte_stream, context=context)
  except (dtfabric_errors.ByteStreamTooSmallError,
          dtfabric_errors.MappingError) as exception:
    raise errors.ParseError((
        'Unable to map {0:s} data at offset: 0x{1:08x} with error: '
        '{2!s}').format(description, file_offset, exception))
def _ReadLNKFile(self, file_object):
  """Reads a LNK file.

  Args:
    file_object (file): file-like object.

  Returns:
    LNKFileEntry: a LNK file entry.

  Raises:
    ParseError: if the LNK file cannot be read.
  """
  file_offset = file_object.tell()
  if self._debug:
    self._DebugPrintText(
        'Reading LNK file at offset: 0x{0:08x}\n'.format(file_offset))

  identifier = '0x{0:08x}'.format(file_offset)
  lnk_file_entry = LNKFileEntry(identifier)

  try:
    lnk_file_entry.Open(file_object)
  except IOError as exception:
    # Use the !s conversion; the previous {1:s} format specification raises
    # TypeError when formatting an exception object.
    raise errors.ParseError((
        'Unable to parse LNK file at offset: 0x{0:08x} '
        'with error: {1!s}').format(file_offset, exception))

  if self._debug:
    self._DebugPrintText('\n')

  return lnk_file_entry
def _ReadLNKFile(self, olecf_item):
  """Reads a LNK file.

  Args:
    olecf_item (pyolecf.item): OLECF item.

  Returns:
    LNKFileEntry: a LNK file entry.

  Raises:
    ParseError: if the LNK file cannot be read.
  """
  if self._debug:
    text = 'Reading LNK file from stream: {0:s}'.format(olecf_item.name)
    self._DebugPrintText(text)

  lnk_file_entry = LNKFileEntry(olecf_item.name)

  try:
    lnk_file_entry.Open(olecf_item)
  except IOError as exception:
    # Use the !s conversion; the previous {1:s} format specification raises
    # TypeError when formatting an exception object.
    raise errors.ParseError((
        'Unable to parse LNK file from stream: {0:s} '
        'with error: {1!s}').format(olecf_item.name, exception))

  if self._debug:
    self._DebugPrintText('\n')

  return lnk_file_entry
def _GetValueDataAsObject(self, value):
  """Retrieves the value data as an object.

  Args:
    value (pyregf_value): value.

  Returns:
    object: data as a Python type.

  Raises:
    ParseError: if the value data cannot be read.
  """
  try:
    # Value types 1, 2 and 6 are read as strings — presumably REG_SZ,
    # REG_EXPAND_SZ and REG_LINK; confirm against pyregf.
    if value.type in (1, 2, 6):
      return value.get_data_as_string()

    # Value types 4, 5 and 11 are read as integers.
    if value.type in (4, 5, 11):
      return value.get_data_as_integer()

    # Value type 7 is read as a list of strings.
    if value.type == 7:
      return list(value.get_data_as_multi_string())

    # All remaining types are returned as raw data.
    return value.data

  except (IOError, OverflowError) as exception:
    raise errors.ParseError(
        'Unable to read data from value: {0:s} with error: {1!s}'.format(
            value.name, exception))
def _ReadFileHeader(self, file_object):
  """Reads the file header.

  Args:
    file_object (file): file-like object.

  Returns:
    recycle_bin_metadata_file_header: file header.

  Raises:
    ParseError: if the file header cannot be read.
  """
  header_map = self._GetDataTypeMap('recycle_bin_metadata_file_header')

  file_header, _ = self._ReadStructureFromFileObject(
      file_object, 0, header_map, 'file header')

  if self._debug:
    self._DebugPrintStructureObject(file_header, self._DEBUG_INFO_FILE_HEADER)

  format_version = file_header.format_version
  if format_version not in self._SUPPORTED_FORMAT_VERSION:
    raise errors.ParseError('Unsupported format version: {0:d}'.format(
        format_version))

  return file_header
def _ReadTransitionTimes64bit(self, file_object, file_header):
  """Reads 64-bit transition times.

  Args:
    file_object (file): file-like object.
    file_header (tzif_file_header): file header.

  Raises:
    ParseError: if the 64-bit transition times cannot be read.
  """
  file_offset = file_object.tell()
  transition_times_map = self._GetDataTypeMap('tzif_transition_times_64bit')

  # Each 64-bit transition time is 8 bytes of data.
  data_size = 8 * file_header.number_of_transition_times
  data = self._ReadData(
      file_object, file_offset, data_size, '64-bit transition times')

  # The number of elements in the array is determined from the file header
  # provided via the context.
  context = dtfabric_data_maps.DataTypeMapContext(values={
      'tzif_file_header': file_header})

  try:
    transition_times = self._ReadStructureFromByteStream(
        data, file_offset, transition_times_map, '64-bit transition times',
        context=context)
  except (ValueError, errors.ParseError) as exception:
    raise errors.ParseError((
        'Unable to parse 64-bit transition times value with error: '
        '{0!s}').format(exception))

  if self._debug:
    self._DebugPrintTransitionTimes(transition_times)
def _ReadOriginalFilename(self, file_object, format_version):
  """Reads the original filename.

  Args:
    file_object (file): file-like object.
    format_version (int): format version.

  Returns:
    str: filename or None on error.

  Raises:
    ParseError: if the original filename cannot be read.
  """
  # The string layout depends on the format version; later versions store
  # the string together with its size.
  if format_version == 1:
    string_map_name = 'recycle_bin_metadata_utf16le_string'
    description = 'UTF-16 little-endian string'
  else:
    string_map_name = 'recycle_bin_metadata_utf16le_string_with_size'
    description = 'UTF-16 little-endian string with size'

  file_offset = file_object.tell()
  string_map = self._GetDataTypeMap(string_map_name)

  try:
    original_filename, _ = self._ReadStructureFromFileObject(
        file_object, file_offset, string_map, description)
  except (ValueError, errors.ParseError) as exception:
    raise errors.ParseError(
        'Unable to parse original filename with error: {0!s}'.format(
            exception))

  # Remove the trailing end-of-string characters.
  if format_version == 1:
    return original_filename.rstrip('\x00')

  return original_filename.string.rstrip('\x00')
def _ReadDateTimeValue(self, byte_stream, file_offset):
  """Reads a RFC2579 date-time value.

  Args:
    byte_stream (bytes): byte stream.
    file_offset (int): offset of the attribute data relative to the start
        of the file-like object.

  Returns:
    dfdatetime.RFC2579DateTime: RFC2579 date-time stored in the value.

  Raises:
    ParseError: when the datetime value cannot be read.
  """
  datetime_value_map = self._GetDataTypeMap('cups_ipp_datetime_value')

  try:
    value = self._ReadStructureFromByteStream(
        byte_stream, file_offset, datetime_value_map, 'date-time value')
  except (ValueError, errors.ParseError) as exception:
    raise errors.ParseError(
        'Unable to parse datetime value with error: {0!s}'.format(exception))

  return dfdatetime_rfc2579_date_time.RFC2579DateTime(
      rfc2579_date_time_tuple=(
          value.year, value.month, value.day, value.hours, value.minutes,
          value.seconds, value.deciseconds, value.direction_from_utc,
          value.hours_from_utc, value.minutes_from_utc))
def _ReadAttributesGroup(self, file_object):
  """Reads an attributes group.

  Args:
    file_object (file): file-like object.

  Raises:
    ParseError: if the attributes group cannot be read.
  """
  tag_value_map = self._GetDataTypeMap('int8')

  tag_value = 0
  while tag_value != self._DELIMITER_TAG_END_OF_ATTRIBUTES:
    file_offset = file_object.tell()

    tag_value, _ = self._ReadStructureFromFileObject(
        file_object, file_offset, tag_value_map, 'tag value')

    if tag_value >= 0x10:
      # The tag starts an attribute; rewind so the attribute is read from
      # the start of the tag.
      file_object.seek(file_offset, os.SEEK_SET)

      self._ReadAttribute(file_object)

    elif (tag_value != self._DELIMITER_TAG_END_OF_ATTRIBUTES and
          tag_value not in self._DELIMITER_TAGS):
      raise errors.ParseError((
          'Unsupported attributes groups start tag value: '
          '0x{0:02x}.').format(tag_value))
def ReadFileObject(self, file_object):
  """Reads binary data from a file-like object.

  Args:
    file_object (file): file-like object.

  Raises:
    ParseError: if the format signature is not supported.
  """
  file_object.seek(0, os.SEEK_SET)

  # The largest signature is 6 bytes of data; the binary variants use only
  # the first 2 bytes.
  signature_data = file_object.read(6)

  self.file_format = None
  if len(signature_data) > 2:
    binary_signature = signature_data[:2]
    if binary_signature == self._CPIO_SIGNATURE_BINARY_BIG_ENDIAN:
      self.file_format = 'bin-big-endian'
    elif binary_signature == self._CPIO_SIGNATURE_BINARY_LITTLE_ENDIAN:
      self.file_format = 'bin-little-endian'
    elif signature_data == self._CPIO_SIGNATURE_PORTABLE_ASCII:
      self.file_format = 'odc'
    elif signature_data == self._CPIO_SIGNATURE_NEW_ASCII:
      self.file_format = 'newc'
    elif signature_data == self._CPIO_SIGNATURE_NEW_ASCII_WITH_CHECKSUM:
      self.file_format = 'crc'

  if self.file_format is None:
    raise errors.ParseError('Unsupported CPIO format.')

  self._ReadFileEntries(file_object)
def _ReadFileHeader(self, file_object):
  """Reads the file header.

  The file header is followed by an array of 32-bit page sizes, one per
  page, which is stored in self._page_sizes.

  Args:
    file_object (file): file-like object.

  Raises:
    ParseError: if the file header cannot be read.
  """
  data_type_map = self._GetDataTypeMap('binarycookies_file_header')

  file_header, file_header_data_size = self._ReadStructureFromFileObject(
      file_object, 0, data_type_map, 'file header')

  if self._debug:
    self._DebugPrintFileHeader(file_header)

  file_offset = file_header_data_size

  # TODO: move page sizes array into file header, this will require dtFabric
  # to compare signature as part of data map.
  # TODO: check for upper limit.

  # Each page size is a 32-bit value.
  page_sizes_data_size = file_header.number_of_pages * 4

  page_sizes_data = file_object.read(page_sizes_data_size)

  if self._debug:
    self._DebugPrintData('Page sizes data', page_sizes_data)

  # The number of elements in the page sizes array is determined from the
  # file header provided via the context.
  context = dtfabric_data_maps.DataTypeMapContext(
      values={'binarycookies_file_header': file_header})

  data_type_map = self._GetDataTypeMap('binarycookies_page_sizes')

  try:
    page_sizes_array = self._ReadStructureFromByteStream(
        page_sizes_data, file_offset, data_type_map, 'page sizes',
        context=context)
  except (ValueError, errors.ParseError) as exception:
    raise errors.ParseError((
        'Unable to map page sizes data at offset: 0x{0:08x} with error: '
        '{1!s}').format(file_offset, exception))

  self._page_sizes = []
  if file_header.number_of_pages > 0:
    for index, page_size in enumerate(page_sizes_array):
      self._page_sizes.append(page_size)

      if self._debug:
        description = 'Page: {0:d} size'.format(index)
        value_string = '{0:d}'.format(page_size)
        self._DebugPrintValue(description, value_string)

    if self._debug:
      self._DebugPrintText('\n')