def _CollectProgramsCacheFromValue(self, output_writer, key_path, value_name):
  """Collects Programs Cache from a Windows Registry value.

  Args:
    output_writer: the output writer object.
    key_path: the path of the Programs Cache key.
    value_name: the name of the Programs Cache value.
  """
  registry_key = self._registry.GetKeyByPath(key_path)
  if not registry_key:
    # Nothing to collect when the key does not exist.
    return

  self.key_found = True

  registry_value = registry_key.GetValueByName(value_name)
  if not registry_value:
    logging.warning(u'Missing {0:s} value in key: {1:s}'.format(
        value_name, key_path))
    return

  data = registry_value.data
  data_size = len(data)

  data_parser = ProgramsCacheDataParser(debug=self._debug)

  if self._debug:
    # TODO: replace WriteText by more output specific method e.g.
    # WriteValueData.
    output_writer.WriteText(u'Value data:')
    output_writer.WriteText(hexdump.Hexdump(data))

  data_parser.Parse(data, data_size)
def _CollectUserAssistFromKey(self, unused_output_writer, guid_sub_key):
  """Collects the User Assist information from a GUID sub key.

  Args:
    unused_output_writer: the output writer object (currently not used).
    guid_sub_key: the User Assist GUID Registry key (instance of
        dfwinreg.WinRegistryKey).
  """
  version_value = guid_sub_key.GetValueByName(u'Version')
  if not version_value:
    logging.warning(u'Missing Version value in sub key: {0:s}'.format(
        guid_sub_key.name))
    return

  format_version = version_value.GetDataAsObject()
  if format_version == 3:
    value_data_size = self._USER_ASSIST_V3_STRUCT.sizeof()
  elif format_version == 5:
    value_data_size = self._USER_ASSIST_V5_STRUCT.sizeof()
  else:
    # An unsupported format version previously left value_data_size
    # undefined, causing a NameError in the size check below.
    logging.warning(
        u'Unsupported format version: {0!s} in sub key: {1:s}'.format(
            format_version, guid_sub_key.name))
    return

  print(u'GUID\t\t: {0:s}'.format(guid_sub_key.name))
  print(u'Format version\t: {0:d}'.format(format_version))
  print(u'')

  count_sub_key = guid_sub_key.GetSubkeyByName(u'Count')
  if not count_sub_key:
    # Guard against a missing Count sub key instead of raising an
    # AttributeError on None.
    logging.warning(u'Missing Count sub key in sub key: {0:s}'.format(
        guid_sub_key.name))
    return

  for value in count_sub_key.GetValues():
    output_string = u'Original name\t: {0:s}'.format(value.name)
    print(output_string.encode(u'utf-8'))

    # Value names are ROT-13 obfuscated; fall back to a per-character
    # conversion when the whole-name conversion fails on non-ASCII input.
    try:
      value_name = value.name.decode(u'rot-13')
    except UnicodeEncodeError:
      characters = []
      for char in value.name:
        if ord(char) < 128:
          try:
            characters.append(char.decode(u'rot-13'))
          except UnicodeEncodeError:
            characters.append(char)
        else:
          characters.append(char)

      value_name = u''.join(characters)

    try:
      output_string = u'Converted name\t: {0:s}'.format(value_name)
      print(output_string.encode(u'utf-8'))
    except UnicodeEncodeError as exception:
      logging.warning(
          u'Unable to convert: {0:s} with error: {1:s}'.format(
              value.name, exception))

    print(u'Value data:')
    print(hexdump.Hexdump(value.data))

    # Detailed parsing is skipped for the UEME_CTLSESSION value.
    if value_name != u'UEME_CTLSESSION':
      if value_data_size != len(value.data):
        logging.warning(
            (u'Version: {0:d} size mismatch (calculated: {1:d}, '
             u'stored: {2:d}).').format(
                 format_version, value_data_size, len(value.data)))
        return

      if format_version == 3:
        parsed_data = self._USER_ASSIST_V3_STRUCT.parse(value.data)
      elif format_version == 5:
        parsed_data = self._USER_ASSIST_V5_STRUCT.parse(value.data)

      print(u'Unknown1\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          parsed_data.get(u'unknown1')))
      print(u'Execution count\t\t\t\t\t\t\t\t: {0:d}'.format(
          parsed_data.get(u'execution_count')))

      if format_version == 5:
        print(u'Application focus count\t\t\t\t\t\t\t: {0:d}'.format(
            parsed_data.get(u'application_focus_count')))
        print(u'Application focus duration\t\t\t\t\t\t: {0:d}'.format(
            parsed_data.get(u'application_focus_duration')))
        print(u'Unknown2\t\t\t\t\t\t\t\t: {0:.2f}'.format(
            parsed_data.get(u'unknown2')))
        print(u'Unknown3\t\t\t\t\t\t\t\t: {0:.2f}'.format(
            parsed_data.get(u'unknown3')))
        print(u'Unknown4\t\t\t\t\t\t\t\t: {0:.2f}'.format(
            parsed_data.get(u'unknown4')))
        print(u'Unknown5\t\t\t\t\t\t\t\t: {0:.2f}'.format(
            parsed_data.get(u'unknown5')))
        print(u'Unknown6\t\t\t\t\t\t\t\t: {0:.2f}'.format(
            parsed_data.get(u'unknown6')))
        print(u'Unknown7\t\t\t\t\t\t\t\t: {0:.2f}'.format(
            parsed_data.get(u'unknown7')))
        print(u'Unknown8\t\t\t\t\t\t\t\t: {0:.2f}'.format(
            parsed_data.get(u'unknown8')))
        print(u'Unknown9\t\t\t\t\t\t\t\t: {0:.2f}'.format(
            parsed_data.get(u'unknown9')))
        print(u'Unknown10\t\t\t\t\t\t\t\t: {0:.2f}'.format(
            parsed_data.get(u'unknown10')))
        print(u'Unknown11\t\t\t\t\t\t\t\t: {0:.2f}'.format(
            parsed_data.get(u'unknown11')))
        print(u'Unknown12\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
            parsed_data.get(u'unknown12')))

      # FILETIME-style value: 100-nanosecond intervals since 1601-01-01.
      # Use integer division (consistent with the sibling methods) to
      # avoid float precision loss on large timestamp values.
      timestamp = parsed_data.get(u'last_execution_time')
      date_string = (datetime.datetime(1601, 1, 1) +
                     datetime.timedelta(microseconds=timestamp // 10))

      print(u'Last execution time\t\t\t\t\t\t\t: {0!s} (0x{1:08x})'.format(
          date_string, timestamp))

      if format_version == 5:
        print(u'Unknown13\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
            parsed_data.get(u'unknown13')))

      print(u'')
def Collect(self, output_writer):
  """Collects the Task Cache.

  Looks up the Task Cache key, maps task identifier GUIDs (from the Tree
  sub key) to task names, then writes per-task information parsed from
  each task's DynamicInfo value.

  Args:
    output_writer (OutputWriter): output writer.
  """
  dynamic_info_size_error_reported = False

  self.key_found = False

  task_cache_key = self._registry.GetKeyByPath(self._TASK_CACHE_KEY_PATH)
  if not task_cache_key:
    return

  tasks_key = task_cache_key.GetSubkeyByName(u'Tasks')
  tree_key = task_cache_key.GetSubkeyByName(u'Tree')

  if not tasks_key or not tree_key:
    return

  self.key_found = True

  task_guids = {}
  for sub_key in tree_key.GetSubkeys():
    for value_key, id_value in self._GetIdValue(sub_key):
      # TODO: improve this check to a regex.
      # The GUID is in the form {%GUID%} and stored an UTF-16 little-endian
      # string and should be 78 bytes in size.
      id_value_data_size = len(id_value.data)
      if id_value_data_size != 78:
        # Pass the actual size to the message; the original logged the
        # literal placeholder because .format() was never called.
        logging.error(u'Unsupported Id value data size: {0:d}.'.format(
            id_value_data_size))
        continue

      guid_string = id_value.GetDataAsObject()
      task_guids[guid_string] = value_key.name

  for sub_key in tasks_key.GetSubkeys():
    dynamic_info_value = sub_key.GetValueByName(u'DynamicInfo')
    if not dynamic_info_value:
      continue

    dynamic_info_value_data = dynamic_info_value.data
    dynamic_info_value_data_size = len(dynamic_info_value_data)

    if self._debug:
      print(u'DynamicInfo value data:')
      print(hexdump.Hexdump(dynamic_info_value_data))

    # Two DynamicInfo variants exist, distinguished by size; the larger
    # one carries an additional timestamp (unknown_time).
    if dynamic_info_value_data_size == self._DYNAMIC_INFO_STRUCT_SIZE:
      dynamic_info_struct = self._DYNAMIC_INFO_STRUCT.parse(
          dynamic_info_value_data)

    elif dynamic_info_value_data_size == self._DYNAMIC_INFO2_STRUCT_SIZE:
      dynamic_info_struct = self._DYNAMIC_INFO2_STRUCT.parse(
          dynamic_info_value_data)

    else:
      # Report the unsupported size only once to avoid log spam.
      if not dynamic_info_size_error_reported:
        logging.error(
            u'Unsupported DynamicInfo value data size: {0:d}.'.format(
                dynamic_info_value_data_size))
        dynamic_info_size_error_reported = True
      continue

    last_registered_time = dynamic_info_struct.get(u'last_registered_time')
    launch_time = dynamic_info_struct.get(u'launch_time')
    unknown_time = dynamic_info_struct.get(u'unknown_time')

    if self._debug:
      print(u'Unknown1\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          dynamic_info_struct.get(u'unknown1')))

      # Timestamps are FILETIME: 100-nanosecond intervals since 1601-01-01.
      timestamp = last_registered_time // 10
      date_string = (datetime.datetime(1601, 1, 1) +
                     datetime.timedelta(microseconds=timestamp))
      print(u'Last registered time\t\t\t\t\t\t\t: {0!s} (0x{1:08x})'.format(
          date_string, last_registered_time))

      timestamp = launch_time // 10
      date_string = (datetime.datetime(1601, 1, 1) +
                     datetime.timedelta(microseconds=timestamp))
      print(u'Launch time\t\t\t\t\t\t\t\t: {0!s} (0x{1:08x})'.format(
          date_string, launch_time))

      print(u'Unknown2\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          dynamic_info_struct.get(u'unknown2')))
      print(u'Unknown3\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          dynamic_info_struct.get(u'unknown3')))

      if dynamic_info_value_data_size == self._DYNAMIC_INFO2_STRUCT_SIZE:
        timestamp = unknown_time // 10
        date_string = (datetime.datetime(1601, 1, 1) +
                       datetime.timedelta(microseconds=timestamp))
        print(u'Unknown time\t\t\t\t\t\t\t\t: {0!s} (0x{1:08x})'.format(
            date_string, unknown_time))

      print(u'')

    # Prefer the human readable name from the Tree mapping; fall back to
    # the GUID sub key name itself.
    name = task_guids.get(sub_key.name, sub_key.name)

    output_writer.WriteText(u'Task: {0:s}'.format(name))
    output_writer.WriteText(u'ID: {0:s}'.format(sub_key.name))

    timestamp = task_cache_key.last_written_time // 10
    date_string = (datetime.datetime(1601, 1, 1) +
                   datetime.timedelta(microseconds=timestamp))
    output_writer.WriteText(
        u'Last written time: {0!s}'.format(date_string))

    if last_registered_time:
      # Note this is likely either the last registered time or
      # the update time.
      timestamp = last_registered_time // 10
      date_string = (datetime.datetime(1601, 1, 1) +
                     datetime.timedelta(microseconds=timestamp))
      output_writer.WriteText(
          u'Last registered time: {0!s}'.format(date_string))

    if launch_time:
      # Note this is likely the launch time.
      timestamp = launch_time // 10
      date_string = (datetime.datetime(1601, 1, 1) +
                     datetime.timedelta(microseconds=timestamp))
      output_writer.WriteText(
          u'Launch time: {0!s}'.format(date_string))

    if unknown_time:
      timestamp = unknown_time // 10
      date_string = (datetime.datetime(1601, 1, 1) +
                     datetime.timedelta(microseconds=timestamp))
      output_writer.WriteText(
          u'Unknown time: {0!s}'.format(date_string))

    output_writer.WriteText(u'')
def Parse(self, value_data, value_data_size):
  """Parses the value data.

  Args:
    value_data: a binary string containing the value data.
    value_data_size: the size of the value data.

  Returns:
    None.

  Raises:
    RuntimeError: if the format is not supported.
  """
  header_struct = self._HEADER_STRUCT.parse(value_data)
  value_data_offset = self._HEADER_STRUCT.sizeof()

  format_version = header_struct.get(u'format_version')
  if self._debug:
    print(u'Format version\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
        format_version))

  # The header layout differs per format version; adjust the offset to
  # the start of the first entry (or entry footer) accordingly.
  if format_version == 0x01:
    # Skips 4 bytes after the common header — presumably unknown data;
    # TODO confirm.
    value_data_offset += 4

  elif format_version == 0x09:
    header_struct = self._HEADER_9_STRUCT.parse(value_data)
    value_data_offset += self._HEADER_9_STRUCT.sizeof()

    if self._debug:
      print(u'Unknown2\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          header_struct.get(u'unknown2')))

  elif format_version in [0x0c, 0x13]:
    # These versions carry a known folder identifier (GUID, 16 bytes
    # little-endian) directly after the format version.
    uuid_object = uuid.UUID(bytes_le=value_data[4:20])
    value_data_offset += 16

    if self._debug:
      print(u'Known folder identifier\t\t\t\t\t\t\t: {0!s}'.format(
          uuid_object))

  else:
    raise RuntimeError(u'Unsupported format.')

  # Version 0x09 has no leading entry footer; start with sentinel 0 so
  # the entry loop below is entered.
  if format_version == 0x09:
    sentinel = 0
  else:
    entry_footer_struct = self._ENTRY_FOOTER_STRUCT.parse(
        value_data[value_data_offset:])
    value_data_offset += self._ENTRY_FOOTER_STRUCT.sizeof()

    sentinel = entry_footer_struct.get(u'sentinel')
    if self._debug:
      print(u'Sentinel\t\t\t\t\t\t\t\t: 0x{0:02x}'.format(sentinel))

  if self._debug:
    print(u'')

  # Each entry is: header (with data size), shell item list data, footer
  # (with the sentinel that decides whether another entry follows).
  while sentinel in [0x00, 0x01]:
    entry_header_struct = self._ENTRY_HEADER_STRUCT.parse(
        value_data[value_data_offset:])
    value_data_offset += self._ENTRY_HEADER_STRUCT.sizeof()

    entry_data_size = entry_header_struct.get(u'data_size')

    if self._debug:
      print(u'Entry data offset\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          value_data_offset))
      print(u'Entry data size\t\t\t\t\t\t\t\t: {0:d}'.format(
          entry_data_size))

    # The entry data is a shell item list parsed with libfwsi (pyfwsi).
    shell_item_list = pyfwsi.item_list()
    shell_item_list.copy_from_byte_stream(
        value_data[value_data_offset:])

    for shell_item in iter(shell_item_list.items):
      if self._debug:
        print(u'Shell item: 0x{0:02x}'.format(
            shell_item.class_type))
        print(u'Shell item: {0:s}'.format(
            getattr(shell_item, u'name', u'')))

    value_data_offset += entry_data_size

    entry_footer_struct = self._ENTRY_FOOTER_STRUCT.parse(
        value_data[value_data_offset:])
    value_data_offset += self._ENTRY_FOOTER_STRUCT.sizeof()

    sentinel = entry_footer_struct.get(u'sentinel')
    if self._debug:
      print(u'Sentinel\t\t\t\t\t\t\t\t: 0x{0:02x}'.format(sentinel))
      print(u'')

    if sentinel == 0x02 and value_data_offset < value_data_size:
      # TODO: determine the logic to this value.
      # Skips forward to the next non-zero byte plus 7 bytes, then reads
      # another footer to re-establish the sentinel.
      while ord(value_data[value_data_offset]) != 0x00:
        value_data_offset += 1

      value_data_offset += 7

      entry_footer_struct = self._ENTRY_FOOTER_STRUCT.parse(
          value_data[value_data_offset:])
      value_data_offset += self._ENTRY_FOOTER_STRUCT.sizeof()

      sentinel = entry_footer_struct.get(u'sentinel')
      if self._debug:
        print(u'Sentinel\t\t\t\t\t\t\t\t: 0x{0:02x}'.format(
            sentinel))
        print(u'')

  # NOTE(review): trailing data is printed even when debug output is
  # disabled — confirm this is intended.
  if value_data_offset < value_data_size:
    print(u'Trailing data:')
    print(u'Trailing data offset\t\t\t\t\t\t\t: 0x{0:08x}'.format(
        value_data_offset))
    print(hexdump.Hexdump(value_data[value_data_offset:]))
def _CollectAppCompatCacheFromKey(self, output_writer, key_path):
  """Collects Application Compatibility Cache from a Windows Registry key.

  Args:
    output_writer: the output writer object.
    key_path: the path of the Application Compatibility Cache key.
  """
  app_compat_cache_key = self._registry.GetKeyByPath(key_path)
  if not app_compat_cache_key:
    return

  self.key_found = True
  value = app_compat_cache_key.GetValueByName(u'AppCompatCache')
  if not value:
    logging.warning(
        u'Missing AppCompatCache value in key: {0:s}'.format(key_path))
    return

  value_data = value.data
  value_data_size = len(value.data)

  # TODO: add non debug output
  # parser = AppCompatCacheDataParser(debug=self._debug)
  parser = AppCompatCacheDataParser(debug=True)

  if self._debug:
    # TODO: replace WriteText by more output specific method e.g.
    # WriteValueData.
    output_writer.WriteText(u'Value data:')
    output_writer.WriteText(hexdump.Hexdump(value_data))

  # The signature determines the cache format type (per Windows version).
  format_type = parser.CheckSignature(value_data)
  if not format_type:
    logging.warning(u'Unsupported signature.')
    return

  header_object = parser.ParseHeader(format_type, value_data)

  # On Windows Vista and 2008 when the cache is empty it will
  # only consist of the header.
  if value_data_size <= header_object.header_size:
    return

  cached_entry_offset = header_object.header_size
  cached_entry_size = parser.DetermineCacheEntrySize(
      format_type, value_data, cached_entry_offset)

  if not cached_entry_size:
    logging.warning(u'Unsupported cached entry size.')
    return

  # Walk the cached entries sequentially; each parsed entry reports its
  # own size, which advances the offset.
  cached_entry_index = 0
  while cached_entry_offset < value_data_size:
    cached_entry_object = parser.ParseCachedEntry(
        format_type, value_data, cached_entry_index, cached_entry_offset,
        cached_entry_size)
    cached_entry_offset += cached_entry_object.cached_entry_size
    cached_entry_index += 1

    # Stop once the header-declared number of entries has been parsed;
    # a count of 0 means the header does not provide one and the loop
    # runs to the end of the value data.
    if (header_object.number_of_cached_entries != 0 and
        cached_entry_index >= header_object.number_of_cached_entries):
      break
def ParseCachedEntry(self, format_type, value_data, cached_entry_index,
                     cached_entry_offset, cached_entry_size):
  """Parses a cached entry.

  Args:
    format_type: integer value that contains the format type.
    format_type: integer value that contains the format type.
    value_data: a binary string containing the value data.
    cached_entry_index: integer value that contains the cached entry index.
    cached_entry_offset: integer value that contains the offset of
        the cached entry data relative to the start of the value data.
    cached_entry_size: integer value that contains the cached entry
        data size.

  Returns:
    A cached entry object (instance of AppCompatCacheCachedEntry).

  Raises:
    RuntimeError: if the format type is not supported.
  """
  if format_type not in [
      self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003, self.FORMAT_TYPE_VISTA,
      self.FORMAT_TYPE_7, self.FORMAT_TYPE_8, self.FORMAT_TYPE_10]:
    raise RuntimeError(
        u'Unsupported format type: {0:d}'.format(format_type))

  # Slice out the cached entry; for the 8/10 formats this is re-sliced
  # below once the entry header reveals the actual entry size.
  cached_entry_data = value_data[
      cached_entry_offset:cached_entry_offset + cached_entry_size]

  if format_type in [
      self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003, self.FORMAT_TYPE_VISTA,
      self.FORMAT_TYPE_7]:
    if self._debug:
      print(u'Cached entry: {0:d} data:'.format(cached_entry_index))
      print(hexdump.Hexdump(cached_entry_data))

  elif format_type in [self.FORMAT_TYPE_8, self.FORMAT_TYPE_10]:
    if self._debug:
      print(u'Cached entry: {0:d} header data:'.format(
          cached_entry_index))
      # NOTE(review): dumps all but the last 2 bytes as header data —
      # confirm this is the intended header size.
      print(hexdump.Hexdump(cached_entry_data[:-2]))

  cached_entry_struct = None

  # Select the entry structure by format type and, where two variants
  # exist, by whether the entry size matches the 32-bit or 64-bit layout.
  if format_type == self.FORMAT_TYPE_XP:
    if cached_entry_size == self._CACHED_ENTRY_XP_32BIT_STRUCT.sizeof():
      cached_entry_struct = self._CACHED_ENTRY_XP_32BIT_STRUCT.parse(
          cached_entry_data)

  elif format_type == self.FORMAT_TYPE_2003:
    if cached_entry_size == self._CACHED_ENTRY_2003_32BIT_STRUCT.sizeof():
      cached_entry_struct = self._CACHED_ENTRY_2003_32BIT_STRUCT.parse(
          cached_entry_data)
    elif cached_entry_size == self._CACHED_ENTRY_2003_64BIT_STRUCT.sizeof():
      cached_entry_struct = self._CACHED_ENTRY_2003_64BIT_STRUCT.parse(
          cached_entry_data)

  elif format_type == self.FORMAT_TYPE_VISTA:
    if cached_entry_size == self._CACHED_ENTRY_VISTA_32BIT_STRUCT.sizeof():
      cached_entry_struct = self._CACHED_ENTRY_VISTA_32BIT_STRUCT.parse(
          cached_entry_data)
    elif cached_entry_size == self._CACHED_ENTRY_VISTA_64BIT_STRUCT.sizeof():
      cached_entry_struct = self._CACHED_ENTRY_VISTA_64BIT_STRUCT.parse(
          cached_entry_data)

  elif format_type == self.FORMAT_TYPE_7:
    if cached_entry_size == self._CACHED_ENTRY_7_32BIT_STRUCT.sizeof():
      cached_entry_struct = self._CACHED_ENTRY_7_32BIT_STRUCT.parse(
          cached_entry_data)
    elif cached_entry_size == self._CACHED_ENTRY_7_64BIT_STRUCT.sizeof():
      cached_entry_struct = self._CACHED_ENTRY_7_64BIT_STRUCT.parse(
          cached_entry_data)

  elif format_type in [self.FORMAT_TYPE_8, self.FORMAT_TYPE_10]:
    if cached_entry_data[0:4] not in [
        self._CACHED_ENTRY_SIGNATURE_8_0, self._CACHED_ENTRY_SIGNATURE_8_1]:
      raise RuntimeError(u'Unsupported cache entry signature')

    if cached_entry_size == self._CACHED_ENTRY_HEADER_8_STRUCT.sizeof():
      cached_entry_struct = self._CACHED_ENTRY_HEADER_8_STRUCT.parse(
          cached_entry_data)

      cached_entry_data_size = cached_entry_struct.get(
          u'cached_entry_data_size')
      # The real entry size is the header plus the data size stored in
      # the header — presumably the header is 12 bytes; TODO confirm.
      cached_entry_size = 12 + cached_entry_data_size

      cached_entry_data = value_data[
          cached_entry_offset:cached_entry_offset + cached_entry_size]

  if not cached_entry_struct:
    raise RuntimeError(u'Unsupported cache entry size: {0:d}'.format(
        cached_entry_size))

  if format_type in [self.FORMAT_TYPE_8, self.FORMAT_TYPE_10]:
    if self._debug:
      print(u'Cached entry: {0:d} data:'.format(cached_entry_index))
      print(hexdump.Hexdump(cached_entry_data))

  cached_entry_object = AppCompatCacheCachedEntry()
  cached_entry_object.cached_entry_size = cached_entry_size

  path_offset = 0
  data_size = 0

  if format_type == self.FORMAT_TYPE_XP:
    # The XP path is stored in-line as UTF-16 little-endian within a
    # fixed 528-byte buffer; scan for the 2-byte end-of-string character.
    string_size = 0
    for string_index in xrange(0, 528, 2):
      if (ord(cached_entry_data[string_index]) == 0 and
          ord(cached_entry_data[string_index + 1]) == 0):
        break
      string_size += 2

    cached_entry_object.path = cached_entry_data[0:string_size].decode(
        u'utf-16-le')

    if self._debug:
      print(u'Path\t\t\t\t\t\t\t\t\t: {0:s}'.format(
          cached_entry_object.path))

  elif format_type in [
      self.FORMAT_TYPE_2003, self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7]:
    # These formats store the path out-of-line; the entry only carries
    # its size and offset. The path itself is read further below.
    path_size = cached_entry_struct.get(u'path_size')
    maximum_path_size = cached_entry_struct.get(u'maximum_path_size')
    path_offset = cached_entry_struct.get(u'path_offset')

    if self._debug:
      print(u'Path size\t\t\t\t\t\t\t\t: {0:d}'.format(path_size))
      print(u'Maximum path size\t\t\t\t\t\t\t: {0:d}'.format(
          maximum_path_size))
      print(u'Path offset\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          path_offset))

  elif format_type in [self.FORMAT_TYPE_8, self.FORMAT_TYPE_10]:
    path_size = cached_entry_struct.get(u'path_size')

    if self._debug:
      print(u'Signature\t\t\t\t\t\t\t\t: {0:s}'.format(
          cached_entry_data[0:4]))
      print(u'Unknown1\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          cached_entry_struct.get(u'unknown1')))
      print(u'Cached entry data size\t\t\t\t\t\t\t: {0:d}'.format(
          cached_entry_data_size))
      print(u'Path size\t\t\t\t\t\t\t\t: {0:d}'.format(path_size))

    # The path directly follows the entry header — presumably 14 bytes
    # in this format; TODO confirm.
    cached_entry_data_offset = 14 + path_size
    cached_entry_object.path = cached_entry_data[
        14:cached_entry_data_offset].decode(u'utf-16-le')

    if self._debug:
      print(u'Path\t\t\t\t\t\t\t\t\t: {0:s}'.format(
          cached_entry_object.path))

    if format_type == self.FORMAT_TYPE_8:
      remaining_data = cached_entry_data[cached_entry_data_offset:]

      cached_entry_object.insertion_flags = construct.ULInt32(
          u'insertion_flags').parse(remaining_data[0:4])
      cached_entry_object.shim_flags = construct.ULInt32(
          u'shim_flags').parse(remaining_data[4:8])

      if self._debug:
        print(u'Insertion flags\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
            cached_entry_object.insertion_flags))
        print(u'Shim flags\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
            cached_entry_object.shim_flags))

      # The two 8.x signatures differ in the size of the flags block.
      if cached_entry_data[0:4] == self._CACHED_ENTRY_SIGNATURE_8_0:
        cached_entry_data_offset += 8

      elif cached_entry_data[0:4] == self._CACHED_ENTRY_SIGNATURE_8_1:
        if self._debug:
          print(u'Unknown1\t\t\t\t\t\t\t\t: 0x{0:04x}'.format(
              construct.ULInt16(u'unknown1').parse(
                  remaining_data[8:10])))

        cached_entry_data_offset += 10

    # Re-slice after the flags block so remaining_data starts at the
    # timestamp for both the 8 and 10 formats.
    remaining_data = cached_entry_data[cached_entry_data_offset:]

  if format_type in [
      self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003, self.FORMAT_TYPE_VISTA,
      self.FORMAT_TYPE_7]:
    cached_entry_object.last_modification_time = cached_entry_struct.get(
        u'last_modification_time')

  elif format_type in [self.FORMAT_TYPE_8, self.FORMAT_TYPE_10]:
    cached_entry_object.last_modification_time = construct.ULInt64(
        u'last_modification_time').parse(remaining_data[0:8])

  if not cached_entry_object.last_modification_time:
    if self._debug:
      print(u'Last modification time\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          cached_entry_object.last_modification_time))

  else:
    # FILETIME: 100-nanosecond intervals since January 1, 1601.
    timestamp = cached_entry_object.last_modification_time // 10
    date_string = (datetime.datetime(1601, 1, 1) +
                   datetime.timedelta(microseconds=timestamp))

    if self._debug:
      print(u'Last modification time\t\t\t\t\t\t\t: {0!s} (0x{1:08x})'.format(
          date_string, cached_entry_object.last_modification_time))

  if format_type in [self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003]:
    cached_entry_object.file_size = cached_entry_struct.get(
        u'file_size')

    if self._debug:
      print(u'File size\t\t\t\t\t\t\t\t: {0:d}'.format(
          cached_entry_object.file_size))

  elif format_type in [self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7]:
    cached_entry_object.insertion_flags = cached_entry_struct.get(
        u'insertion_flags')
    cached_entry_object.shim_flags = cached_entry_struct.get(
        u'shim_flags')

    if self._debug:
      print(u'Insertion flags\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          cached_entry_object.insertion_flags))
      print(u'Shim flags\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          cached_entry_object.shim_flags))

  if format_type == self.FORMAT_TYPE_XP:
    cached_entry_object.last_update_time = cached_entry_struct.get(
        u'last_update_time')

    if not cached_entry_object.last_update_time:
      if self._debug:
        print(u'Last update time\t\t\t\t\t\t\t: 0x{0:08x}'.format(
            cached_entry_object.last_update_time))

    else:
      timestamp = cached_entry_object.last_update_time // 10
      date_string = (datetime.datetime(1601, 1, 1) +
                     datetime.timedelta(microseconds=timestamp))

      if self._debug:
        print(u'Last update time\t\t\t\t\t\t\t: {0!s} (0x{1:08x})'.format(
            date_string, cached_entry_object.last_update_time))

  if format_type == self.FORMAT_TYPE_7:
    data_offset = cached_entry_struct.get(u'data_offset')
    data_size = cached_entry_struct.get(u'data_size')

    if self._debug:
      print(u'Data offset\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          data_offset))
      print(u'Data size\t\t\t\t\t\t\t\t: {0:d}'.format(data_size))

  elif format_type in [self.FORMAT_TYPE_8, self.FORMAT_TYPE_10]:
    data_offset = cached_entry_offset + cached_entry_data_offset + 12
    data_size = construct.ULInt32(u'data_size').parse(
        remaining_data[8:12])

    if self._debug:
      print(u'Data size\t\t\t\t\t\t\t\t: {0:d}'.format(data_size))

  if self._debug:
    print(u'')

  # Out-of-line path (2003, Vista, 7 formats): read it from the value
  # data using the offset and sizes taken from the entry above. For XP
  # path_offset stays 0 so this block is skipped.
  if path_offset > 0 and path_size > 0:
    path_size += path_offset
    maximum_path_size += path_offset

    if self._debug:
      print(u'Path data:')
      print(
          hexdump.Hexdump(value_data[path_offset:maximum_path_size]))

    cached_entry_object.path = value_data[
        path_offset:path_size].decode(u'utf-16-le')

    if self._debug:
      print(u'Path\t\t\t\t\t\t\t\t\t: {0:s}'.format(
          cached_entry_object.path))
      print(u'')

  if data_size > 0:
    data_size += data_offset

    cached_entry_object.data = value_data[data_offset:data_size]

    if self._debug:
      print(u'Data:')
      print(hexdump.Hexdump(cached_entry_object.data))

  return cached_entry_object
def ParseHeader(self, format_type, value_data):
  """Parses the header.

  Args:
    format_type: integer value that contains the format type.
    value_data: a binary string containing the value data.

  Returns:
    A header object (instance of AppCompatCacheHeader).

  Raises:
    RuntimeError: if the format type is not supported.
  """
  if format_type not in [
      self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003, self.FORMAT_TYPE_VISTA,
      self.FORMAT_TYPE_7, self.FORMAT_TYPE_8, self.FORMAT_TYPE_10]:
    raise RuntimeError(
        u'Unsupported format type: {0:d}'.format(format_type))

  header_object = AppCompatCacheHeader()

  # Each format type has its own header structure; the header size is
  # taken from the corresponding structure definition.
  if format_type == self.FORMAT_TYPE_XP:
    header_object.header_size = self._HEADER_XP_32BIT_STRUCT.sizeof()
    header_struct = self._HEADER_XP_32BIT_STRUCT.parse(value_data)

  elif format_type == self.FORMAT_TYPE_2003:
    header_object.header_size = self._HEADER_2003_STRUCT.sizeof()
    header_struct = self._HEADER_2003_STRUCT.parse(value_data)

  elif format_type == self.FORMAT_TYPE_VISTA:
    header_object.header_size = self._HEADER_VISTA_STRUCT.sizeof()
    header_struct = self._HEADER_VISTA_STRUCT.parse(value_data)

  elif format_type == self.FORMAT_TYPE_7:
    header_object.header_size = self._HEADER_7_STRUCT.sizeof()
    header_struct = self._HEADER_7_STRUCT.parse(value_data)

  elif format_type == self.FORMAT_TYPE_8:
    header_object.header_size = self._HEADER_8_STRUCT.sizeof()
    header_struct = self._HEADER_8_STRUCT.parse(value_data)

  elif format_type == self.FORMAT_TYPE_10:
    header_object.header_size = self._HEADER_10_STRUCT.sizeof()
    header_struct = self._HEADER_10_STRUCT.parse(value_data)

  if self._debug:
    print(u'Header data:')
    print(hexdump.Hexdump(value_data[0:header_object.header_size]))

  if self._debug:
    print(u'Signature\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
        header_struct.get(u'signature')))

  # Note that FORMAT_TYPE_8 is absent from this list: its cached entry
  # count is not read from the header here — presumably not stored in
  # that format; confirm.
  if format_type in [
      self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003, self.FORMAT_TYPE_VISTA,
      self.FORMAT_TYPE_7, self.FORMAT_TYPE_10]:
    header_object.number_of_cached_entries = header_struct.get(
        u'number_of_cached_entries')

    if self._debug:
      print(u'Number of cached entries\t\t\t\t\t\t: {0:d}'.format(
          header_object.number_of_cached_entries))

  if format_type == self.FORMAT_TYPE_XP:
    number_of_lru_entries = header_struct.get(u'number_of_lru_entries')
    if self._debug:
      print(u'Number of LRU entries\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          number_of_lru_entries))
      print(u'Unknown1\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          header_struct.get(u'unknown1')))
      print(u'LRU entries:')

    # The LRU entry array starts at offset 16 within the XP header;
    # each entry is a 32-bit cached entry index.
    data_offset = 16
    # NOTE(review): 96 appears to be the maximum number of LRU entries
    # in this format — confirm.
    if number_of_lru_entries > 0 and number_of_lru_entries <= 96:
      for lru_entry_index in range(number_of_lru_entries):
        lru_entry = construct.ULInt32(u'cache_entry_index').parse(
            value_data[data_offset:data_offset + 4])
        data_offset += 4

        if self._debug:
          print((u'LRU entry: {0:d}\t\t\t\t\t\t\t\t: {1:d} '
                 u'(offset: 0x{2:08x})').format(
                     lru_entry_index, lru_entry, 400 + (lru_entry * 552)))

      if self._debug:
        print(u'')

    if self._debug:
      print(u'Unknown data:')
      print(hexdump.Hexdump(value_data[data_offset:400]))

  elif format_type == self.FORMAT_TYPE_8:
    if self._debug:
      print(u'Unknown1\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          header_struct.get(u'unknown1')))

  if self._debug:
    print(u'')

  return header_object