def testUTF16StreamCopyToString(self):
  """Test copying an UTF-16 byte stream to a string."""
  test_file_path = self._GetTestFilePath(['PING.EXE-B29F6629.pf'])
  with open(test_file_path, 'rb') as file_object:
    byte_stream = file_object.read()

  # Read a null char terminated string.
  string = binary.UTF16StreamCopyToString(byte_stream[0x10:])
  self.assertEqual(string, 'PING.EXE')

  # Read fixed size strings: the same buffer truncated at two different
  # sizes yields two different path strings.
  fixed_size_test_cases = [
      (44, '\\DEVICE\\HARDDISKVOLUME'),
      (46, '\\DEVICE\\HARDDISKVOLUME1')]

  for byte_stream_size, expected_string in fixed_size_test_cases:
    string = binary.UTF16StreamCopyToString(
        byte_stream[0x27f8:], byte_stream_size=byte_stream_size)
    self.assertEqual(string, expected_string)

  # Read another null char terminated string.
  expected_string = (
      '\\DEVICE\\HARDDISKVOLUME1\\WINDOWS\\SYSTEM32\\NTDLL.DLL')
  string = binary.UTF16StreamCopyToString(byte_stream[7236:])
  self.assertEqual(string, expected_string)
def __init__(self, date_time, date_time_description, entry_offset, dest_list_entry, droid_volume_identifier, droid_file_identifier, birth_droid_volume_identifier, birth_droid_file_identifier): """Initializes an event. Args: date_time (dfdatetime.DateTimeValues): date and time values. date_time_description (str): description of the meaning of the date and time values. entry_offset (int): offset of the DestList entry relative to the start of the DestList stream. droid_volume_identifier (str): droid volume identifier. droid_file_identifier (str): droid file identifier. birth_droid_volume_identifier (str): birth droid volume identifier. birth_droid_file_identifier (str): birth droid file identifier. dest_list_entry (construct.Struct): DestList entry. """ # TODO: move to parser plugin. hostname = binary.ByteStreamCopyToString(dest_list_entry.hostname, codepage=u'ascii') path = binary.UTF16StreamCopyToString(dest_list_entry.path) super(AutomaticDestinationsDestListEntryEvent, self).__init__(date_time, date_time_description) self.birth_droid_file_identifier = birth_droid_file_identifier self.birth_droid_volume_identifier = birth_droid_volume_identifier self.droid_file_identifier = droid_file_identifier self.droid_volume_identifier = droid_volume_identifier self.entry_number = dest_list_entry.entry_number self.hostname = hostname self.offset = entry_offset self.path = path self.pin_status = dest_list_entry.pin_status
def __init__(
    self, timestamp, timestamp_description, file_header, file_information,
    mapped_files, path, volume_serial_numbers, volume_device_paths):
  """Initializes the event.

  Args:
    timestamp: The FILETIME timestamp value.
    timestamp_description: The usage string for the timestamp value.
    file_header: The file header construct object.
    file_information: The file information construct object.
    mapped_files: A list of the mapped filenames.
    path: A path to the executable.
    volume_serial_numbers: A list of volume serial number strings.
    volume_device_paths: A list of volume device path strings.
  """
  super(WinPrefetchExecutionEvent, self).__init__(
      timestamp, timestamp_description)

  self.offset = 0

  # Values derived from the file header.
  self.executable = binary.UTF16StreamCopyToString(
      file_header.get(u'executable', u''))
  self.prefetch_hash = file_header.get(u'prefetch_hash', None)
  self.version = file_header.get(u'version', None)

  # Values derived from the file information.
  self.number_of_volumes = file_information.get(u'number_of_volumes', 0)
  self.run_count = file_information.get(u'run_count', None)

  # Values passed through as-is.
  self.mapped_files = mapped_files
  self.path = path
  self.volume_device_paths = volume_device_paths
  self.volume_serial_numbers = volume_serial_numbers
def __init__(
    self, timestamp, timestamp_description, entry_offset, dest_list_entry):
  """Initializes the event object.

  Args:
    timestamp: The FILETIME value for the timestamp.
    timestamp_description: The usage string for the timestamp value.
    entry_offset: The offset of the DestList entry relative to the start
        of the DestList stream.
    dest_list_entry: The DestList entry (instance of construct.Struct).
  """
  super(AutomaticDestinationsDestListEntryEvent, self).__init__(
      timestamp, timestamp_description)

  self.offset = entry_offset

  # Values copied directly from the DestList entry.
  self.entry_number = dest_list_entry.entry_number
  self.pin_status = dest_list_entry.pin_status

  # The hostname is a byte stream decoded as ASCII; the path is stored
  # as a UTF-16 byte stream.
  self.hostname = binary.ByteStreamCopyToString(
      dest_list_entry.hostname, codepage=u'ascii')
  self.path = binary.UTF16StreamCopyToString(dest_list_entry.path)

  # Distributed link tracking (droid) identifiers stored as 16-byte GUIDs.
  self.droid_file_identifier = binary.ByteStreamCopyToGuid(
      dest_list_entry.droid_file_identifier)
  self.droid_volume_identifier = binary.ByteStreamCopyToGuid(
      dest_list_entry.droid_volume_identifier)
  self.birth_droid_file_identifier = binary.ByteStreamCopyToGuid(
      dest_list_entry.birth_droid_file_identifier)
  self.birth_droid_volume_identifier = binary.ByteStreamCopyToGuid(
      dest_list_entry.birth_droid_volume_identifier)
def _ParseCachedEntry(
    self, format_type, value_data, cached_entry_offset, cached_entry_size):
  """Parses a cached entry.

  Args:
    format_type: integer value that contains the format type.
    value_data: a binary string containing the value data.
    cached_entry_offset: integer value that contains the offset of
        the cached entry data relative to the start of the value data.
    cached_entry_size: integer value that contains the cached entry
        data size.

  Returns:
    A cached entry object (instance of AppCompatCacheCachedEntry).

  Raises:
    RuntimeError: if the format type is not supported.
  """
  if format_type not in [
      self._FORMAT_TYPE_XP, self._FORMAT_TYPE_2003, self._FORMAT_TYPE_VISTA,
      self._FORMAT_TYPE_7, self._FORMAT_TYPE_8, self._FORMAT_TYPE_10]:
    raise RuntimeError(
        u'[{0:s}] Unsupported format type: {1:d}'.format(
            self.NAME, format_type))

  cached_entry_data = value_data[
      cached_entry_offset:cached_entry_offset + cached_entry_size]

  # Select the cached entry structure by matching the entry size against
  # the sizeof() of the known 32-bit and 64-bit layouts per format type.
  cached_entry_struct = None
  if format_type == self._FORMAT_TYPE_XP:
    if cached_entry_size == self._CACHED_ENTRY_XP_32BIT_STRUCT.sizeof():
      cached_entry_struct = self._CACHED_ENTRY_XP_32BIT_STRUCT.parse(
          cached_entry_data)

  elif format_type == self._FORMAT_TYPE_2003:
    if cached_entry_size == self._CACHED_ENTRY_2003_32BIT_STRUCT.sizeof():
      cached_entry_struct = self._CACHED_ENTRY_2003_32BIT_STRUCT.parse(
          cached_entry_data)

    elif cached_entry_size == self._CACHED_ENTRY_2003_64BIT_STRUCT.sizeof():
      cached_entry_struct = self._CACHED_ENTRY_2003_64BIT_STRUCT.parse(
          cached_entry_data)

  elif format_type == self._FORMAT_TYPE_VISTA:
    if cached_entry_size == self._CACHED_ENTRY_VISTA_32BIT_STRUCT.sizeof():
      cached_entry_struct = self._CACHED_ENTRY_VISTA_32BIT_STRUCT.parse(
          cached_entry_data)

    elif cached_entry_size == self._CACHED_ENTRY_VISTA_64BIT_STRUCT.sizeof():
      cached_entry_struct = self._CACHED_ENTRY_VISTA_64BIT_STRUCT.parse(
          cached_entry_data)

  elif format_type == self._FORMAT_TYPE_7:
    if cached_entry_size == self._CACHED_ENTRY_7_32BIT_STRUCT.sizeof():
      cached_entry_struct = self._CACHED_ENTRY_7_32BIT_STRUCT.parse(
          cached_entry_data)

    elif cached_entry_size == self._CACHED_ENTRY_7_64BIT_STRUCT.sizeof():
      cached_entry_struct = self._CACHED_ENTRY_7_64BIT_STRUCT.parse(
          cached_entry_data)

  elif format_type in [self._FORMAT_TYPE_8, self._FORMAT_TYPE_10]:
    # Windows 8 and 10 entries carry a 4-byte signature in front.
    if cached_entry_data[0:4] not in [
        self._CACHED_ENTRY_SIGNATURE_8_0, self._CACHED_ENTRY_SIGNATURE_8_1]:
      raise RuntimeError(
          u'[{0:s}] Unsupported cache entry signature'.format(self.NAME))

    if cached_entry_size == self._CACHED_ENTRY_HEADER_8_STRUCT.sizeof():
      cached_entry_struct = self._CACHED_ENTRY_HEADER_8_STRUCT.parse(
          cached_entry_data)

      # The header only declares the size of the variable-sized entry data
      # that follows; re-slice the full entry (12-byte header + data).
      cached_entry_data_size = cached_entry_struct.get(
          u'cached_entry_data_size')
      cached_entry_size = 12 + cached_entry_data_size

      cached_entry_data = value_data[
          cached_entry_offset:cached_entry_offset + cached_entry_size]

  if not cached_entry_struct:
    raise RuntimeError(
        u'[{0:s}] Unsupported cache entry size: {1:d}'.format(
            self.NAME, cached_entry_size))

  cached_entry_object = AppCompatCacheCachedEntry()
  cached_entry_object.cached_entry_size = cached_entry_size

  path_offset = 0
  data_size = 0

  if format_type == self._FORMAT_TYPE_XP:
    # XP stores the path inline as a fixed 528-byte UTF-16 buffer; scan
    # 2 bytes at a time for the UTF-16 NUL terminator to find its size.
    string_size = 0
    for string_index in xrange(0, 528, 2):
      if (ord(cached_entry_data[string_index]) == 0 and
          ord(cached_entry_data[string_index + 1]) == 0):
        break
      string_size += 2

    cached_entry_object.path = binary.UTF16StreamCopyToString(
        cached_entry_data[0:string_size])

  elif format_type in [
      self._FORMAT_TYPE_2003, self._FORMAT_TYPE_VISTA, self._FORMAT_TYPE_7]:
    # These formats reference the path by offset/size into the value data;
    # it is resolved below once path_offset and path_size are known.
    path_size = cached_entry_struct.get(u'path_size')
    path_offset = cached_entry_struct.get(u'path_offset')

  elif format_type in [self._FORMAT_TYPE_8, self._FORMAT_TYPE_10]:
    # Windows 8/10 store the path inline at offset 14 of the entry data.
    path_size = cached_entry_struct.get(u'path_size')

    cached_entry_data_offset = 14 + path_size
    cached_entry_object.path = binary.UTF16StreamCopyToString(
        cached_entry_data[14:cached_entry_data_offset])

    if format_type == self._FORMAT_TYPE_8:
      remaining_data = cached_entry_data[cached_entry_data_offset:]

      cached_entry_object.insertion_flags = construct.ULInt32(
          u'insertion_flags').parse(remaining_data[0:4])
      cached_entry_object.shim_flags = construct.ULInt32(
          u'shim_flags').parse(remaining_data[4:8])

      # The two Windows 8 signatures imply different flag-block sizes.
      if cached_entry_data[0:4] == self._CACHED_ENTRY_SIGNATURE_8_0:
        cached_entry_data_offset += 8
      elif cached_entry_data[0:4] == self._CACHED_ENTRY_SIGNATURE_8_1:
        cached_entry_data_offset += 10

    remaining_data = cached_entry_data[cached_entry_data_offset:]

  if format_type in [
      self._FORMAT_TYPE_XP, self._FORMAT_TYPE_2003, self._FORMAT_TYPE_VISTA,
      self._FORMAT_TYPE_7]:
    cached_entry_object.last_modification_time = cached_entry_struct.get(
        u'last_modification_time')

  elif format_type in [self._FORMAT_TYPE_8, self._FORMAT_TYPE_10]:
    cached_entry_object.last_modification_time = construct.ULInt64(
        u'last_modification_time').parse(remaining_data[0:8])

  if format_type in [self._FORMAT_TYPE_XP, self._FORMAT_TYPE_2003]:
    cached_entry_object.file_size = cached_entry_struct.get(u'file_size')

  elif format_type in [self._FORMAT_TYPE_VISTA, self._FORMAT_TYPE_7]:
    cached_entry_object.insertion_flags = cached_entry_struct.get(
        u'insertion_flags')
    cached_entry_object.shim_flags = cached_entry_struct.get(u'shim_flags')

  if format_type == self._FORMAT_TYPE_XP:
    cached_entry_object.last_update_time = cached_entry_struct.get(
        u'last_update_time')

  if format_type == self._FORMAT_TYPE_7:
    data_offset = cached_entry_struct.get(u'data_offset')
    data_size = cached_entry_struct.get(u'data_size')

  elif format_type in [self._FORMAT_TYPE_8, self._FORMAT_TYPE_10]:
    # 12 skips the trailing last_modification_time (8) + data_size (4)
    # fields read from remaining_data above.
    data_offset = cached_entry_offset + cached_entry_data_offset + 12
    data_size = construct.ULInt32(u'data_size').parse(remaining_data[8:12])

  # Resolve an out-of-line path (2003/Vista/7). Note the short-circuit:
  # for XP path_offset stays 0, so path_size is never referenced unbound.
  if path_offset > 0 and path_size > 0:
    path_size += path_offset

    cached_entry_object.path = binary.UTF16StreamCopyToString(
        value_data[path_offset:path_size])

  if data_size > 0:
    data_size += data_offset

    cached_entry_object.data = value_data[data_offset:data_size]

  return cached_entry_object
def ParseFileObject(self, parser_mediator, file_object, **kwargs):
  """Parses a Windows Prefetch file-like object.

  Args:
    parser_mediator: A parser mediator object (instance of ParserMediator).
    file_object: A file-like object.

  Raises:
    UnableToParseFile: when the file cannot be parsed.
  """
  file_object.seek(0, os.SEEK_SET)

  file_header = self._ParseFileHeader(file_object)

  # Only format versions 17 (XP/2003), 23 (Vista/7) and 26 (8.1) are
  # supported here.
  format_version = file_header.get(u'version', None)
  if format_version not in [17, 23, 26]:
    raise errors.UnableToParseFile(
        u'Unsupported format version: {0:d}'.format(format_version))

  file_information = self._ParseFileInformation(file_object, format_version)
  metrics_array = self._ParseMetricsArray(
      file_object, format_version, file_information)
  try:
    filename_strings = self._ParseFilenameStrings(
        file_object, file_information)
  except UnicodeDecodeError as exception:
    # Filename strings are best-effort: log and continue with an empty
    # mapping rather than failing the whole file.
    file_name = parser_mediator.GetDisplayName()
    logging.warning((
        u'[{0:s}] Unable to parse filename information from file {1:s} '
        u'with error: {2:s}').format(
            parser_mediator.GetParserChain(), file_name, exception))
    filename_strings = {}

  if len(metrics_array) != len(filename_strings):
    logging.debug(
        u'Mismatch in number of metrics and filename strings array entries.')

  executable = binary.UTF16StreamCopyToString(
      file_header.get(u'executable', u''))

  volume_serial_numbers = []
  volume_device_paths = []
  path = u''

  for volume_information in self._ParseVolumesInformationSection(
      file_object, format_version, file_information):
    volume_serial_number = volume_information.get(u'serial_number', 0)
    volume_device_path = self._ParseVolumeDevicePath(
        file_object, file_information, volume_information)

    volume_serial_numbers.append(volume_serial_number)
    volume_device_paths.append(volume_device_path)

    # Produce a volume creation event when a creation time is present.
    timestamp = volume_information.get(u'creation_time', 0)
    if timestamp:
      event_object = windows_events.WindowsVolumeCreationEvent(
          timestamp, volume_device_path, volume_serial_number,
          parser_mediator.GetFilename())
      parser_mediator.ProduceEvent(event_object)

    # Derive the executable path: the filename string that is on this
    # volume and ends with the executable name from the file header.
    for filename in filename_strings.itervalues():
      if not filename:
        continue
      if (filename.startswith(volume_device_path) and
          filename.endswith(executable)):
        _, _, path = filename.partition(volume_device_path)

  mapped_files = []
  for metrics_array_entry in metrics_array:
    file_reference = metrics_array_entry.get(u'file_reference', 0)
    filename_string_offset = metrics_array_entry.get(
        u'filename_string_offset', 0)

    filename = filename_strings.get(filename_string_offset, u'')
    if not filename:
      logging.debug(
          u'Missing filename string for offset: {0:d}.'.format(
              filename_string_offset))
      continue

    if file_reference:
      # Split the NTFS file reference: lower 48 bits are the MFT entry
      # number, upper 16 bits the sequence number.
      mapped_file_string = (
          u'{0:s} [MFT entry: {1:d}, sequence: {2:d}]').format(
              filename, file_reference & 0xffffffffffffL,
              file_reference >> 48)
    else:
      mapped_file_string = filename

    mapped_files.append(mapped_file_string)

  timestamp = file_information.get(u'last_run_time', 0)
  if timestamp:
    event_object = WinPrefetchExecutionEvent(
        timestamp, eventdata.EventTimestamp.LAST_RUNTIME, file_header,
        file_information, mapped_files, path, volume_serial_numbers,
        volume_device_paths)
    parser_mediator.ProduceEvent(event_object)

  # Check for the 7 older last run time values available in v26.
  if format_version == 26:
    for last_run_time_index in range(1, 8):
      last_run_time_identifier = u'last_run_time{0:d}'.format(
          last_run_time_index)

      timestamp = file_information.get(last_run_time_identifier, 0)
      if timestamp:
        event_object = WinPrefetchExecutionEvent(
            timestamp,
            u'Previous {0:s}'.format(eventdata.EventTimestamp.LAST_RUNTIME),
            file_header, file_information, mapped_files, path,
            volume_serial_numbers, volume_device_paths)
        parser_mediator.ProduceEvent(event_object)
def ParseDestList(self, parser_mediator, olecf_item):
  """Parses the DestList OLECF item.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    olecf_item (pyolecf.item): OLECF item.

  Raises:
    UnableToParseFile: if the DestList header or an entry cannot be parsed.
  """
  try:
    header = self._DEST_LIST_STREAM_HEADER.parse_stream(olecf_item)
  except (IOError, construct.FieldError) as exception:
    raise errors.UnableToParseFile(
        'Unable to parse DestList header with error: {0!s}'.format(
            exception))

  if header.format_version not in (1, 3, 4):
    parser_mediator.ProduceExtractionError(
        'unsupported format version: {0:d}.'.format(header.format_version))
    # Without a known format version no stream entry structure can be
    # selected; returning here avoids an UnboundLocalError below.
    return

  if header.format_version == 1:
    dest_list_stream_entry = self._DEST_LIST_STREAM_ENTRY_V1
  elif header.format_version in (3, 4):
    dest_list_stream_entry = self._DEST_LIST_STREAM_ENTRY_V3

  entry_offset = olecf_item.get_offset()
  while entry_offset < olecf_item.size:
    try:
      entry = dest_list_stream_entry.parse_stream(olecf_item)
    except (IOError, construct.FieldError) as exception:
      raise errors.UnableToParseFile(
          'Unable to parse DestList entry with error: {0!s}'.format(
              exception))

    if not entry:
      break

    display_name = 'DestList entry at offset: 0x{0:08x}'.format(entry_offset)

    # The four droid (distributed link tracking) identifiers are each
    # parsed best-effort: on failure record an extraction error and fall
    # back to an empty string so the entry is still produced.
    try:
      droid_volume_identifier = self._ParseDistributedTrackingIdentifier(
          parser_mediator, entry.droid_volume_identifier, display_name)
    except (TypeError, ValueError) as exception:
      droid_volume_identifier = ''
      parser_mediator.ProduceExtractionError(
          'unable to read droid volume identifier with error: {0!s}'.format(
              exception))

    try:
      droid_file_identifier = self._ParseDistributedTrackingIdentifier(
          parser_mediator, entry.droid_file_identifier, display_name)
    except (TypeError, ValueError) as exception:
      droid_file_identifier = ''
      parser_mediator.ProduceExtractionError(
          'unable to read droid file identifier with error: {0!s}'.format(
              exception))

    try:
      birth_droid_volume_identifier = (
          self._ParseDistributedTrackingIdentifier(
              parser_mediator, entry.birth_droid_volume_identifier,
              display_name))
    except (TypeError, ValueError) as exception:
      birth_droid_volume_identifier = ''
      # Note: {0!s} instead of {0:s} since exceptions do not support
      # the string format specification.
      parser_mediator.ProduceExtractionError((
          'unable to read birth droid volume identifier with error: '
          '{0!s}').format(exception))

    try:
      birth_droid_file_identifier = self._ParseDistributedTrackingIdentifier(
          parser_mediator, entry.birth_droid_file_identifier, display_name)
    except (TypeError, ValueError) as exception:
      birth_droid_file_identifier = ''
      parser_mediator.ProduceExtractionError((
          'unable to read birth droid file identifier with error: '
          '{0!s}').format(exception))

    # A FILETIME of 0 means the timestamp was never set.
    if entry.last_modification_time == 0:
      date_time = dfdatetime_semantic_time.SemanticTime('Not set')
    else:
      date_time = dfdatetime_filetime.Filetime(
          timestamp=entry.last_modification_time)

    event_data = AutomaticDestinationsDestListEntryEventData()
    event_data.birth_droid_file_identifier = birth_droid_file_identifier
    event_data.birth_droid_volume_identifier = birth_droid_volume_identifier
    event_data.droid_file_identifier = droid_file_identifier
    event_data.droid_volume_identifier = droid_volume_identifier
    event_data.entry_number = entry.entry_number
    event_data.hostname = binary.ByteStreamCopyToString(
        entry.hostname, codepage='ascii')
    event_data.offset = entry_offset
    event_data.path = binary.UTF16StreamCopyToString(entry.path)
    event_data.pin_status = entry.pin_status

    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)

    entry_offset = olecf_item.get_offset()