def _CreateTestKey(self, key_path, time_string):
  """Creates Registry keys and values for testing.

  Args:
    key_path (str): Windows Registry key path.
    time_string (str): key last written date and time.

  Returns:
    dfwinreg.WinRegistryKey: Windows Registry key.
  """
  filetime = dfdatetime_filetime.Filetime()
  filetime.CopyFromString(time_string)
  registry_key = dfwinreg_fake.FakeWinRegistryKey(
      'Network', key_path=key_path, last_written_time=filetime.timestamp,
      offset=153)

  # Setup H drive.
  h_key = dfwinreg_fake.FakeWinRegistryKey(
      'H', last_written_time=filetime.timestamp)
  registry_key.AddSubkey(h_key)

  value_data = b'\x00\x00\x00\x01'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'ConnectionType', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  h_key.AddValue(registry_value)

  value_data = b'\x00\x00\x00\x04'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'DeferFlags', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  h_key.AddValue(registry_value)

  value_data = b'\x00\x00\x00\x01'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'ProviderFlags', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  h_key.AddValue(registry_value)

  value_data = 'Microsoft Windows Network'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'ProviderName', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  h_key.AddValue(registry_value)

  value_data = b'\x00\x02\x00\x00'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'ProviderType', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  h_key.AddValue(registry_value)

  value_data = '\\\\acme.local\\Shares\\User_Data\\John.Doe'.encode(
      'utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'RemotePath', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  h_key.AddValue(registry_value)

  value_data = b'\x00\x00\x00\x00'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'UserName', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  h_key.AddValue(registry_value)

  # Setup Z drive.
  z_key = dfwinreg_fake.FakeWinRegistryKey(
      'Z', last_written_time=filetime.timestamp)
  registry_key.AddSubkey(z_key)

  value_data = b'\x00\x00\x00\x01'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'ConnectionType', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  z_key.AddValue(registry_value)

  value_data = b'\x00\x00\x00\x04'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'DeferFlags', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  z_key.AddValue(registry_value)

  value_data = b'\x00\x00\x00\x01'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'ProviderFlags', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  z_key.AddValue(registry_value)

  value_data = 'Microsoft Windows Network'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'ProviderName', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  z_key.AddValue(registry_value)

  value_data = b'\x00\x02\x00\x00'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'ProviderType', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  z_key.AddValue(registry_value)

  value_data = '\\\\secret_computer\\Media'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'RemotePath', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  z_key.AddValue(registry_value)

  value_data = b'\x00\x00\x00\x00'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'UserName', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  z_key.AddValue(registry_value)

  return registry_key
def _CreateTestEvents(self):
  """Creates events for testing.

  Returns:
    list[EventObject]: events.
  """
  test_events = []

  filetime = dfdatetime_filetime.Filetime()

  event_data = windows_events.WindowsRegistryEventData()
  event_data.key_path = 'MY AutoRun key'
  event_data.parser = 'UNKNOWN'
  event_data.regvalue = {'Value': 'c:/Temp/evil.exe'}

  filetime.CopyFromDateTimeString('2012-04-20 22:38:46.929596')
  event = time_events.DateTimeValuesEvent(
      filetime, definitions.TIME_DESCRIPTION_WRITTEN)
  self._MergeEventAndEventData(event, event_data)
  test_events.append(event)

  event_data = windows_events.WindowsRegistryEventData()
  event_data.key_path = (
      'HKEY_CURRENT_USER\\Secret\\EvilEmpire\\Malicious_key')
  event_data.parser = 'UNKNOWN'
  event_data.regvalue = {'Value': 'send all the exes to the other world'}

  filetime.CopyFromDateTimeString('2012-04-20 23:56:46.929596')
  event = time_events.DateTimeValuesEvent(
      filetime, definitions.TIME_DESCRIPTION_WRITTEN)
  self._MergeEventAndEventData(event, event_data)
  test_events.append(event)

  event_data = windows_events.WindowsRegistryEventData()
  event_data.key_path = 'HKEY_CURRENT_USER\\Windows\\Normal'
  event_data.parser = 'UNKNOWN'
  event_data.regvalue = {'Value': 'run all the benign stuff'}

  filetime.CopyFromDateTimeString('2012-04-20 16:44:46')
  event = time_events.DateTimeValuesEvent(
      filetime, definitions.TIME_DESCRIPTION_WRITTEN)
  self._MergeEventAndEventData(event, event_data)
  test_events.append(event)

  timestamp = timelib.Timestamp.CopyFromString('2009-04-05 12:27:39')

  # TODO: refactor to use event data.
  event = time_events.TimestampEvent(
      timestamp, definitions.TIME_DESCRIPTION_WRITTEN,
      data_type='text:entry')
  event.hostname = 'nomachine'
  event.offset = 12
  event.parser = 'UNKNOWN'
  event.text = (
      'This is a line by someone not reading the log line properly. And '
      'since this log line exceeds the accepted 80 chars it will be '
      'shortened.')
  event.username = '******'
  test_events.append(event)

  return test_events
def ParseFileObject(self, parser_mediator, file_object, **kwargs):
  """Parses a Windows Prefetch file-like object.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    file_object (dfvfs.FileIO): file-like object.
  """
  scca_file = pyscca.file()

  try:
    scca_file.open_file_object(file_object)
  except IOError as exception:
    parser_mediator.ProduceExtractionError(
        u'unable to open file with error: {0!s}'.format(exception))
    return

  format_version = scca_file.format_version
  executable_filename = scca_file.executable_filename
  prefetch_hash = scca_file.prefetch_hash
  run_count = scca_file.run_count
  number_of_volumes = scca_file.number_of_volumes

  volume_serial_numbers = []
  volume_device_paths = []
  path = u''

  for volume_information in iter(scca_file.volumes):
    volume_serial_number = volume_information.serial_number
    volume_device_path = volume_information.device_path

    volume_serial_numbers.append(volume_serial_number)
    volume_device_paths.append(volume_device_path)

    timestamp = volume_information.get_creation_time_as_integer()
    if timestamp:
      event_data = windows_events.WindowsVolumeEventData()
      event_data.device_path = volume_device_path
      event_data.origin = parser_mediator.GetFilename()
      event_data.serial_number = volume_serial_number

      date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_CREATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    for filename in iter(scca_file.filenames):
      if not filename:
        continue

      if (filename.startswith(volume_device_path) and
          filename.endswith(executable_filename)):
        _, _, path = filename.partition(volume_device_path)

  mapped_files = []
  for entry_index, file_metrics in enumerate(
      scca_file.file_metrics_entries):
    mapped_file_string = file_metrics.filename
    if not mapped_file_string:
      parser_mediator.ProduceExtractionError(
          u'missing filename for file metrics entry: {0:d}'.format(
              entry_index))
      continue

    file_reference = file_metrics.file_reference
    if file_reference:
      mapped_file_string = (
          u'{0:s} [MFT entry: {1:d}, sequence: {2:d}]').format(
              mapped_file_string, file_reference & 0xffffffffffff,
              file_reference >> 48)

    mapped_files.append(mapped_file_string)

  event_data = WinPrefetchExecutionEventData()
  event_data.executable = executable_filename
  event_data.mapped_files = mapped_files
  event_data.number_of_volumes = number_of_volumes
  event_data.path = path
  event_data.prefetch_hash = prefetch_hash
  event_data.run_count = run_count
  event_data.version = format_version
  event_data.volume_device_paths = volume_device_paths
  event_data.volume_serial_numbers = volume_serial_numbers

  timestamp = scca_file.get_last_run_time_as_integer(0)
  if not timestamp:
    parser_mediator.ProduceExtractionError(u'missing last run time')
    date_time = dfdatetime_semantic_time.SemanticTime(u'Not set')
  else:
    date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)

  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_LAST_RUN)
  parser_mediator.ProduceEventWithEventData(event, event_data)

  # Check for the 7 older last run time values available since
  # format version 26.
  if format_version >= 26:
    for last_run_time_index in range(1, 8):
      timestamp = scca_file.get_last_run_time_as_integer(
          last_run_time_index)
      if not timestamp:
        continue

      date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
      date_time_description = u'Previous {0:s}'.format(
          definitions.TIME_DESCRIPTION_LAST_RUN)
      event = time_events.DateTimeValuesEvent(
          date_time, date_time_description)
      parser_mediator.ProduceEventWithEventData(event, event_data)

  scca_file.close()
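# Illustrative sketch (not part of the parser above; the helper name is
# hypothetical): how the 64-bit NTFS file reference stored in a file metrics
# entry splits into the MFT entry and sequence number used for the mapped
# file strings above.
def SplitNTFSFileReference(file_reference):
  """Splits a 64-bit NTFS file reference into MFT entry and sequence."""
  mft_entry = file_reference & 0xffffffffffff  # lower 48 bits
  sequence_number = file_reference >> 48  # upper 16 bits
  return mft_entry, sequence_number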
def ParseDestList(self, parser_mediator, olecf_item):
  """Parses the DestList OLECF item.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    olecf_item (pyolecf.item): OLECF item.

  Raises:
    UnableToParseFile: if the DestList cannot be parsed.
  """
  header_map = self._GetDataTypeMap('dest_list_header')

  try:
    header, entry_offset = self._ReadStructureFromFileObject(
        olecf_item, 0, header_map)
  except (ValueError, errors.ParseError) as exception:
    raise errors.UnableToParseFile(
        'Unable to parse DestList header with error: {0!s}'.format(
            exception))

  if header.format_version == 1:
    entry_map = self._GetDataTypeMap('dest_list_entry_v1')
  elif header.format_version in (3, 4):
    entry_map = self._GetDataTypeMap('dest_list_entry_v3')
  else:
    parser_mediator.ProduceExtractionWarning(
        'unsupported format version: {0:d}.'.format(header.format_version))
    return

  while entry_offset < olecf_item.size:
    try:
      entry, entry_data_size = self._ReadStructureFromFileObject(
          olecf_item, entry_offset, entry_map)
    except (ValueError, errors.ParseError) as exception:
      raise errors.UnableToParseFile(
          'Unable to parse DestList entry with error: {0!s}'.format(
              exception))

    display_name = 'DestList entry at offset: 0x{0:08x}'.format(entry_offset)

    try:
      droid_volume_identifier = self._ParseDistributedTrackingIdentifier(
          parser_mediator, entry.droid_volume_identifier, display_name)
    except (TypeError, ValueError) as exception:
      droid_volume_identifier = ''
      parser_mediator.ProduceExtractionWarning(
          'unable to read droid volume identifier with error: {0!s}'.format(
              exception))

    try:
      droid_file_identifier = self._ParseDistributedTrackingIdentifier(
          parser_mediator, entry.droid_file_identifier, display_name)
    except (TypeError, ValueError) as exception:
      droid_file_identifier = ''
      parser_mediator.ProduceExtractionWarning(
          'unable to read droid file identifier with error: {0!s}'.format(
              exception))

    try:
      birth_droid_volume_identifier = (
          self._ParseDistributedTrackingIdentifier(
              parser_mediator, entry.birth_droid_volume_identifier,
              display_name))
    except (TypeError, ValueError) as exception:
      birth_droid_volume_identifier = ''
      parser_mediator.ProduceExtractionWarning((
          'unable to read birth droid volume identifier with error: '
          '{0!s}').format(exception))

    try:
      birth_droid_file_identifier = self._ParseDistributedTrackingIdentifier(
          parser_mediator, entry.birth_droid_file_identifier, display_name)
    except (TypeError, ValueError) as exception:
      birth_droid_file_identifier = ''
      parser_mediator.ProduceExtractionWarning((
          'unable to read birth droid file identifier with error: '
          '{0!s}').format(exception))

    if entry.last_modification_time == 0:
      date_time = dfdatetime_semantic_time.NotSet()
    else:
      date_time = dfdatetime_filetime.Filetime(
          timestamp=entry.last_modification_time)

    event_data = AutomaticDestinationsDestListEntryEventData()
    event_data.birth_droid_file_identifier = birth_droid_file_identifier
    event_data.birth_droid_volume_identifier = birth_droid_volume_identifier
    event_data.droid_file_identifier = droid_file_identifier
    event_data.droid_volume_identifier = droid_volume_identifier
    event_data.entry_number = entry.entry_number
    event_data.hostname = entry.hostname.rstrip('\x00')
    event_data.offset = entry_offset
    event_data.path = entry.path.rstrip('\x00')
    event_data.pin_status = entry.pin_status

    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)

    entry_offset += entry_data_size
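# Illustrative sketch (assumption: standalone helper, not the plugin's
# _ParseDistributedTrackingIdentifier): a droid (distributed object
# identifier) value is a 16-byte little-endian GUID, which the uuid module
# can render as a string.
import uuid

def FormatDroidIdentifier(identifier_bytes):
  """Formats a 16-byte droid identifier as a GUID string."""
  return '{{{0!s}}}'.format(uuid.UUID(bytes_le=identifier_bytes))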
def _CreateTestKey(self, key_path, time_string):
  """Creates Registry keys and values for testing.

  Args:
    key_path (str): Windows Registry key path.
    time_string (str): key last written date and time.

  Returns:
    dfwinreg.WinRegistryKey: a Windows Registry key.
  """
  filetime = dfdatetime_filetime.Filetime()
  filetime.CopyFromDateTimeString(time_string)
  registry_key = dfwinreg_fake.FakeWinRegistryKey(
      'Winlogon', key_path=key_path, last_written_time=filetime.timestamp,
      offset=153)

  # Setup Winlogon values.
  value_data = '1'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'AutoAdminLogon', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  registry_key.AddValue(registry_value)

  value_data = b'\x00\x00\x00\x01'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'AutoRestartShell', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  registry_key.AddValue(registry_value)

  value_data = '0 0 0'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'Background', data=value_data, data_type=dfwinreg_definitions.REG_SZ)
  registry_key.AddValue(registry_value)

  value_data = '10'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'CachedLogonsCount', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  registry_key.AddValue(registry_value)

  value_data = 'no'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'DebugServerCommand', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  registry_key.AddValue(registry_value)

  value_data = ''.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'DefaultDomainName', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  registry_key.AddValue(registry_value)

  value_data = 'user'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'DefaultUserName', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  registry_key.AddValue(registry_value)

  value_data = b'\x00\x00\x00\x01'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'DisableCAD', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  registry_key.AddValue(registry_value)

  value_data = b'\x00\x00\x00\x00'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'ForceUnlockLogon', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  registry_key.AddValue(registry_value)

  value_data = ''.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'LegalNoticeCaption', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  registry_key.AddValue(registry_value)

  value_data = ''.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'LegalNoticeText', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  registry_key.AddValue(registry_value)

  value_data = b'\x00\x00\x00\x05'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'PasswordExpiryWarning', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  registry_key.AddValue(registry_value)

  value_data = '0'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'PowerdownAfterShutdown', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  registry_key.AddValue(registry_value)

  value_data = '{A520A1A4-1780-4FF6-BD18-167343C5AF16}'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'PreCreateKnownFolders', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  registry_key.AddValue(registry_value)

  value_data = '1'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'ReportBootOk', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  registry_key.AddValue(registry_value)

  value_data = 'explorer.exe'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'Shell', data=value_data, data_type=dfwinreg_definitions.REG_SZ)
  registry_key.AddValue(registry_value)

  value_data = b'\x00\x00\x00\x2b'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'ShutdownFlags', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  registry_key.AddValue(registry_value)

  value_data = '0'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'ShutdownWithoutLogon', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  registry_key.AddValue(registry_value)

  value_data = 'C:\\Windows\\system32\\userinit.exe'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'Userinit', data=value_data, data_type=dfwinreg_definitions.REG_SZ)
  registry_key.AddValue(registry_value)

  value_data = 'SystemPropertiesPerformance.exe/pagefile'.encode(
      'utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'VMApplet', data=value_data, data_type=dfwinreg_definitions.REG_SZ)
  registry_key.AddValue(registry_value)

  value_data = '0'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'WinStationsDisabled', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  registry_key.AddValue(registry_value)

  # Setup registered event handlers.
  notify_key_name = 'Notify'
  notify_key = dfwinreg_fake.FakeWinRegistryKey(notify_key_name)
  registry_key.AddSubkey(notify_key_name, notify_key)

  navlogon_key_name = 'NavLogon'
  navlogon_key = dfwinreg_fake.FakeWinRegistryKey(
      navlogon_key_name, last_written_time=filetime.timestamp)
  notify_key.AddSubkey(navlogon_key_name, navlogon_key)

  value_data = 'NavLogon.dll'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'DllName', data=value_data, data_type=dfwinreg_definitions.REG_SZ)
  navlogon_key.AddValue(registry_value)

  value_data = 'NavLogoffEvent'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'Logoff', data=value_data, data_type=dfwinreg_definitions.REG_SZ)
  navlogon_key.AddValue(registry_value)

  value_data = 'NavStartShellEvent'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'StartShell', data=value_data, data_type=dfwinreg_definitions.REG_SZ)
  navlogon_key.AddValue(registry_value)

  secret_malware_key_name = 'SecretMalware'
  secret_malware_key = dfwinreg_fake.FakeWinRegistryKey(
      secret_malware_key_name, last_written_time=filetime.timestamp)
  notify_key.AddSubkey(secret_malware_key_name, secret_malware_key)

  value_data = b'\x00\x00\x00\x00'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'Asynchronous', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  secret_malware_key.AddValue(registry_value)

  value_data = 'secret_malware.dll'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'DllName', data=value_data, data_type=dfwinreg_definitions.REG_SZ)
  secret_malware_key.AddValue(registry_value)

  value_data = b'\x00\x00\x00\x00'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'Impersonate', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  secret_malware_key.AddValue(registry_value)

  value_data = 'secretEventLock'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'Lock', data=value_data, data_type=dfwinreg_definitions.REG_SZ)
  secret_malware_key.AddValue(registry_value)

  value_data = 'secretEventLogoff'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'Logoff', data=value_data, data_type=dfwinreg_definitions.REG_SZ)
  secret_malware_key.AddValue(registry_value)

  value_data = 'secretEventLogon'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'Logon', data=value_data, data_type=dfwinreg_definitions.REG_SZ)
  secret_malware_key.AddValue(registry_value)

  value_data = 'secretEventShutdown'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'Shutdown', data=value_data, data_type=dfwinreg_definitions.REG_SZ)
  secret_malware_key.AddValue(registry_value)

  value_data = 'secretEventSmartCardLogonNotify'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'SmartCardLogonNotify', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  secret_malware_key.AddValue(registry_value)

  value_data = 'secretEventStartShell'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'StartShell', data=value_data, data_type=dfwinreg_definitions.REG_SZ)
  secret_malware_key.AddValue(registry_value)

  value_data = 'secretEventStartup'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'Startup', data=value_data, data_type=dfwinreg_definitions.REG_SZ)
  secret_malware_key.AddValue(registry_value)

  value_data = 'secretEventStopScreenSaver'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'StopScreenSaver', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  secret_malware_key.AddValue(registry_value)

  value_data = 'secretEventUnlock'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'Unlock', data=value_data, data_type=dfwinreg_definitions.REG_SZ)
  secret_malware_key.AddValue(registry_value)

  return registry_key
def access_time(self):
  """dfdatetime.Filetime: access time or None if not set."""
  timestamp = self._fsntfs_attribute.get_access_time_as_integer()
  return dfdatetime_filetime.Filetime(timestamp=timestamp)
def entry_modification_time(self):
  """dfdatetime.Filetime: entry modification time or None if not set."""
  timestamp = self._fsntfs_attribute.get_entry_modification_time_as_integer()
  return dfdatetime_filetime.Filetime(timestamp=timestamp)
def _ParseFileReferenceKey(self, parser_mediator, file_reference_key):
  """Parses a file reference key (sub key of Root\\File\\%VOLUME%) for events.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    file_reference_key (pyregf.key): file reference key.
  """
  event_data = AMCacheFileEventData()

  try:
    if '0000' in file_reference_key.name:
      # An NTFS file reference is a combination of MFT entry and sequence
      # number.
      sequence_number, mft_entry = file_reference_key.name.split('0000')
      mft_entry = int(mft_entry, 16)
      sequence_number = int(sequence_number, 16)
      event_data.file_reference = '{0:d}-{1:d}'.format(
          mft_entry, sequence_number)
    else:
      # A FAT file reference is a single number.
      file_reference = int(file_reference_key.name, 16)
      event_data.file_reference = '{0:d}'.format(file_reference)
  except (ValueError, TypeError):
    pass

  for value_name, attribute_name in self._FILE_REFERENCE_KEY_VALUES.items():
    value = file_reference_key.get_value_by_name(value_name)
    if not value:
      continue

    value_data = self._GetValueDataAsObject(parser_mediator, value)
    if attribute_name == 'sha1' and value_data.startswith('0000'):
      # Strip off the 4 leading zeros from the SHA-1 hash.
      value_data = value_data[4:]

    setattr(event_data, attribute_name, value_data)

  amcache_time_value = file_reference_key.get_value_by_name(
      self._AMCACHE_ENTRY_WRITE_TIME)
  if amcache_time_value:
    amcache_time = filetime.Filetime(
        amcache_time_value.get_data_as_integer())
    event = time_events.DateTimeValuesEvent(
        amcache_time, definitions.TIME_DESCRIPTION_MODIFICATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)

  creation_time_value = file_reference_key.get_value_by_name(
      self._AMCACHE_FILE_CREATION_TIME)
  if creation_time_value:
    creation_time = filetime.Filetime(
        creation_time_value.get_data_as_integer())
    event = time_events.DateTimeValuesEvent(
        creation_time, definitions.TIME_DESCRIPTION_CREATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)

  modification_time_value = file_reference_key.get_value_by_name(
      self._AMCACHE_FILE_MODIFICATION_TIME)
  if modification_time_value:
    modification_time = filetime.Filetime(
        modification_time_value.get_data_as_integer())
    event = time_events.DateTimeValuesEvent(
        modification_time, definitions.TIME_DESCRIPTION_MODIFICATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)

  compilation_time_value = file_reference_key.get_value_by_name(
      self._AMCACHE_COMPILATION_TIME)
  if compilation_time_value:
    link_time = posix_time.PosixTime(
        compilation_time_value.get_data_as_integer())
    event = time_events.DateTimeValuesEvent(
        link_time, definitions.TIME_DESCRIPTION_CHANGE)
    parser_mediator.ProduceEventWithEventData(event, event_data)
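# Illustrative sketch (not part of the plugin above; the helper name is
# hypothetical): how an Amcache file reference key name decomposes, mirroring
# the NTFS/FAT handling above.
def DecomposeFileReferenceKeyName(key_name):
  """Returns a human readable file reference for a key name."""
  if '0000' in key_name:
    # NTFS: sequence number and MFT entry in hexadecimal, separated by
    # four zero nibbles.
    sequence_number, mft_entry = key_name.split('0000')
    return '{0:d}-{1:d}'.format(int(mft_entry, 16), int(sequence_number, 16))

  # FAT: a single hexadecimal number.
  return '{0:d}'.format(int(key_name, 16))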
def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
  """Extracts events from a Windows Registry key.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
  """
  # TODO: Test other Office versions to make sure this plugin is
  # applicable.
  entries = []
  for registry_value in registry_key.GetValues():
    # Ignore any value not in the form: 'Item [0-9]+'.
    if not registry_value.name or not self._RE_VALUE_NAME.search(
        registry_value.name):
      continue

    # Ignore any value that is empty or that does not contain a string.
    if not registry_value.data or not registry_value.DataIsString():
      continue

    value_string = registry_value.GetDataAsObject()
    values = self._RE_VALUE_DATA.findall(value_string)

    # The result is expected to be a list containing a single tuple of
    # 2 values.
    if len(values) != 1 or len(values[0]) != 2:
      continue

    try:
      timestamp = int(values[0][0], 16)
    except ValueError:
      parser_mediator.ProduceExtractionWarning((
          'unable to convert filetime string to an integer for '
          'value: {0:s}.').format(registry_value.name))
      continue

    event_data = OfficeMRUWindowsRegistryEventData()
    event_data.key_path = registry_key.path.replace('\\', '/')
    # TODO: split value string in individual values.
    event_data.value_string = value_string

    entries.append('{0:s}: {1:s}'.format(registry_value.name, value_string))

    if not timestamp:
      date_time = dfdatetime_semantic_time.SemanticTime('Not set')
    else:
      date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)

    # TODO: determine if this should be last written time.
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_WRITTEN)
    parser_mediator.ProduceEventWithEventData(event, event_data)

  event_data = OfficeMRUListWindowsRegistryEventData()
  event_data.entries = ' '.join([value for value in entries]) or None
  event_data.key_path = registry_key.path.replace('\\', '/')

  event = time_events.DateTimeValuesEvent(
      registry_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
  parser_mediator.ProduceEventWithEventData(event, event_data)
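# Illustrative sketch (hypothetical sample value; the real extraction uses
# _RE_VALUE_DATA as shown above): an Office MRU value string embeds the last
# written FILETIME as hexadecimal between "[T" and "]", which is converted to
# an integer.
sample_value = '[F00000000][T01CD0146EA1EADB0][O00000000]*C:/example.docx'
hex_timestamp = sample_value.split('[T', 1)[1].split(']', 1)[0]
timestamp = int(hex_timestamp, 16)  # FILETIME as a 64-bit integer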
def _CreateTestKey(self, key_path, time_string):
  """Creates Registry keys and values for testing.

  Args:
    key_path (str): Windows Registry key path.
    time_string (str): key last written date and time.

  Returns:
    dfwinreg.WinRegistryKey: a Windows Registry key.
  """
  filetime = dfdatetime_filetime.Filetime()
  filetime.CopyFromString(time_string)
  registry_key = dfwinreg_fake.FakeWinRegistryKey(
      u'NetworkList', key_path=key_path,
      last_written_time=filetime.timestamp, offset=153)

  # Setup Profiles.
  profiles = dfwinreg_fake.FakeWinRegistryKey(u'Profiles')
  registry_key.AddSubkey(profiles)

  profile_1 = dfwinreg_fake.FakeWinRegistryKey(
      u'{B358E985-4464-4ABD-AF99-7D4A0AF66BB7}')
  profiles.AddSubkey(profile_1)

  value_data = b'\x00\x00\x00\x00'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      u'Category', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  profile_1.AddValue(registry_value)

  value_data = (
      b'\xde\x07\x0c\x00\x02\x00\x10\x00\x08\x00\x04\x00\x27\x00\x6a\x00')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      u'DateCreated', data=value_data,
      data_type=dfwinreg_definitions.REG_BINARY)
  profile_1.AddValue(registry_value)

  value_data = (
      b'\xdf\x07\x01\x00\x02\x00\x1b\x00\x0f\x00\x0f\x00\x1b\x00\xc5\x03')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      u'DateLastConnected', data=value_data,
      data_type=dfwinreg_definitions.REG_BINARY)
  profile_1.AddValue(registry_value)

  value_data = u'My Awesome Wifi Hotspot'.encode(u'utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      u'Description', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  profile_1.AddValue(registry_value)

  value_data = b'\x00\x00\x00\x00'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      u'Managed', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  profile_1.AddValue(registry_value)

  value_data = b'\x00\x00\x00\x47'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      u'NameType', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  profile_1.AddValue(registry_value)

  value_data = u'My Awesome Wifi Hotspot'.encode(u'utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      u'ProfileName', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  profile_1.AddValue(registry_value)

  profile_2 = dfwinreg_fake.FakeWinRegistryKey(
      u'{C1C57B58-BFE2-428B-818C-9D69A873AD3D}')
  profiles.AddSubkey(profile_2)

  value_data = b'\x00\x00\x00\x00'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      u'Category', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  profile_2.AddValue(registry_value)

  value_data = (
      b'\xde\x07\x05\x00\x02\x00\x06\x00\x11\x00\x02\x00\x13\x00\x1b\x03')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      u'DateCreated', data=value_data,
      data_type=dfwinreg_definitions.REG_BINARY)
  profile_2.AddValue(registry_value)

  value_data = (
      b'\xde\x07\x05\x00\x02\x00\x06\x00\x11\x00\x07\x00\x36\x00\x0a\x00')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      u'DateLastConnected', data=value_data,
      data_type=dfwinreg_definitions.REG_BINARY)
  profile_2.AddValue(registry_value)

  value_data = u'Network'.encode(u'utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      u'Description', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  profile_2.AddValue(registry_value)

  value_data = b'\x00\x00\x00\x00'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      u'Managed', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  profile_2.AddValue(registry_value)

  value_data = b'\x00\x00\x00\x06'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      u'NameType', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  profile_2.AddValue(registry_value)

  value_data = u'Network'.encode(u'utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      u'ProfileName', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  profile_2.AddValue(registry_value)

  # Setup signatures.
  signatures = dfwinreg_fake.FakeWinRegistryKey(u'Signatures')
  registry_key.AddSubkey(signatures)

  managed = dfwinreg_fake.FakeWinRegistryKey(u'Managed')
  signatures.AddSubkey(managed)

  unmanaged = dfwinreg_fake.FakeWinRegistryKey(u'Unmanaged')
  signatures.AddSubkey(unmanaged)

  unmanaged_subkey = dfwinreg_fake.FakeWinRegistryKey(
      u'010103000F0000F0080000000F0000F0E8982FB31F37E52AF30A6575A4898CE667'
      u'6E8C2F99C4C5131D84F64BD823E0')
  unmanaged.AddSubkey(unmanaged_subkey)

  value_data = b'\x00\x50\x56\xea\x6c\xec'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      u'DefaultGatewayMac', data=value_data,
      data_type=dfwinreg_definitions.REG_BINARY)
  unmanaged_subkey.AddValue(registry_value)

  value_data = u'Network'.encode(u'utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      u'Description', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  unmanaged_subkey.AddValue(registry_value)

  value_data = u'localdomain'.encode(u'utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      u'DnsSuffix', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  unmanaged_subkey.AddValue(registry_value)

  value_data = u'Network'.encode(u'utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      u'FirstNetwork', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  unmanaged_subkey.AddValue(registry_value)

  value_data = u'{C1C57B58-BFE2-428B-818C-9D69A873AD3D}'.encode(
      u'utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      u'ProfileGuid', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ)
  unmanaged_subkey.AddValue(registry_value)

  value_data = b'\x00\x00\x00\x08'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      u'Source', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD_BIG_ENDIAN)
  unmanaged_subkey.AddValue(registry_value)

  return registry_key
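# Illustrative sketch (assumption: standalone helper, not part of the test
# above): the DateCreated and DateLastConnected test values above are Windows
# SYSTEMTIME structures, eight little-endian 16-bit integers: year, month,
# day of week, day, hours, minutes, seconds and milliseconds.
import struct

def ParseSystemtime(value_data):
  """Unpacks a 16-byte SYSTEMTIME value into its components."""
  (year, month, _, day, hours, minutes, seconds, milliseconds) = (
      struct.unpack('<8H', value_data))
  return year, month, day, hours, minutes, seconds, milliseconds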
def ParseFileObject(
    self, parser_mediator, file_object, display_name=None, **kwargs):
  """Parses a Windows Shortcut (LNK) file-like object.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    file_object (dfvfs.FileIO): file-like object.
    display_name (Optional[str]): display name.
  """
  if not display_name:
    display_name = parser_mediator.GetDisplayName()

  lnk_file = pylnk.file()
  lnk_file.set_ascii_codepage(parser_mediator.codepage)

  try:
    lnk_file.open_file_object(file_object)
  except IOError as exception:
    parser_mediator.ProduceExtractionError(
        'unable to open file with error: {0!s}'.format(exception))
    return

  link_target = None
  if lnk_file.link_target_identifier_data:
    # TODO: change file_entry.name to display name once it is generated
    # correctly.
    display_name = parser_mediator.GetFilename()
    shell_items_parser = shell_items.ShellItemsParser(display_name)
    shell_items_parser.ParseByteStream(
        parser_mediator, lnk_file.link_target_identifier_data,
        codepage=parser_mediator.codepage)

    link_target = shell_items_parser.CopyToPath()

  access_time = lnk_file.get_file_access_time_as_integer()
  if access_time != 0:
    date_time = dfdatetime_filetime.Filetime(timestamp=access_time)
    event = WinLnkLinkEvent(
        date_time, definitions.TIME_DESCRIPTION_LAST_ACCESS, lnk_file,
        link_target)
    parser_mediator.ProduceEvent(event)

  creation_time = lnk_file.get_file_creation_time_as_integer()
  if creation_time != 0:
    date_time = dfdatetime_filetime.Filetime(timestamp=creation_time)
    event = WinLnkLinkEvent(
        date_time, definitions.TIME_DESCRIPTION_CREATION, lnk_file,
        link_target)
    parser_mediator.ProduceEvent(event)

  modification_time = lnk_file.get_file_modification_time_as_integer()
  if modification_time != 0:
    date_time = dfdatetime_filetime.Filetime(timestamp=modification_time)
    event = WinLnkLinkEvent(
        date_time, definitions.TIME_DESCRIPTION_MODIFICATION, lnk_file,
        link_target)
    parser_mediator.ProduceEvent(event)

  if access_time == 0 and creation_time == 0 and modification_time == 0:
    date_time = dfdatetime_semantic_time.SemanticTime('Not set')
    event = WinLnkLinkEvent(
        date_time, definitions.TIME_DESCRIPTION_NOT_A_TIME, lnk_file,
        link_target)
    parser_mediator.ProduceEvent(event)

  if lnk_file.droid_file_identifier:
    try:
      self._ParseDistributedTrackingIdentifier(
          parser_mediator, lnk_file.droid_file_identifier, display_name)
    except (TypeError, ValueError) as exception:
      parser_mediator.ProduceExtractionError(
          'unable to read droid file identifier with error: {0!s}.'.format(
              exception))

  if lnk_file.birth_droid_file_identifier:
    try:
      self._ParseDistributedTrackingIdentifier(
          parser_mediator, lnk_file.birth_droid_file_identifier,
          display_name)
    except (TypeError, ValueError) as exception:
      parser_mediator.ProduceExtractionError((
          'unable to read birth droid file identifier with error: '
          '{0!s}.').format(exception))

  lnk_file.close()
def _ParseRecord(
    self, parser_mediator, file_object, record_offset, record_size):
  """Parses an INFO-2 record.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    file_object (dfvfs.FileIO): file-like object.
    record_offset (int): record offset.
    record_size (int): record size.
  """
  record_data = file_object.read(record_size)

  try:
    ascii_filename = self._ASCII_STRING.parse(record_data)
  except (IOError, construct.FieldError) as exception:
    parser_mediator.ProduceExtractionError((
        'unable to parse recycler ASCII filename at offset: 0x{0:08x} '
        'with error: {1!s}').format(record_offset, exception))

  try:
    recycler_record_struct = self._RECYCLER_RECORD_STRUCT.parse(
        record_data[self._RECORD_INDEX_OFFSET:])
  except (IOError, construct.FieldError) as exception:
    parser_mediator.ProduceExtractionError((
        'unable to parse recycler index record at offset: 0x{0:08x} '
        'with error: {1!s}').format(
            record_offset + self._RECORD_INDEX_OFFSET, exception))

  unicode_filename = None
  if record_size == 800:
    unicode_filename = binary.ReadUTF16(
        record_data[self._UNICODE_FILENAME_OFFSET:])
    ascii_filename = None

  if ascii_filename and parser_mediator.codepage:
    try:
      ascii_filename = ascii_filename.decode(parser_mediator.codepage)
    except UnicodeDecodeError:
      ascii_filename = ascii_filename.decode(
          parser_mediator.codepage, errors='replace')

  elif ascii_filename:
    ascii_filename = repr(ascii_filename)

  if recycler_record_struct.deletion_time == 0:
    date_time = dfdatetime_semantic_time.SemanticTime('Not set')
  else:
    date_time = dfdatetime_filetime.Filetime(
        timestamp=recycler_record_struct.deletion_time)

  event_data = WinRecycleBinEventData()
  event_data.drive_number = recycler_record_struct.drive_number
  event_data.original_filename = unicode_filename or ascii_filename
  event_data.file_size = recycler_record_struct.file_size
  event_data.offset = record_offset
  event_data.record_index = recycler_record_struct.index
  event_data.short_filename = ascii_filename

  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_DELETED)
  parser_mediator.ProduceEventWithEventData(event, event_data)
def last_written_time(self):
  """dfdatetime.DateTimeValues: last written time."""
  if self._last_written_time is None:
    return dfdatetime_semantic_time.SemanticTime('Not set')

  return dfdatetime_filetime.Filetime(timestamp=self._last_written_time)
def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
  """Extracts events from a Windows Registry key.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
  """
  names_key = registry_key.GetSubkeyByName('Names')
  if not names_key:
    parser_mediator.ProduceExtractionWarning('missing subkey: Names.')
    return

  last_written_time_per_username = {
      registry_value.name: registry_value.last_written_time
      for registry_value in names_key.GetSubkeys()}

  for subkey in registry_key.GetSubkeys():
    if subkey.name == 'Names':
      continue

    try:
      f_value = self._ParseFValue(subkey)
    except errors.ParseError as exception:
      parser_mediator.ProduceExtractionWarning(
          'unable to parse F value with error: {0!s}'.format(exception))
      continue

    registry_value = subkey.GetValueByName('V')
    if not registry_value:
      parser_mediator.ProduceExtractionWarning(
          'missing Registry value: "V" in subkey: {0:s}.'.format(
              subkey.name))
      continue

    v_value_map = self._GetDataTypeMap('v_value')

    try:
      v_value = self._ReadStructureFromByteStream(
          registry_value.data, 0, v_value_map)
    except (ValueError, errors.ParseError) as exception:
      parser_mediator.ProduceExtractionWarning(
          'unable to parse V value with error: {0!s}'.format(exception))
      continue

    username = self._ParseVValueString(
        parser_mediator, registry_value.data, v_value[1])
    fullname = self._ParseVValueString(
        parser_mediator, registry_value.data, v_value[2])
    comments = self._ParseVValueString(
        parser_mediator, registry_value.data, v_value[3])

    last_written_time = last_written_time_per_username.get(username, None)

    # TODO: check if subkey.name == f_value.rid
    event_data = SAMUsersWindowsRegistryEventData()
    event_data.account_rid = f_value.rid
    event_data.comments = comments
    event_data.fullname = fullname
    event_data.key_path = registry_key.path
    event_data.login_count = f_value.number_of_logons
    event_data.username = username

    event = time_events.DateTimeValuesEvent(
        last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
    parser_mediator.ProduceEventWithEventData(event, event_data)

    if f_value.last_login_time != 0:
      date_time = dfdatetime_filetime.Filetime(
          timestamp=f_value.last_login_time)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_LAST_LOGIN)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    if f_value.last_password_set_time != 0:
      date_time = dfdatetime_filetime.Filetime(
          timestamp=f_value.last_password_set_time)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_LAST_PASSWORD_RESET)
      parser_mediator.ProduceEventWithEventData(event, event_data)
def access_time(self):
  """dfdatetime.DateTimeValues: access time or None if not available."""
  timestamp = self._fsntfs_file_entry.get_access_time_as_integer()
  return dfdatetime_filetime.Filetime(timestamp=timestamp)
def _ParseContainerTable(self, parser_mediator, table, container_name):
  """Parses a Container_# table.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    table (pyesedb.table): table.
    container_name (str): container name, which indicates the table type.

  Raises:
    ValueError: if the table value is missing.
  """
  if table is None:
    raise ValueError('Missing table value.')

  for record_index, esedb_record in enumerate(table.records):
    if parser_mediator.abort:
      break

    # TODO: add support for:
    # wpnidm, iecompat, iecompatua, DNTException, DOMStore
    if container_name == 'Content':
      value_mappings = self._CONTAINER_TABLE_VALUE_MAPPINGS
    else:
      value_mappings = None

    try:
      record_values = self._GetRecordValues(
          parser_mediator, table.name, esedb_record,
          value_mappings=value_mappings)
    except UnicodeDecodeError:
      parser_mediator.ProduceExtractionWarning((
          'Unable to retrieve record values from record: {0:d} '
          'in table: {1:s}').format(record_index, table.name))
      continue

    if (container_name in self._SUPPORTED_CONTAINER_NAMES or
        container_name.startswith('MSHist')):
      access_count = record_values.get('AccessCount', None)
      cached_filename = record_values.get('Filename', None)
      cached_file_size = record_values.get('FileSize', None)
      cache_identifier = record_values.get('CacheId', None)
      container_identifier = record_values.get('ContainerId', None)
      entry_identifier = record_values.get('EntryId', None)
      file_extension = record_values.get('FileExtension', None)
      redirect_url = record_values.get('RedirectUrl', None)
      sync_count = record_values.get('SyncCount', None)

      url = record_values.get('Url', '')
      # Ignore a URL that starts with a binary value. The additional check
      # for an empty URL prevents an IndexError.
      if url and (ord(url[0]) < 0x20 or ord(url[0]) == 0x7f):
        url = None

      request_headers = record_values.get('RequestHeaders', None)
      # Ignore non-Unicode request header values.
      if not isinstance(request_headers, str):
        request_headers = None

      response_headers = record_values.get('ResponseHeaders', None)
      # Ignore non-Unicode response header values.
      if not isinstance(response_headers, str):
        response_headers = None

      event_data = MsieWebCacheContainerEventData()
      event_data.access_count = access_count
      event_data.cached_filename = cached_filename
      event_data.cached_file_size = cached_file_size
      event_data.cache_identifier = cache_identifier
      event_data.container_identifier = container_identifier
      event_data.entry_identifier = entry_identifier
      event_data.file_extension = file_extension
      event_data.redirect_url = redirect_url
      event_data.request_headers = request_headers
      event_data.response_headers = response_headers
      event_data.sync_count = sync_count
      event_data.url = url

      timestamp = record_values.get('SyncTime', None)
      if timestamp:
        date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, 'Synchronization time')
        parser_mediator.ProduceEventWithEventData(event, event_data)

      timestamp = record_values.get('CreationTime', None)
      if timestamp:
        date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_CREATION)
        parser_mediator.ProduceEventWithEventData(event, event_data)

      timestamp = record_values.get('ExpiryTime', None)
      if timestamp:
        date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
        parser_mediator.ProduceEventWithEventData(event, event_data)

      timestamp = record_values.get('ModifiedTime', None)
      if timestamp:
        date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
        parser_mediator.ProduceEventWithEventData(event, event_data)

      timestamp = record_values.get('AccessedTime', None)
      if timestamp:
        date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_LAST_ACCESS)
        parser_mediator.ProduceEventWithEventData(event, event_data)

      timestamp = record_values.get('PostCheckTime', None)
      if timestamp:
        date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, 'Post check time')
        parser_mediator.ProduceEventWithEventData(event, event_data)
def modification_time(self):
  """dfdatetime.DateTimeValues: modification time or None if not available."""
  timestamp = self._fsntfs_file_entry.get_modification_time_as_integer()
  return dfdatetime_filetime.Filetime(timestamp=timestamp)
def ParseContainersTable(
    self, parser_mediator, database=None, table=None, **unused_kwargs):
  """Parses a Containers table.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    database (Optional[pyesedb.file]): ESE database.
    table (Optional[pyesedb.table]): table.

  Raises:
    ValueError: if the database or table value is missing.
  """
  if database is None:
    raise ValueError('Missing database value.')

  if table is None:
    raise ValueError('Missing table value.')

  for esedb_record in table.records:
    if parser_mediator.abort:
      break

    record_values = self._GetRecordValues(
        parser_mediator, table.name, esedb_record)

    event_data = MsieWebCacheContainersEventData()
    event_data.container_identifier = record_values.get('ContainerId', None)
    event_data.directory = record_values.get('Directory', None)
    event_data.name = record_values.get('Name', None)
    event_data.set_identifier = record_values.get('SetId', None)

    timestamp = record_values.get('LastScavengeTime', None)
    if timestamp:
      date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
      event = time_events.DateTimeValuesEvent(
          date_time, 'Last Scavenge Time')
      parser_mediator.ProduceEventWithEventData(event, event_data)

    timestamp = record_values.get('LastAccessTime', None)
    if timestamp:
      date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_LAST_ACCESS)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    container_identifier = record_values.get('ContainerId', None)
    container_name = record_values.get('Name', None)

    if not container_identifier or not container_name:
      continue

    if container_name in self._IGNORED_CONTAINER_NAMES:
      parser_mediator.ProduceExtractionWarning(
          'Skipped container (ContainerId: {0:d}, Name: {1:s})'.format(
              container_identifier, container_name))
      continue

    table_name = 'Container_{0:d}'.format(container_identifier)
    esedb_table = database.get_table_by_name(table_name)
    if not esedb_table:
      parser_mediator.ProduceExtractionWarning(
          'Missing table: {0:s}'.format(table_name))
      continue

    self._ParseContainerTable(parser_mediator, esedb_table, container_name)
def creation_time(self):
  """dfdatetime.Filetime: creation time or None if not set."""
  timestamp = self._fsntfs_attribute.get_creation_time_as_integer()
  return dfdatetime_filetime.Filetime(timestamp=timestamp)
def CreateTestEvents():
  """Creates events for testing.

  Returns:
    list[EventObject]: events.
  """
  test_events = []
  hostname = 'MYHOSTNAME'
  data_type = 'test:event'

  event = events.EventObject()
  event.username = '******'
  event.filename = 'c:/Users/joesmith/NTUSER.DAT'
  event.hostname = hostname
  event.timestamp = 0
  event.data_type = data_type
  event.text = ''
  test_events.append(event)

  filetime = dfdatetime_filetime.Filetime()

  # TODO: move this to a WindowsRegistryEvent unit test.
  event_data = windows_events.WindowsRegistryEventData()
  event_data.key_path = 'MY AutoRun key'
  event_data.hostname = hostname
  event_data.regvalue = {'Value': 'c:/Temp/evil.exe'}

  filetime.CopyFromString('2012-04-20 22:38:46.929596')
  event = time_events.DateTimeValuesEvent(
      filetime, definitions.TIME_DESCRIPTION_WRITTEN)
  _MergeEventAndEventData(event, event_data)
  test_events.append(event)

  event_data = windows_events.WindowsRegistryEventData()
  event_data.key_path = 'HKEY_CURRENT_USER\\Secret\\EvilEmpire\\Malicious_key'
  event_data.hostname = hostname
  event_data.regvalue = {'Value': 'send all the exes to the other world'}

  filetime.CopyFromString('2012-04-20 23:56:46.929596')
  event = time_events.DateTimeValuesEvent(
      filetime, definitions.TIME_DESCRIPTION_WRITTEN)
  _MergeEventAndEventData(event, event_data)
  test_events.append(event)

  event_data = windows_events.WindowsRegistryEventData()
  event_data.key_path = 'HKEY_CURRENT_USER\\Windows\\Normal'
  event_data.hostname = hostname
  event_data.regvalue = {'Value': 'run all the benign stuff'}

  filetime.CopyFromString('2012-04-20 16:44:46')
  event = time_events.DateTimeValuesEvent(
      filetime, definitions.TIME_DESCRIPTION_WRITTEN)
  _MergeEventAndEventData(event, event_data)
  test_events.append(event)

  timestamp = timelib.Timestamp.CopyFromString('2012-04-30 10:29:47.929596')
  filename = 'c:/Temp/evil.exe'
  attributes = {
      'text': 'This log line reads ohh so much.'}
  event = TestEvent(timestamp, attributes)
  event.filename = filename
  event.hostname = hostname
  test_events.append(event)

  timestamp = timelib.Timestamp.CopyFromString('2012-04-30 10:29:47.929596')
  attributes = {
      'text': 'Nothing of interest here, move on.'}
  event = TestEvent(timestamp, attributes)
  event.filename = filename
  event.hostname = hostname
  test_events.append(event)

  timestamp = timelib.Timestamp.CopyFromString('2012-04-30 13:06:47.939596')
  attributes = {
      'text': 'Mr. Evil just logged into the machine and got root.'}
  event = TestEvent(timestamp, attributes)
  event.filename = filename
  event.hostname = hostname
  test_events.append(event)

  timestamp = timelib.Timestamp.CopyFromString('2012-06-05 22:14:19.000000')

  # TODO: refactor to use event data.
  event = time_events.TimestampEvent(
      timestamp, definitions.TIME_DESCRIPTION_WRITTEN,
      data_type='text:entry')
  event.hostname = 'nomachine'
  event.offset = 12
  event.body = (
      'This is a line by someone not reading the log line properly. And '
      'since this log line exceeds the accepted 80 chars it will be '
      'shortened.')
  # TODO: fix missing body attribute
  event.text = event.body
  event.username = '******'
  test_events.append(event)

  return test_events
def modification_time(self):
  """dfdatetime.Filetime: modification time."""
  timestamp = self._fsntfs_attribute.get_modification_time_as_integer()
  return dfdatetime_filetime.Filetime(timestamp=timestamp)
def _ParseUrl(
    self, parser_mediator, format_version, cache_directories, msiecf_item,
    recovered=False):
  """Extracts data from a MSIE Cache Files (MSIECF) URL item.

  Every item is stored as an event object, one for each timestamp.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    format_version (str): MSIECF format version.
    cache_directories (list[str]): cache directory names.
    msiecf_item (pymsiecf.url): MSIECF URL item.
    recovered (Optional[bool]): True if the item was recovered.
  """
  # The secondary time can be stored in either UTC or local time; this
  # depends on what the index.dat file is used for. Either the file path
  # or the location string can be used to distinguish between the
  # different types of files.
  timestamp = msiecf_item.get_primary_time_as_integer()
  if not timestamp:
    primary_date_time = dfdatetime_semantic_time.NotSet()
  else:
    primary_date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
  primary_date_time_description = 'Primary Time'

  timestamp = msiecf_item.get_secondary_time_as_integer()
  secondary_date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
  secondary_date_time_description = 'Secondary Time'

  if msiecf_item.type:
    if msiecf_item.type == 'cache':
      primary_date_time_description = (
          definitions.TIME_DESCRIPTION_LAST_ACCESS)
      secondary_date_time_description = (
          definitions.TIME_DESCRIPTION_MODIFICATION)

    elif msiecf_item.type == 'cookie':
      primary_date_time_description = (
          definitions.TIME_DESCRIPTION_LAST_ACCESS)
      secondary_date_time_description = (
          definitions.TIME_DESCRIPTION_MODIFICATION)

    elif msiecf_item.type == 'history':
      primary_date_time_description = (
          definitions.TIME_DESCRIPTION_LAST_VISITED)
      secondary_date_time_description = (
          definitions.TIME_DESCRIPTION_LAST_VISITED)

    elif msiecf_item.type == 'history-daily':
      primary_date_time_description = (
          definitions.TIME_DESCRIPTION_LAST_VISITED)
      secondary_date_time_description = (
          definitions.TIME_DESCRIPTION_LAST_VISITED)
      # The secondary_date_time is in local time; normalize it to UTC.
      secondary_date_time.is_local_time = True

    elif msiecf_item.type == 'history-weekly':
      primary_date_time_description = definitions.TIME_DESCRIPTION_CREATION
      secondary_date_time_description = (
          definitions.TIME_DESCRIPTION_LAST_VISITED)
      # The secondary_date_time is in local time; normalize it to UTC.
      secondary_date_time.is_local_time = True

  http_headers = ''
  if msiecf_item.type and msiecf_item.data:
    if msiecf_item.type == 'cache':
      if msiecf_item.data[:4] == b'HTTP':
        # Make sure the HTTP headers are ASCII encoded.
        # TODO: determine the correct encoding; current indications are
        # that this could be the system narrow string codepage.
        try:
          http_headers = msiecf_item.data[:-1].decode('ascii')
        except UnicodeDecodeError:
          parser_mediator.ProduceExtractionWarning((
              'unable to decode HTTP headers of URL record at offset: '
              '0x{0:08x}. Characters that cannot be decoded will be '
              'replaced with "?" or "\\ufffd".').format(msiecf_item.offset))
          http_headers = msiecf_item.data[:-1].decode(
              'ascii', errors='replace')

  # TODO: parse data of other URL item types like history, which requires
  # OLE VT parsing.

  event_data = MSIECFURLEventData()
  event_data.cached_filename = msiecf_item.filename
  event_data.cached_file_size = msiecf_item.cached_file_size
  event_data.cache_directory_index = msiecf_item.cache_directory_index
  event_data.http_headers = http_headers
  event_data.number_of_hits = msiecf_item.number_of_hits
  event_data.offset = msiecf_item.offset
  event_data.recovered = recovered
  event_data.url = msiecf_item.location

  if (event_data.cache_directory_index >= 0 and
      event_data.cache_directory_index < len(cache_directories)):
    event_data.cache_directory_name = (
        cache_directories[event_data.cache_directory_index])

  event = time_events.DateTimeValuesEvent(
      primary_date_time, primary_date_time_description)
  parser_mediator.ProduceEventWithEventData(event, event_data)

  if secondary_date_time.timestamp != 0:
    event = time_events.DateTimeValuesEvent(
        secondary_date_time, secondary_date_time_description,
        time_zone=parser_mediator.timezone)
    parser_mediator.ProduceEventWithEventData(event, event_data)

  expiration_timestamp = msiecf_item.get_expiration_time_as_integer()
  if expiration_timestamp != 0:
    # The expiration time in MSIECF version 4.7 is stored as a FILETIME
    # value; in version 5.2 it is stored as a FAT date time value.
    # Since the as_integer function returns the raw integer value we need
    # to apply the right conversion here.
    if format_version == '4.7':
      if expiration_timestamp == 0x7fffffffffffffff:
        expiration_date_time = dfdatetime_semantic_time.Never()
      else:
        expiration_date_time = dfdatetime_filetime.Filetime(
            timestamp=expiration_timestamp)
    else:
      if expiration_timestamp == 0xffffffff:
        expiration_date_time = dfdatetime_semantic_time.Never()
      else:
        expiration_date_time = dfdatetime_fat_date_time.FATDateTime(
            fat_date_time=expiration_timestamp)

    event = time_events.DateTimeValuesEvent(
        expiration_date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)

  last_checked_timestamp = msiecf_item.get_last_checked_time_as_integer()
  if last_checked_timestamp != 0:
    last_checked_date_time = dfdatetime_fat_date_time.FATDateTime(
        fat_date_time=last_checked_timestamp)

    event = time_events.DateTimeValuesEvent(
        last_checked_date_time, definitions.TIME_DESCRIPTION_LAST_CHECKED)
    parser_mediator.ProduceEventWithEventData(event, event_data)
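# Illustrative sketch (assumption: standalone helper, not part of the parser
# above): converting a raw FILETIME value, such as the format 4.7 expiration
# time above, to a Python datetime. FILETIME counts 100-nanosecond intervals
# since January 1, 1601 UTC.
import datetime

def FiletimeToDatetime(filetime_value):
  """Returns the FILETIME value as a naive UTC datetime."""
  return datetime.datetime(1601, 1, 1) + datetime.timedelta(
      microseconds=filetime_value // 10)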
def _ParseGUIDTable(
    self, parser_mediator, cache, database, esedb_table, values_map,
    event_data_class):
  """Parses a table with a GUID as name.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    cache (ESEDBCache): cache, which contains information about the
        identifiers stored in the SruDbIdMapTable table.
    database (ESEDatabase): ESE database.
    esedb_table (pyesedb.table): table.
    values_map (dict[str, str]): mapping of table columns to event data
        attribute names.
    event_data_class (type): event data class.

  Raises:
    ValueError: if the cache, database or table value is missing.
  """
  if cache is None:
    raise ValueError('Missing cache value.')

  if database is None:
    raise ValueError('Missing database value.')

  if esedb_table is None:
    raise ValueError('Missing table value.')

  identifier_mappings = self._GetIdentifierMappings(
      parser_mediator, cache, database)

  for esedb_record in esedb_table.records:
    if parser_mediator.abort:
      break

    record_values = self._GetRecordValues(
        parser_mediator, esedb_table.name, esedb_record,
        value_mappings=self._GUID_TABLE_VALUE_MAPPINGS)

    event_data = event_data_class()

    for attribute_name, column_name in values_map.items():
      record_value = record_values.get(column_name, None)
      if attribute_name in ('application', 'user_identifier'):
        # Human readable versions of AppId and UserId values are stored
        # in the SruDbIdMapTable table; also referred to as identifier
        # mapping. Here we look up the numeric identifier stored in the
        # GUID table in SruDbIdMapTable.
        record_value = identifier_mappings.get(record_value, record_value)

      setattr(event_data, attribute_name, record_value)

    timestamp = record_values.get('TimeStamp')
    if timestamp:
      date_time = dfdatetime_ole_automation_date.OLEAutomationDate(
          timestamp=timestamp)
      timestamp_description = definitions.TIME_DESCRIPTION_SAMPLE
    else:
      date_time = dfdatetime_semantic_time.NotSet()
      timestamp_description = definitions.TIME_DESCRIPTION_NOT_A_TIME

    event = time_events.DateTimeValuesEvent(date_time, timestamp_description)
    parser_mediator.ProduceEventWithEventData(event, event_data)

    timestamp = record_values.get('ConnectStartTime')
    if timestamp:
      date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_FIRST_CONNECTED)
      parser_mediator.ProduceEventWithEventData(event, event_data)
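# Illustrative sketch (assumption: standalone helper, not part of the parser
# above): converting an OLE Automation date, as stored in the TimeStamp
# column above, to a Python datetime. The value counts days, with a
# fractional part, since December 30, 1899.
import datetime

def OLEAutomationDateToDatetime(ole_automation_date):
  """Returns the OLE Automation date as a naive datetime."""
  return datetime.datetime(1899, 12, 30) + datetime.timedelta(
      days=ole_automation_date)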
def _ParseInfo2Record(
    self, parser_mediator, file_object, record_offset, record_size):
  """Parses an INFO-2 record.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    file_object (dfvfs.FileIO): file-like object.
    record_offset (int): record offset.
    record_size (int): record size.

  Raises:
    ParseError: if the record cannot be read.
  """
  record_data = self._ReadData(file_object, record_offset, record_size)

  record_map = self._GetDataTypeMap('recycler_info2_file_entry')

  try:
    record = self._ReadStructureFromByteStream(
        record_data, record_offset, record_map)
  except (ValueError, errors.ParseError) as exception:
    raise errors.ParseError((
        'Unable to map record data at offset: 0x{0:08x} with error: '
        '{1!s}').format(record_offset, exception))

  codepage = parser_mediator.codepage or 'ascii'

  # The original filename can contain remnant data after the end-of-string
  # character.
  ascii_filename = record.original_filename.split(b'\x00')[0]

  try:
    ascii_filename = ascii_filename.decode(codepage)
  except UnicodeDecodeError:
    ascii_filename = ascii_filename.decode(codepage, errors='replace')
    parser_mediator.ProduceExtractionWarning(
        'unable to decode original filename.')

  unicode_filename = None
  if record_size > 280:
    record_offset += 280
    utf16_string_map = self._GetDataTypeMap(
        'recycler_info2_file_entry_utf16le_string')

    try:
      unicode_filename = self._ReadStructureFromByteStream(
          record_data[280:], record_offset, utf16_string_map)
    except (ValueError, errors.ParseError) as exception:
      raise errors.ParseError((
          'Unable to map record data at offset: 0x{0:08x} with error: '
          '{1!s}').format(record_offset, exception))

  if record.deletion_time == 0:
    date_time = dfdatetime_semantic_time.NotSet()
  else:
    date_time = dfdatetime_filetime.Filetime(timestamp=record.deletion_time)

  event_data = WinRecycleBinEventData()
  event_data.drive_number = record.drive_number
  event_data.original_filename = unicode_filename or ascii_filename
  event_data.file_size = record.original_file_size
  event_data.offset = record_offset
  event_data.record_index = record.index

  if ascii_filename != unicode_filename:
    event_data.short_filename = ascii_filename

  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_DELETED)
  parser_mediator.ProduceEventWithEventData(event, event_data)
def _CreateTestKey(self, key_path, time_string):
  """Creates Registry keys and values for testing.

  Args:
    key_path (str): Windows Registry key path.
    time_string (str): key last written date and time.

  Returns:
    dfwinreg.WinRegistryKey: a Windows Registry key.
  """
  filetime = dfdatetime_filetime.Filetime()
  filetime.CopyFromDateTimeString(time_string)
  registry_key = dfwinreg_fake.FakeWinRegistryKey(
      'TestDriver', key_path=key_path,
      last_written_time=filetime.timestamp, offset=1456)

  value_data = b'\x02\x00\x00\x00'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'Type', data=value_data, data_type=dfwinreg_definitions.REG_DWORD,
      offset=123)
  registry_key.AddValue(registry_value)

  value_data = b'\x02\x00\x00\x00'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'Start', data=value_data, data_type=dfwinreg_definitions.REG_DWORD,
      offset=127)
  registry_key.AddValue(registry_value)

  value_data = b'\x01\x00\x00\x00'
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'ErrorControl', data=value_data,
      data_type=dfwinreg_definitions.REG_DWORD, offset=131)
  registry_key.AddValue(registry_value)

  value_data = 'Pnp Filter'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'Group', data=value_data, data_type=dfwinreg_definitions.REG_SZ,
      offset=140)
  registry_key.AddValue(registry_value)

  value_data = 'Test Driver'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'DisplayName', data=value_data, data_type=dfwinreg_definitions.REG_SZ,
      offset=160)
  registry_key.AddValue(registry_value)

  value_data = 'testdriver.inf_x86_neutral_dd39b6b0a45226c4'.encode(
      'utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'DriverPackageId', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ, offset=180)
  registry_key.AddValue(registry_value)

  value_data = 'C:\\Dell\\testdriver.sys'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'ImagePath', data=value_data, data_type=dfwinreg_definitions.REG_SZ,
      offset=200)
  registry_key.AddValue(registry_value)

  return registry_key
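
# A minimal usage sketch, not part of the original tests: it assumes the
# helper above lives on a unittest.TestCase based test class and that the
# key path and time string below are arbitrary example inputs. The
# GetValueByName and data accessors are the same dfwinreg read interface
# used by the plugins elsewhere in this document.
def testCreateTestKeySketch(self):
  """Illustrates how the fake key built above can be inspected in a test."""
  key_path = 'HKEY_LOCAL_MACHINE\\System\\ControlSet001\\services'
  registry_key = self._CreateTestKey(key_path, '2012-08-28 09:23:49.002031')

  self.assertEqual(registry_key.name, 'TestDriver')

  start_value = registry_key.GetValueByName('Start')
  self.assertIsNotNone(start_value)
  self.assertEqual(start_value.data, b'\x02\x00\x00\x00')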
def ParseFileLNKFile(self, parser_mediator, file_object, display_name):
  """Parses a Windows Shortcut (LNK) file-like object.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    file_object (dfvfs.FileIO): file-like object.
    display_name (str): display name.
  """
  lnk_file = pylnk.file()
  lnk_file.set_ascii_codepage(parser_mediator.codepage)

  try:
    lnk_file.open_file_object(file_object)
  except IOError as exception:
    parser_mediator.ProduceExtractionError(
        'unable to open file with error: {0!s}'.format(exception))
    return

  link_target = None
  if lnk_file.link_target_identifier_data:
    # TODO: change file_entry.name to display name once it is generated
    # correctly.
    display_name = parser_mediator.GetFilename()
    shell_items_parser = shell_items.ShellItemsParser(display_name)
    shell_items_parser.ParseByteStream(
        parser_mediator, lnk_file.link_target_identifier_data,
        codepage=parser_mediator.codepage)

    link_target = shell_items_parser.CopyToPath()

  event_data = WinLnkLinkEventData()
  event_data.birth_droid_file_identifier = (
      lnk_file.birth_droid_file_identifier)
  event_data.birth_droid_volume_identifier = (
      lnk_file.birth_droid_volume_identifier)
  event_data.command_line_arguments = lnk_file.command_line_arguments
  event_data.description = lnk_file.description
  event_data.drive_serial_number = lnk_file.drive_serial_number
  event_data.drive_type = lnk_file.drive_type
  event_data.droid_file_identifier = lnk_file.droid_file_identifier
  event_data.droid_volume_identifier = lnk_file.droid_volume_identifier
  event_data.env_var_location = lnk_file.environment_variables_location
  event_data.file_attribute_flags = lnk_file.file_attribute_flags
  event_data.file_size = lnk_file.file_size
  event_data.icon_location = lnk_file.icon_location
  event_data.link_target = link_target
  event_data.local_path = lnk_file.local_path
  event_data.network_path = lnk_file.network_path
  event_data.relative_path = lnk_file.relative_path
  event_data.volume_label = lnk_file.volume_label
  event_data.working_directory = lnk_file.working_directory

  access_time = lnk_file.get_file_access_time_as_integer()
  if access_time != 0:
    date_time = dfdatetime_filetime.Filetime(timestamp=access_time)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_LAST_ACCESS)
    parser_mediator.ProduceEventWithEventData(event, event_data)

  creation_time = lnk_file.get_file_creation_time_as_integer()
  if creation_time != 0:
    date_time = dfdatetime_filetime.Filetime(timestamp=creation_time)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_CREATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)

  modification_time = lnk_file.get_file_modification_time_as_integer()
  if modification_time != 0:
    date_time = dfdatetime_filetime.Filetime(timestamp=modification_time)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)

  if access_time == 0 and creation_time == 0 and modification_time == 0:
    date_time = dfdatetime_semantic_time.SemanticTime('Not set')
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_NOT_A_TIME)
    parser_mediator.ProduceEventWithEventData(event, event_data)

  if lnk_file.droid_file_identifier:
    try:
      self._ParseDistributedTrackingIdentifier(
          parser_mediator, lnk_file.droid_file_identifier, display_name)
    except (TypeError, ValueError) as exception:
      parser_mediator.ProduceExtractionError(
          'unable to read droid file identifier with error: {0!s}.'.format(
              exception))

  if lnk_file.birth_droid_file_identifier:
    try:
      self._ParseDistributedTrackingIdentifier(
          parser_mediator, lnk_file.birth_droid_file_identifier,
          display_name)
    except (TypeError, ValueError) as exception:
      parser_mediator.ProduceExtractionError((
          'unable to read birth droid file identifier with error: '
          '{0!s}.').format(exception))

  lnk_file.close()
def _CreateTestKey(self, key_path, time_string):
  """Creates Registry keys and values for testing.

  Args:
    key_path (str): Windows Registry key path.
    time_string (str): key last written date and time.

  Returns:
    dfwinreg.WinRegistryKey: a Windows Registry key.
  """
  filetime = dfdatetime_filetime.Filetime()
  filetime.CopyFromDateTimeString(time_string)
  registry_key = dfwinreg_fake.FakeWinRegistryKey(
      'Session Manager', key_path=key_path,
      last_written_time=filetime.timestamp, offset=153)

  value_data = 'autocheck autochk *\x00'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'BootExecute', data=value_data,
      data_type=dfwinreg_definitions.REG_MULTI_SZ, offset=123)
  registry_key.AddValue(registry_value)

  value_data = '2592000'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'CriticalSectionTimeout', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ, offset=153)
  registry_key.AddValue(registry_value)

  value_data = '\x00'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'ExcludeFromKnownDlls', data=value_data,
      data_type=dfwinreg_definitions.REG_MULTI_SZ, offset=163)
  registry_key.AddValue(registry_value)

  value_data = '0'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'GlobalFlag', data=value_data, data_type=dfwinreg_definitions.REG_SZ,
      offset=173)
  registry_key.AddValue(registry_value)

  value_data = '0'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'HeapDeCommitFreeBlockThreshold', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ, offset=183)
  registry_key.AddValue(registry_value)

  value_data = '0'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'HeapDeCommitTotalFreeThreshold', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ, offset=203)
  registry_key.AddValue(registry_value)

  value_data = '0'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'HeapSegmentCommit', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ, offset=213)
  registry_key.AddValue(registry_value)

  value_data = '0'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'HeapSegmentReserve', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ, offset=223)
  registry_key.AddValue(registry_value)

  value_data = '2'.encode('utf_16_le')
  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'NumberOfInitialSessions', data=value_data,
      data_type=dfwinreg_definitions.REG_SZ, offset=243)
  registry_key.AddValue(registry_value)

  return registry_key
def _ParseUSNChangeJournal(self, parser_mediator, usn_change_journal):
  """Parses a USN change journal.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    usn_change_journal (pyfsntsfs.usn_change_journal): USN change journal.

  Raises:
    ParseError: if a USN change journal record cannot be parsed.
  """
  if not usn_change_journal:
    return

  usn_record_map = self._GetDataTypeMap('usn_record_v2')

  usn_record_data = usn_change_journal.read_usn_record()
  while usn_record_data:
    current_offset = usn_change_journal.get_offset()

    try:
      usn_record = self._ReadStructureFromByteStream(
          usn_record_data, current_offset, usn_record_map)
    except (ValueError, errors.ParseError) as exception:
      raise errors.ParseError((
          'Unable to parse USN record at offset: 0x{0:08x} with error: '
          '{1!s}').format(current_offset, exception))

    # Per MSDN we need to use name offset for forward compatibility.
    name_offset = usn_record.name_offset - 60
    utf16_stream = usn_record.name[name_offset:usn_record.name_size]

    try:
      name_string = utf16_stream.decode('utf-16-le')
    except (UnicodeDecodeError, UnicodeEncodeError) as exception:
      name_string = utf16_stream.decode('utf-16-le', errors='replace')
      parser_mediator.ProduceExtractionError((
          'unable to decode USN record name string with error: '
          '{0!s}. Characters that cannot be decoded will be replaced '
          'with "?" or "\\ufffd".').format(exception))

    event_data = NTFSUSNChangeEventData()
    event_data.file_attribute_flags = usn_record.file_attribute_flags
    event_data.file_reference = usn_record.file_reference
    event_data.filename = name_string
    event_data.offset = current_offset
    event_data.parent_file_reference = usn_record.parent_file_reference
    event_data.update_reason_flags = usn_record.update_reason_flags
    event_data.update_sequence_number = usn_record.update_sequence_number
    event_data.update_source_flags = usn_record.update_source_flags

    if not usn_record.update_date_time:
      date_time = dfdatetime_semantic_time.SemanticTime('Not set')
    else:
      date_time = dfdatetime_filetime.Filetime(
          timestamp=usn_record.update_date_time)

    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_ENTRY_MODIFICATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)

    usn_record_data = usn_change_journal.read_usn_record()
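
# Standalone sketch (not plaso code) of why the parser above subtracts 60
# from name_offset: the fixed-size portion of a USN_RECORD_V2 is 60 bytes and
# the mapped name field only covers the bytes that follow it. The record
# below is hand-built for illustration.
import struct

filename = 'example.txt'.encode('utf-16-le')
fixed_part = struct.pack(
    '<IHHQQQQIIIIHH',
    60 + len(filename),  # RecordLength
    2, 0,                # MajorVersion, MinorVersion
    0x2000000000001234,  # FileReferenceNumber
    0x2000000000000005,  # ParentFileReferenceNumber
    0,                   # Usn
    131592384000000000,  # TimeStamp (FILETIME)
    0x00000100,          # Reason (USN_REASON_FILE_CREATE)
    0,                   # SourceInfo
    0,                   # SecurityId
    0x20,                # FileAttributes (FILE_ATTRIBUTE_ARCHIVE)
    len(filename),       # FileNameLength in bytes
    60)                  # FileNameOffset, relative to the start of the record
usn_record_v2_data = fixed_part + filename

# FileNameOffset is relative to the record start, so within the trailing
# variable-length data the name begins at FileNameOffset - 60.
name_offset = 60 - 60
name_data = usn_record_v2_data[60:]
print(name_data[name_offset:name_offset + len(filename)].decode('utf-16-le'))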
def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
  """Extracts events from a Windows Registry key.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
  """
  names_key = registry_key.GetSubkeyByName(u'Names')
  if not names_key:
    parser_mediator.ProduceExtractionError(u'missing subkey: Names.')
    return

  last_written_time_per_username = {
      registry_value.name: registry_value.last_written_time
      for registry_value in names_key.GetSubkeys()}

  for subkey in registry_key.GetSubkeys():
    if subkey.name == u'Names':
      continue

    f_value = subkey.GetValueByName(u'F')
    if not f_value:
      parser_mediator.ProduceExtractionError(
          u'missing Registry value: "F" in subkey: {0:s}.'.format(
              subkey.name))
      continue

    v_value = subkey.GetValueByName(u'V')
    if not v_value:
      parser_mediator.ProduceExtractionError(
          u'missing Registry value: "V" in subkey: {0:s}.'.format(
              subkey.name))
      continue

    try:
      f_data_struct = self._F_VALUE_STRUCT.parse(f_value.data)
    except construct.FieldError as exception:
      parser_mediator.ProduceExtractionError((
          u'unable to parse Registry value: "F" in subkey: {0:s} '
          u'with error: {1!s}.').format(subkey.name, exception))
      continue

    try:
      v_data_struct = self._V_VALUE_HEADER.parse(v_value.data)
    except construct.FieldError as exception:
      parser_mediator.ProduceExtractionError((
          u'unable to parse Registry value: "V" in subkey: {0:s} '
          u'with error: {1!s}.').format(subkey.name, exception))
      continue

    v_header_values = v_data_struct.values()[0]

    data_start_offset = v_header_values[3] + self._V_VALUE_STRINGS_OFFSET
    data_end_offset = v_header_values[4] + data_start_offset
    utf16_stream = v_value.data[data_start_offset:data_end_offset]

    try:
      username = utf16_stream.decode(u'utf-16-le')
    except (UnicodeDecodeError, UnicodeEncodeError) as exception:
      username = utf16_stream.decode(u'utf-16-le', errors=u'replace')
      parser_mediator.ProduceExtractionError((
          u'unable to decode username string with error: {0!s}. Characters '
          u'that cannot be decoded will be replaced with "?" or '
          u'"\\ufffd".').format(exception))

    data_start_offset = v_header_values[6] + self._V_VALUE_STRINGS_OFFSET
    data_end_offset = v_header_values[7] + data_start_offset
    utf16_stream = v_value.data[data_start_offset:data_end_offset]

    try:
      fullname = utf16_stream.decode(u'utf-16-le')
    except (UnicodeDecodeError, UnicodeEncodeError) as exception:
      fullname = utf16_stream.decode(u'utf-16-le', errors=u'replace')
      parser_mediator.ProduceExtractionError((
          u'unable to decode fullname string with error: {0!s}. Characters '
          u'that cannot be decoded will be replaced with "?" or '
          u'"\\ufffd".').format(exception))

    data_start_offset = v_header_values[9] + self._V_VALUE_STRINGS_OFFSET
    data_end_offset = v_header_values[10] + data_start_offset
    utf16_stream = v_value.data[data_start_offset:data_end_offset]

    try:
      comments = utf16_stream.decode(u'utf-16-le')
    except (UnicodeDecodeError, UnicodeEncodeError) as exception:
      comments = utf16_stream.decode(u'utf-16-le', errors=u'replace')
      parser_mediator.ProduceExtractionError((
          u'unable to decode comments string with error: {0!s}. Characters '
          u'that cannot be decoded will be replaced with "?" or '
          u'"\\ufffd".').format(exception))

    last_written_time = last_written_time_per_username.get(username, None)

    # TODO: check if subkey.name == f_data_struct.rid

    if last_written_time:
      values_dict = {
          u'account_rid': f_data_struct.rid,
          u'login_count': f_data_struct.login_count}

      if username:
        values_dict[u'username'] = username
      if fullname:
        values_dict[u'full_name'] = fullname
      if comments:
        values_dict[u'comments'] = comments

      event_data = windows_events.WindowsRegistryEventData()
      event_data.key_path = registry_key.path
      event_data.offset = registry_key.offset
      event_data.regvalue = values_dict
      event_data.source_append = self._SOURCE_APPEND

      event = time_events.DateTimeValuesEvent(
          last_written_time, eventdata.EventTimestamp.WRITTEN_TIME)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    event_data = SAMUsersWindowsRegistryEventData()
    event_data.account_rid = f_data_struct.rid
    event_data.comments = comments
    event_data.fullname = fullname
    event_data.key_path = registry_key.path
    event_data.login_count = f_data_struct.login_count
    event_data.offset = f_value.offset
    event_data.username = username

    if f_data_struct.last_login != 0:
      date_time = dfdatetime_filetime.Filetime(
          timestamp=f_data_struct.last_login)
      event = time_events.DateTimeValuesEvent(
          date_time, eventdata.EventTimestamp.LAST_LOGIN_TIME)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    if f_data_struct.password_reset != 0:
      date_time = dfdatetime_filetime.Filetime(
          timestamp=f_data_struct.password_reset)
      event = time_events.DateTimeValuesEvent(
          date_time, eventdata.EventTimestamp.LAST_PASSWORD_RESET)
      parser_mediator.ProduceEventWithEventData(event, event_data)
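
# Standalone sketch (not plaso code) of the V value layout relied on above.
# It assumes the header is an array of little-endian 32-bit integers and that
# _V_VALUE_STRINGS_OFFSET is 0xCC, the size of that header; the buffer below
# is hand-built for illustration. Header entry 3 holds the username offset
# (relative to the end of the header) and entry 4 its size in bytes, matching
# v_header_values[3] and v_header_values[4] in the plugin.
import struct

strings_offset = 0xCC
username_data = 'john.doe'.encode('utf-16-le')

v_data = bytearray(strings_offset) + username_data
struct.pack_into('<II', v_data, 3 * 4, 0, len(username_data))

data_start_offset = struct.unpack_from('<I', v_data, 3 * 4)[0] + strings_offset
data_end_offset = struct.unpack_from('<I', v_data, 4 * 4)[0] + data_start_offset
print(v_data[data_start_offset:data_end_offset].decode('utf-16-le'))  # john.doe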
def ParseDestList(self, parser_mediator, olecf_item):
  """Parses the DestList OLECF item.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    olecf_item (pyolecf.item): OLECF item.
  """
  try:
    header = self._DEST_LIST_STREAM_HEADER.parse_stream(olecf_item)
  except (IOError, construct.FieldError) as exception:
    raise errors.UnableToParseFile(
        'Unable to parse DestList header with error: {0!s}'.format(
            exception))

  if header.format_version not in (1, 3, 4):
    parser_mediator.ProduceExtractionError(
        'unsupported format version: {0:d}.'.format(header.format_version))
    return

  if header.format_version == 1:
    dest_list_stream_entry = self._DEST_LIST_STREAM_ENTRY_V1
  elif header.format_version in (3, 4):
    dest_list_stream_entry = self._DEST_LIST_STREAM_ENTRY_V3

  entry_offset = olecf_item.get_offset()
  while entry_offset < olecf_item.size:
    try:
      entry = dest_list_stream_entry.parse_stream(olecf_item)
    except (IOError, construct.FieldError) as exception:
      raise errors.UnableToParseFile(
          'Unable to parse DestList entry with error: {0!s}'.format(
              exception))

    if not entry:
      break

    display_name = 'DestList entry at offset: 0x{0:08x}'.format(entry_offset)

    try:
      droid_volume_identifier = self._ParseDistributedTrackingIdentifier(
          parser_mediator, entry.droid_volume_identifier, display_name)
    except (TypeError, ValueError) as exception:
      droid_volume_identifier = ''
      parser_mediator.ProduceExtractionError(
          'unable to read droid volume identifier with error: {0!s}'.format(
              exception))

    try:
      droid_file_identifier = self._ParseDistributedTrackingIdentifier(
          parser_mediator, entry.droid_file_identifier, display_name)
    except (TypeError, ValueError) as exception:
      droid_file_identifier = ''
      parser_mediator.ProduceExtractionError(
          'unable to read droid file identifier with error: {0!s}'.format(
              exception))

    try:
      birth_droid_volume_identifier = (
          self._ParseDistributedTrackingIdentifier(
              parser_mediator, entry.birth_droid_volume_identifier,
              display_name))
    except (TypeError, ValueError) as exception:
      birth_droid_volume_identifier = ''
      parser_mediator.ProduceExtractionError((
          'unable to read birth droid volume identifier with error: '
          '{0!s}').format(exception))

    try:
      birth_droid_file_identifier = self._ParseDistributedTrackingIdentifier(
          parser_mediator, entry.birth_droid_file_identifier, display_name)
    except (TypeError, ValueError) as exception:
      birth_droid_file_identifier = ''
      parser_mediator.ProduceExtractionError((
          'unable to read birth droid file identifier with error: '
          '{0!s}').format(exception))

    if entry.last_modification_time == 0:
      date_time = dfdatetime_semantic_time.SemanticTime('Not set')
    else:
      date_time = dfdatetime_filetime.Filetime(
          timestamp=entry.last_modification_time)

    event_data = AutomaticDestinationsDestListEntryEventData()
    event_data.birth_droid_file_identifier = birth_droid_file_identifier
    event_data.birth_droid_volume_identifier = birth_droid_volume_identifier
    event_data.droid_file_identifier = droid_file_identifier
    event_data.droid_volume_identifier = droid_volume_identifier
    event_data.entry_number = entry.entry_number
    event_data.hostname = binary.ByteStreamCopyToString(
        entry.hostname, codepage='ascii')
    event_data.offset = entry_offset
    event_data.path = binary.UTF16StreamCopyToString(entry.path)
    event_data.pin_status = entry.pin_status

    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)

    entry_offset = olecf_item.get_offset()