def testGetItems(self):
  """Test the __getitem__ function."""
  path = self._GetTestFilePath(['password.txt'])
  self._SkipIfPathNotExists(path)

  path_spec = path_spec_factory.Factory.NewPathSpec(
      definitions.TYPE_INDICATOR_OS, location=path)
  file_object = resolver.Resolver.OpenFileObject(
      path_spec, resolver_context=self._resolver_context)

  sliced_data = data_slice.DataSlice(file_object)

  # The 5 lines of password.txt and the slice bounds that select each one.
  expected_lines = [
      ((None, 20), b'place,user,password\n'),
      ((20, 44), b'bank,joesmith,superrich\n'),
      ((44, 64), b'alarm system,-,1234\n'),
      ((64, 86), b'treasure chest,-,1111\n'),
      ((86, None), b'uber secret laire,admin,admin\n')]

  # Test linear read: slices requested in file order.
  for (start, stop), expected_data in expected_lines:
    self.assertEqual(sliced_data[start:stop], expected_data)

  # Test non-linear read: the same slices requested out of order.
  for index in (2, 4, 1, 0, 3):
    (start, stop), expected_data = expected_lines[index]
    self.assertEqual(sliced_data[start:stop], expected_data)

  # Test edge cases: a start before the beginning and a stop past the end
  # of the 116-byte file still yield the corresponding line.
  self.assertEqual(sliced_data[-150:20], b'place,user,password\n')
  self.assertEqual(sliced_data[86:150], b'uber secret laire,admin,admin\n')

  # Non-integer keys are rejected.
  with self.assertRaises(TypeError):
    sliced_data['key']  # pylint: disable=pointless-statement

  # Slices with a step are rejected.
  with self.assertRaises(ValueError):
    sliced_data[44:64:2]  # pylint: disable=pointless-statement
def testLen(self):
  """Test the __len__ function."""
  path = self._GetTestFilePath(['password.txt'])
  self._SkipIfPathNotExists(path)

  path_spec = path_spec_factory.Factory.NewPathSpec(
      definitions.TYPE_INDICATOR_OS, location=path)
  file_object = resolver.Resolver.OpenFileObject(
      path_spec, resolver_context=self._resolver_context)

  sliced_data = data_slice.DataSlice(file_object)

  # password.txt is 116 bytes in size.
  self.assertEqual(len(sliced_data), 116)
def ParseFileObject(self, parser_mediator, file_object):
  """Parses a Portable Executable (PE) file-like object.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfVFS.
    file_object (dfvfs.FileIO): a file-like object.

  Raises:
    UnableToParseFile: when the file cannot be parsed.
  """
  data_slice_object = dfvfs_data_slice.DataSlice(file_object)
  try:
    pefile_object = pefile.PE(data=data_slice_object, fast_load=True)
    pefile_object.parse_data_directories(directories=self._PE_DIRECTORIES)
  except Exception as exception:
    raise errors.UnableToParseFile(
        'Unable to read PE file with error: {0!s}'.format(exception))

  event_data = PEEventData()
  # get_imphash() returns an empty string when there is no import hash;
  # map that to None.
  event_data.imphash = pefile_object.get_imphash() or None
  event_data.pe_type = self._GetPEType(pefile_object)
  event_data.section_names = self._GetSectionNames(pefile_object)

  # Use the file header compilation timestamp when present, otherwise emit
  # an explicit "not set" date and time value.
  timestamp = getattr(pefile_object.FILE_HEADER, 'TimeDateStamp', None)
  date_time = (
      dfdatetime_posix_time.PosixTime(timestamp=timestamp) if timestamp
      else dfdatetime_semantic_time.NotSet())

  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_CREATION)
  parser_mediator.ProduceEventWithEventData(event, event_data)

  # Parse the remaining PE structures of interest.
  for parse_function in (
      self._ParseExportTable, self._ParseImportTable,
      self._ParseLoadConfigurationTable, self._ParseDelayImportTable,
      self._ParseResourceSection):
    parse_function(parser_mediator, pefile_object, event_data)
def testLen(self):
  """Test the __len__ function."""
  path = self._GetTestFilePath(['password.txt'])
  self._SkipIfPathNotExists(path)

  path_spec = os_path_spec.OSPathSpec(location=path)
  file_object = os_file_io.OSFile(self._resolver_context)
  file_object.open(path_spec)

  # Ensure the file object is closed even when an assertion fails.
  try:
    sliced_data = data_slice.DataSlice(file_object)

    # password.txt is 116 bytes in size.
    self.assertEqual(len(sliced_data), 116)

  finally:
    file_object.close()
def testGetItems(self):
  """Test the __getitem__ function."""
  path = self._GetTestFilePath(['password.txt'])
  self._SkipIfPathNotExists(path)

  path_spec = os_path_spec.OSPathSpec(location=path)
  file_object = os_file_io.OSFile(self._resolver_context)
  file_object.open(path_spec)

  # Ensure the file object is closed even when an assertion fails.
  try:
    sliced_data = data_slice.DataSlice(file_object)

    # The 5 lines of password.txt and the slice bounds that select each one.
    expected_lines = [
        ((None, 20), b'place,user,password\n'),
        ((20, 44), b'bank,joesmith,superrich\n'),
        ((44, 64), b'alarm system,-,1234\n'),
        ((64, 86), b'treasure chest,-,1111\n'),
        ((86, None), b'uber secret laire,admin,admin\n')]

    # Test linear read: slices requested in file order.
    for (start, stop), expected_data in expected_lines:
      self.assertEqual(sliced_data[start:stop], expected_data)

    # Test non-linear read: the same slices requested out of order.
    for index in (2, 4, 1, 0, 3):
      (start, stop), expected_data = expected_lines[index]
      self.assertEqual(sliced_data[start:stop], expected_data)

    # Test edge cases: a start before the beginning and a stop past the end
    # of the 116-byte file still yield the corresponding line.
    self.assertEqual(sliced_data[-150:20], b'place,user,password\n')
    self.assertEqual(sliced_data[86:150], b'uber secret laire,admin,admin\n')

    # Non-integer keys are rejected.
    with self.assertRaises(TypeError):
      sliced_data['key']  # pylint: disable=pointless-statement

    # Slices with a step are rejected.
    with self.assertRaises(ValueError):
      sliced_data[44:64:2]  # pylint: disable=pointless-statement

  finally:
    file_object.close()
def ParseFileObject(self, parser_mediator, file_object):
  """Parses a Portable Executable (PE) file-like object.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfVFS.
    file_object (dfvfs.FileIO): a file-like object.

  Raises:
    UnableToParseFile: when the file cannot be parsed.
  """
  pe_data_slice = dfvfs_data_slice.DataSlice(file_object)
  try:
    pefile_object = pefile.PE(data=pe_data_slice, fast_load=True)
    pefile_object.parse_data_directories(directories=self._PE_DIRECTORIES)
  except Exception as exception:
    raise errors.UnableToParseFile(
        'Unable to read PE file with error: {0!s}'.format(exception))

  event_data = PEEventData()
  # Note that the result of get_imphash() is an empty string if there is no
  # import hash; map that to None so an empty value is not stored.
  event_data.imphash = pefile_object.get_imphash() or None
  event_data.pe_type = self._GetPEType(pefile_object)
  event_data.section_names = self._GetSectionNames(pefile_object)

  # TODO: remove after refactoring the pe event formatter.
  event_data.data_type = 'pe:compilation:compilation_time'

  timestamp = getattr(pefile_object.FILE_HEADER, 'TimeDateStamp', None)
  # Only produce a compilation time event when the file header contains a
  # usable timestamp; previously a missing TimeDateStamp resulted in
  # PosixTime(timestamp=None) being constructed.
  if timestamp:
    date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_CREATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)

  for dll_name, timestamp in self._GetImportTimestamps(pefile_object):
    if timestamp:
      event_data.dll_name = dll_name
      event_data.data_type = 'pe:import:import_time'

      date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

  for dll_name, timestamp in self._GetDelayImportTimestamps(pefile_object):
    if timestamp:
      event_data.dll_name = dll_name
      event_data.data_type = 'pe:delay_import:import_time'

      date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

  # Reset the DLL name so it does not leak into the resource and load
  # configuration events below.
  event_data.dll_name = None

  for timestamp in self._GetResourceTimestamps(pefile_object):
    if timestamp:
      event_data.data_type = 'pe:resource:creation_time'

      date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

  timestamp = self._GetLoadConfigTimestamp(pefile_object)
  if timestamp:
    event_data.data_type = 'pe:load_config:modification_time'

    date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)